import time
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Any, Dict
from unittest import mock

import orjson
from django.http import HttpRequest
from django.test import override_settings

from zerver.lib.initial_password import initial_password
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import get_test_image_file, ratelimit_rule
from zerver.lib.users import get_all_api_keys
from zerver.models import (
    Draft,
    NotificationTriggers,
    ScheduledMessageNotificationEmail,
    UserProfile,
)
from zerver.models.users import get_user_profile_by_api_key

if TYPE_CHECKING:
    from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse


class ChangeSettingsTest(ZulipTestCase):
    # TODO: Requires method consolidation; right now, there's no alternative
    # to check_for_toggle_param for PATCH.
    def check_for_toggle_param_patch(self, pattern: str, param: str) -> None:
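        # Flip the given boolean setting to True and then back to False via
        # PATCH, refetching the user each time so a stale cache can't mask a
        # value that was never persisted.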
self.login("hamlet")
|
|
|
|
user_profile = self.example_user("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
json_result = self.client_patch(pattern, {param: orjson.dumps(True).decode()})
|
2017-03-08 12:12:02 +01:00
|
|
|
self.assert_json_success(json_result)
|
|
|
|
# refetch user_profile object to correctly handle caching
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-03-08 12:12:02 +01:00
|
|
|
self.assertEqual(getattr(user_profile, param), True)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
json_result = self.client_patch(pattern, {param: orjson.dumps(False).decode()})
|
2017-03-08 12:12:02 +01:00
|
|
|
self.assert_json_success(json_result)
|
|
|
|
# refetch user_profile object to correctly handle caching
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-03-08 12:12:02 +01:00
|
|
|
self.assertEqual(getattr(user_profile, param), False)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_successful_change_settings(self) -> None:
        """
        A call to /json/settings with valid parameters changes the user's
        settings correctly and returns correct values.
        """
        user = self.example_user("hamlet")
        self.login_user(user)
        json_result = self.client_patch(
            "/json/settings",
            dict(
                full_name="Foo Bar",
                old_password=initial_password(user.delivery_email),
                new_password="foobar1",
            ),
        )
        self.assert_json_success(json_result)

        user.refresh_from_db()
        self.assertEqual(user.full_name, "Foo Bar")
        self.logout()

        # This is one of the few places we log in directly
        # with Django's client (to test the password change
        # with as few moving parts as possible).
        request = HttpRequest()
        request.session = self.client.session
        self.assertTrue(
            self.client.login(
                request=request,
                username=user.delivery_email,
                password="foobar1",
                realm=user.realm,
            ),
        )
        self.assert_logged_in_user_id(user.id)

    def test_password_change_check_strength(self) -> None:
        self.login("hamlet")
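        # Zulip also validates password strength on the server using zxcvbn;
        # PASSWORD_MIN_GUESSES is the minimum estimated guess count a new
        # password must require. The override below relaxes the thresholds so
        # the strong example password passes while "easy" still fails.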
        with self.settings(PASSWORD_MIN_LENGTH=3, PASSWORD_MIN_GUESSES=1000):
            json_result = self.client_patch(
                "/json/settings",
                dict(
                    full_name="Foo Bar",
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="easy",
                ),
            )
            self.assert_json_error(json_result, "New password is too weak!")

            json_result = self.client_patch(
                "/json/settings",
                dict(
                    full_name="Foo Bar",
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="f657gdGGk9",
                ),
            )
            self.assert_json_success(json_result)

    def test_illegal_name_changes(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)
        full_name = user.full_name

        with self.settings(NAME_CHANGES_DISABLED=True):
            json_result = self.client_patch("/json/settings", dict(full_name="Foo Bar"))

        # We actually fail silently here, since this only happens if
        # somebody is trying to game our API, and there's no reason to
        # give them the courtesy of an error reason.
        self.assert_json_success(json_result)

        user = self.example_user("hamlet")
        self.assertEqual(user.full_name, full_name)

        # Now try a too-long name
        json_result = self.client_patch("/json/settings", dict(full_name="x" * 1000))
        self.assert_json_error(json_result, "Name too long!")

        # Now try too-short names
        short_names = ["", "x"]
        for name in short_names:
            json_result = self.client_patch("/json/settings", dict(full_name=name))
            self.assert_json_error(json_result, "Name too short!")

    def test_illegal_characters_in_name_changes(self) -> None:
        self.login("hamlet")

        # Now try a name with invalid characters
        json_result = self.client_patch("/json/settings", dict(full_name="Opheli*"))
        self.assert_json_error(json_result, "Invalid characters in name!")

    def test_change_email_to_disposable_email(self) -> None:
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        realm = hamlet.realm
        realm.disallow_disposable_email_addresses = True
        realm.emails_restricted_to_domains = False
        realm.save()

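        # mailnator.com is treated as a disposable email domain, so the
        # realm's disallow_disposable_email_addresses policy rejects it.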
json_result = self.client_patch("/json/settings", dict(email="hamlet@mailnator.com"))
|
|
|
|
self.assert_json_error(json_result, "Please use your real email address.")
|
2018-05-04 18:15:54 +02:00
|
|
|
|
2021-07-22 10:05:04 +02:00
|
|
|
    def test_change_email_batching_period(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        self.login_user(hamlet)

        # Default is two minutes
        self.assertEqual(hamlet.email_notifications_batching_period_seconds, 120)

        result = self.client_patch(
            "/json/settings", {"email_notifications_batching_period_seconds": -1}
        )
        self.assert_json_error(result, "Invalid email batching period: -1 seconds")

        result = self.client_patch(
            "/json/settings", {"email_notifications_batching_period_seconds": 7 * 24 * 60 * 60 + 10}
        )
        self.assert_json_error(result, "Invalid email batching period: 604810 seconds")

        result = self.client_patch(
            "/json/settings", {"email_notifications_batching_period_seconds": 5 * 60}
        )
        self.assert_json_success(result)
        hamlet = self.example_user("hamlet")
        self.assertEqual(hamlet.email_notifications_batching_period_seconds, 300)

        # Test that timestamps get updated for existing ScheduledMessageNotificationEmail rows
        hamlet_msg_id1 = self.send_stream_message(sender=cordelia, stream_name="Verona")
        hamlet_msg_id2 = self.send_stream_message(sender=cordelia, stream_name="Verona")
        othello_msg_id1 = self.send_stream_message(sender=cordelia, stream_name="Verona")

        def create_entry(user_profile_id: int, message_id: int, timestamp: datetime) -> int:
            # The messages above don't actually mention anyone; the trigger
            # field is required, so we fill it in with MENTION.
            entry = ScheduledMessageNotificationEmail.objects.create(
                user_profile_id=user_profile_id,
                message_id=message_id,
                trigger=NotificationTriggers.MENTION,
                scheduled_timestamp=timestamp,
            )
            return entry.id

        def get_datetime_object(minutes: int) -> datetime:
            return datetime(
                year=2021, month=8, day=10, hour=10, minute=minutes, second=15, tzinfo=timezone.utc
            )

        hamlet_timestamp = get_datetime_object(10)
        othello_timestamp = get_datetime_object(20)

        hamlet_entry1_id = create_entry(hamlet.id, hamlet_msg_id1, hamlet_timestamp)
        hamlet_entry2_id = create_entry(hamlet.id, hamlet_msg_id2, hamlet_timestamp)
        othello_entry1_id = create_entry(othello.id, othello_msg_id1, othello_timestamp)

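        # Changing the batching period should shift each pending entry by the
        # difference between the new and old periods: these entries are
        # scheduled for 10:10 under the 300-second period, so a 600-second
        # period should move them to 10:15.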
        # Update Hamlet's setting from 300 seconds (5 minutes) to 600 seconds (10 minutes)
        self.assertEqual(hamlet.email_notifications_batching_period_seconds, 300)
        result = self.client_patch(
            "/json/settings", {"email_notifications_batching_period_seconds": 10 * 60}
        )
        self.assert_json_success(result)
        hamlet = self.example_user("hamlet")
        self.assertEqual(hamlet.email_notifications_batching_period_seconds, 10 * 60)

        def check_scheduled_timestamp(entry_id: int, expected_timestamp: datetime) -> None:
            entry = ScheduledMessageNotificationEmail.objects.get(id=entry_id)
            self.assertEqual(entry.scheduled_timestamp, expected_timestamp)

        # For Hamlet, the new scheduled timestamp should have been updated
        expected_hamlet_timestamp = get_datetime_object(15)
        check_scheduled_timestamp(hamlet_entry1_id, expected_hamlet_timestamp)
        check_scheduled_timestamp(hamlet_entry2_id, expected_hamlet_timestamp)

        # Nothing should have changed for Othello
        check_scheduled_timestamp(othello_entry1_id, othello_timestamp)

    def test_toggling_boolean_user_settings(self) -> None:
        """Test updating each boolean setting in UserProfile property_types."""
        boolean_settings = (
            s for s in UserProfile.property_types if UserProfile.property_types[s] is bool
        )
        for user_setting in boolean_settings:
            self.check_for_toggle_param_patch("/json/settings", user_setting)

    def test_wrong_old_password(self) -> None:
        self.login("hamlet")
        result = self.client_patch(
            "/json/settings",
            dict(
                old_password="bad_password",
                new_password="ignored",
            ),
        )
        self.assert_json_error(result, "Wrong password!")

    @override_settings(RATE_LIMITING_AUTHENTICATE=True)
    @ratelimit_rule(10, 2, domain="authenticate_by_username")
    def test_wrong_old_password_rate_limiter(self) -> None:
        self.login("hamlet")
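        # The ratelimit_rule above allows only two authentication attempts per
        # 10-second window, so the third attempt below is rejected even though
        # it supplies the correct password.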
        start_time = time.time()
        with mock.patch("time.time", return_value=start_time):
            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password="bad_password",
                    new_password="ignored",
                ),
            )
            self.assert_json_error(result, "Wrong password!")
            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password="bad_password",
                    new_password="ignored",
                ),
            )
            self.assert_json_error(result, "Wrong password!")

            # We're over the limit, so we'll get blocked even with the correct password.
            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="ignored",
                ),
            )
            self.assert_json_error(
                result, "You're making too many attempts! Try again in 10 seconds."
            )

        # After time passes, we should be able to succeed if we give the correct password.
        with mock.patch("time.time", return_value=start_time + 11):
            json_result = self.client_patch(
                "/json/settings",
                dict(
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="foobar1",
                ),
            )
            self.assert_json_success(json_result)

    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "zproject.backends.ZulipLDAPAuthBackend",
            "zproject.backends.EmailAuthBackend",
            "zproject.backends.ZulipDummyBackend",
        )
    )
    def test_change_password_ldap_backend(self) -> None:
        self.init_default_ldap_database()
        ldap_user_attr_map = {"full_name": "cn", "short_name": "sn"}

        self.login("hamlet")
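        # When LDAP_APPEND_DOMAIN matches the user's email domain, the LDAP
        # backend owns the password and changing it via Zulip is refused; when
        # it doesn't match, the LDAP backend skips the user and the change is
        # handled by EmailAuthBackend, so it succeeds.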
        with self.settings(
            LDAP_APPEND_DOMAIN="zulip.com", AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map
        ):
            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="ignored",
                ),
            )
            self.assert_json_error(result, "Your Zulip password is managed in LDAP")

            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password=self.ldap_password("hamlet"),  # hamlet's password in LDAP
                    new_password="ignored",
                ),
            )
            self.assert_json_error(result, "Your Zulip password is managed in LDAP")

        with self.settings(
            LDAP_APPEND_DOMAIN="example.com", AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map
        ), self.assertLogs("zulip.ldap", "DEBUG") as debug_log:
            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="ignored",
                ),
            )
            self.assert_json_success(result)
            self.assertEqual(
                debug_log.output,
                [
                    "DEBUG:zulip.ldap:ZulipLDAPAuthBackend: Email hamlet@zulip.com does not match LDAP domain example.com."
                ],
            )

        with self.settings(LDAP_APPEND_DOMAIN=None, AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map):
            result = self.client_patch(
                "/json/settings",
                dict(
                    old_password=initial_password(self.example_email("hamlet")),
                    new_password="ignored",
                ),
            )
            self.assert_json_error(result, "Your Zulip password is managed in LDAP")

    def do_test_change_user_setting(self, setting_name: str) -> None:
        test_changes: Dict[str, Any] = dict(
            default_language="de",
            web_home_view="all_messages",
            emojiset="google",
            timezone="America/Denver",
            demote_inactive_streams=2,
            web_mark_read_on_scroll_policy=2,
            user_list_style=2,
            web_stream_unreads_count_display_policy=2,
            color_scheme=2,
            email_notifications_batching_period_seconds=100,
            notification_sound="ding",
            desktop_icon_count_display=2,
            email_address_visibility=3,
            realm_name_in_email_notifications_policy=2,
            automatically_follow_topics_policy=1,
            automatically_unmute_topics_in_muted_streams_policy=1,
        )

        self.login("hamlet")
        test_value = test_changes.get(setting_name)
        # Error if a setting in UserProfile.property_types does not have test values
        if test_value is None:
            raise AssertionError(f"No test created for {setting_name}")

        if setting_name not in [
            "demote_inactive_streams",
            "user_list_style",
            "color_scheme",
            "web_mark_read_on_scroll_policy",
            "web_stream_unreads_count_display_policy",
        ]:
            data = {setting_name: test_value}
        else:
            data = {setting_name: orjson.dumps(test_value).decode()}

        result = self.client_patch("/json/settings", data)
        self.assert_json_success(result)
        user_profile = self.example_user("hamlet")
        self.assertEqual(getattr(user_profile, setting_name), test_value)

    def test_change_user_setting(self) -> None:
        """Test updating each non-boolean setting in UserProfile property_types."""
        user_settings = (
            s for s in UserProfile.property_types if UserProfile.property_types[s] is not bool
        )
        for setting in user_settings:
            self.do_test_change_user_setting(setting)
        self.do_test_change_user_setting("timezone")

    def test_invalid_setting_value(self) -> None:
        invalid_values_dict = dict(
            default_language="invalid_de",
            web_home_view="invalid_view",
            emojiset="apple",
            timezone="invalid_US/Mountain",
            demote_inactive_streams=10,
            web_mark_read_on_scroll_policy=10,
            user_list_style=10,
            web_stream_unreads_count_display_policy=10,
            color_scheme=10,
            notification_sound="invalid_sound",
            desktop_icon_count_display=10,
        )

        self.login("hamlet")
        for setting_name in invalid_values_dict:
            invalid_value = invalid_values_dict.get(setting_name)
            if isinstance(invalid_value, str):
                invalid_value = orjson.dumps(invalid_value).decode()

            req = {setting_name: invalid_value}
            result = self.client_patch("/json/settings", req)

            expected_error_msg = f"Invalid {setting_name}"
            if setting_name == "notification_sound":
                expected_error_msg = f"Invalid notification sound '{invalid_value}'"
            elif setting_name == "timezone":
                expected_error_msg = "timezone is not a recognized time zone"
            self.assert_json_error(result, expected_error_msg)
            hamlet = self.example_user("hamlet")
            self.assertNotEqual(getattr(hamlet, setting_name), invalid_value)

    def do_change_emojiset(self, emojiset: str) -> "TestHttpResponse":
        self.login("hamlet")
        data = {"emojiset": emojiset}
        result = self.client_patch("/json/settings", data)
        return result

    def test_emojiset(self) -> None:
        """Test that banned emoji sets are not accepted."""
        banned_emojisets = ["apple", "emojione"]
        valid_emojisets = ["google", "google-blob", "text", "twitter"]

        for emojiset in banned_emojisets:
            result = self.do_change_emojiset(emojiset)
            self.assert_json_error(result, "Invalid emojiset")

        for emojiset in valid_emojisets:
            result = self.do_change_emojiset(emojiset)
            self.assert_json_success(result)

    def test_avatar_changes_disabled(self) -> None:
        self.login("hamlet")

        with self.settings(AVATAR_CHANGES_DISABLED=True):
            result = self.client_delete("/json/users/me/avatar")
            self.assert_json_error(result, "Avatar changes are disabled in this organization.", 400)

        with self.settings(AVATAR_CHANGES_DISABLED=True):
            with get_test_image_file("img.png") as fp1:
                result = self.client_post("/json/users/me/avatar", {"f1": fp1})
            self.assert_json_error(result, "Avatar changes are disabled in this organization.", 400)

    def test_invalid_setting_name(self) -> None:
        self.login("hamlet")

        # Now try an invalid setting name
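        # Unknown setting names are not an error; the endpoint simply ignores
        # them and reports them back as ignored parameters.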
result = self.client_patch("/json/settings", dict(invalid_setting="value"))
|
|
|
|
self.assert_json_success(result, ignored_parameters=["invalid_setting"])
|
2021-07-15 16:57:07 +02:00
|
|
|
|
2021-07-07 22:08:11 +02:00
|
|
|
    def test_changing_setting_using_display_setting_endpoint(self) -> None:
        """
        This test just adds coverage for the `/settings/display` endpoint, which is
        now deprecated.
        """
        self.login("hamlet")

        result = self.client_patch(
            "/json/settings/display", dict(color_scheme=UserProfile.COLOR_SCHEME_NIGHT)
        )
        self.assert_json_success(result)
        hamlet = self.example_user("hamlet")
        self.assertEqual(hamlet.color_scheme, UserProfile.COLOR_SCHEME_NIGHT)

    def test_changing_setting_using_notification_setting_endpoint(self) -> None:
        """
        This test just adds coverage for the `/settings/notifications` endpoint, which is
        now deprecated.
        """
        self.login("hamlet")

        result = self.client_patch(
            "/json/settings/notifications",
            dict(enable_stream_desktop_notifications=orjson.dumps(True).decode()),
        )
        self.assert_json_success(result)
        hamlet = self.example_user("hamlet")
        self.assertEqual(hamlet.enable_stream_desktop_notifications, True)


class UserChangesTest(ZulipTestCase):
    def test_update_api_key(self) -> None:
        user = self.example_user("hamlet")
        email = user.email

        self.login_user(user)
        old_api_keys = get_all_api_keys(user)
        # Ensure the old API keys are in the authentication cache, so
        # that the below logic can test whether we have a cache-flushing bug.
        for api_key in old_api_keys:
            self.assertEqual(get_user_profile_by_api_key(api_key).email, email)

        # First verify this endpoint is not registered in the /json/... path
        # to prevent access with only a session.
        result = self.client_post("/json/users/me/api_key/regenerate")
        self.assertEqual(result.status_code, 404)

        # A logged-in session doesn't allow access to an /api/v1/ endpoint
        # of course.
        result = self.client_post("/api/v1/users/me/api_key/regenerate")
        self.assertEqual(result.status_code, 401)

        result = self.api_post(user, "/api/v1/users/me/api_key/regenerate")
        new_api_key = self.assert_json_success(result)["api_key"]
        self.assertNotIn(new_api_key, old_api_keys)
        user = self.example_user("hamlet")
        current_api_keys = get_all_api_keys(user)
        self.assertIn(new_api_key, current_api_keys)

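        # Regenerating the key should also flush the authentication cache:
        # the old keys must stop resolving to a user, while the current keys
        # still do.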
        for api_key in old_api_keys:
            with self.assertRaises(UserProfile.DoesNotExist):
                get_user_profile_by_api_key(api_key)

        for api_key in current_api_keys:
            self.assertEqual(get_user_profile_by_api_key(api_key).email, email)


class UserDraftSettingsTests(ZulipTestCase):
    def test_enable_drafts_syncing(self) -> None:
        hamlet = self.example_user("hamlet")
        hamlet.enable_drafts_synchronization = False
        hamlet.save()
        payload = {"enable_drafts_synchronization": orjson.dumps(True).decode()}
        resp = self.api_patch(hamlet, "/api/v1/settings", payload)
        self.assert_json_success(resp)
        hamlet = self.example_user("hamlet")
        self.assertTrue(hamlet.enable_drafts_synchronization)

    def test_disable_drafts_syncing(self) -> None:
        aaron = self.example_user("aaron")
        self.assertTrue(aaron.enable_drafts_synchronization)

        initial_count = Draft.objects.count()

        # Create some drafts. These should be deleted once aaron disables
        # syncing drafts.
        visible_stream_id = self.get_stream_id(self.get_streams(aaron)[0])
        draft_dicts = [
            {
                "type": "stream",
                "to": [visible_stream_id],
                "topic": "thinking out loud",
                "content": "What if pigs really could fly?",
                "timestamp": 1595479019,
            },
            {
                "type": "private",
                "to": [],
                "topic": "",
                "content": "What if we made it possible to sync drafts in Zulip?",
                "timestamp": 1595479020,
            },
        ]
        payload = {"drafts": orjson.dumps(draft_dicts).decode()}
        resp = self.api_post(aaron, "/api/v1/drafts", payload)
        self.assert_json_success(resp)
        self.assertEqual(Draft.objects.count() - initial_count, 2)

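        # Turning the setting off should delete the drafts synced above.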
payload = {"enable_drafts_synchronization": orjson.dumps(False).decode()}
|
|
|
|
resp = self.api_patch(aaron, "/api/v1/settings", payload)
|
|
|
|
self.assert_json_success(resp)
|
|
|
|
aaron = self.example_user("aaron")
|
|
|
|
self.assertFalse(aaron.enable_drafts_synchronization)
|
|
|
|
self.assertEqual(Draft.objects.count() - initial_count, 0)
|