from typing import Any, Dict, List, Union
from unittest import mock

import ujson

from zerver.lib.actions import (
    do_remove_realm_custom_profile_field,
    do_update_user_custom_profile_data_if_changed,
    try_add_realm_custom_profile_field,
    try_reorder_realm_custom_profile_fields,
)
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
from zerver.lib.markdown import convert as markdown_convert
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import queries_captured
from zerver.models import (
    CustomProfileField,
    CustomProfileFieldValue,
    custom_profile_fields_for_realm,
    get_realm,
)

class CustomProfileFieldTestCase(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.realm = get_realm("zulip")
        self.original_count = len(custom_profile_fields_for_realm(self.realm.id))

    def custom_field_exists_in_realm(self, field_id: int) -> bool:
        fields = custom_profile_fields_for_realm(self.realm.id)
        field_ids = [field.id for field in fields]
        return (field_id in field_ids)

class CreateCustomProfileFieldTest(CustomProfileFieldTestCase):
    def test_create(self) -> None:
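        # POST /json/realm/profile_fields validates field_type, name length,
        # hint length, and label uniqueness before creating a field; each
        # invalid payload below is rejected with a specific error.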
        self.login('iago')
        realm = get_realm('zulip')
        data: Dict[str, Any] = {"name": "Phone", "field_type": "text id"}
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Argument "field_type" is not valid JSON.')

        data["name"] = ""
        data["field_type"] = 100
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Label cannot be blank.')

        data["name"] = "*" * 41
        data["field_type"] = 100
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'name is too long (limit: 40 characters)')

        data["name"] = "Phone"
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Invalid field type.')

        data["name"] = "Phone"
        data["hint"] = "*" * 81
        data["field_type"] = CustomProfileField.SHORT_TEXT
        result = self.client_post("/json/realm/profile_fields", info=data)
        msg = "hint is too long (limit: 80 characters)"
        self.assert_json_error(result, msg)

        data["name"] = "Phone"
        data["hint"] = "Contact number"
        data["field_type"] = CustomProfileField.SHORT_TEXT
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

        field = CustomProfileField.objects.get(name="Phone", realm=realm)
        self.assertEqual(field.id, field.order)

        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result,
                               'A field with that label already exists.')

    def test_create_choice_field(self) -> None:
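        # For CHOICE fields, field_data must be a JSON dict mapping each choice
        # value to a dict with 'text' and 'order' keys; each malformed variant
        # below is rejected with a specific error.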
        self.login('iago')
        data: Dict[str, Union[str, int]] = {}
        data["name"] = "Favorite programming language"
        data["field_type"] = CustomProfileField.CHOICE

        data['field_data'] = 'invalid'
        result = self.client_post("/json/realm/profile_fields", info=data)
        error_msg = "Bad value for 'field_data': invalid"
        self.assert_json_error(result, error_msg)

        data["field_data"] = ujson.dumps({
            'python': ['1'],
            'java': ['2'],
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data is not a dict')

        data["field_data"] = ujson.dumps({
            'python': {'text': 'Python'},
            'java': {'text': 'Java'},
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "order key is missing from field_data")

        data["field_data"] = ujson.dumps({
            'python': {'text': 'Python', 'order': ''},
            'java': {'text': 'Java', 'order': '2'},
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["order"] cannot be blank.')

        data["field_data"] = ujson.dumps({
            '': {'text': 'Python', 'order': '1'},
            'java': {'text': 'Java', 'order': '2'},
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "'value' cannot be blank.")

        data["field_data"] = ujson.dumps({
            'python': {'text': 'Python', 'order': 1},
            'java': {'text': 'Java', 'order': '2'},
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["order"] is not a string')

        data["field_data"] = ujson.dumps({})
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Field must have at least one choice.')

        data["field_data"] = ujson.dumps({
            'python': {'text': 'Python', 'order': '1'},
            'java': {'text': 'Java', 'order': '2'},
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

    def test_create_default_external_account_field(self) -> None:
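        # For default external account subtypes (e.g. 'twitter'), the name and
        # hint come from DEFAULT_EXTERNAL_ACCOUNTS; any values supplied by the
        # admin are ignored, and the resulting field cannot be edited later.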
        self.login('iago')
        realm = get_realm("zulip")
        field_type: int = CustomProfileField.EXTERNAL_ACCOUNT
        field_data: str = ujson.dumps({
            'subtype': 'twitter',
        })
        invalid_field_name: str = "Not required field name"
        invalid_field_hint: str = "Not required field hint"

        result = self.client_post("/json/realm/profile_fields",
                                  info=dict(
                                      field_type=field_type,
                                      field_data=field_data,
                                      hint=invalid_field_hint,
                                      name=invalid_field_name,
                                  ))
        self.assert_json_success(result)
        # The supplied name and hint are silently overwritten with the values
        # from the default fields dict for default custom external account fields.
        with self.assertRaises(CustomProfileField.DoesNotExist):
            field = CustomProfileField.objects.get(name=invalid_field_name, realm=realm)
        # The field is created with the name 'Twitter', per the default fields dict.
        field = CustomProfileField.objects.get(name='Twitter')
        self.assertEqual(field.name, DEFAULT_EXTERNAL_ACCOUNTS['twitter']['name'])
        self.assertEqual(field.hint, DEFAULT_EXTERNAL_ACCOUNTS['twitter']['hint'])

        result = self.client_delete(f"/json/realm/profile_fields/{field.id}")
        self.assert_json_success(result)

        # Creation should also work with only the external field type and
        # subtype data, without a name or hint.
        result = self.client_post("/json/realm/profile_fields",
                                  info=dict(field_type=field_type, field_data=field_data))
        self.assert_json_success(result)

        # Default external account field data cannot be updated.
        field = CustomProfileField.objects.get(name="Twitter", realm=realm)
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'Twitter username',
                  'field_type': CustomProfileField.EXTERNAL_ACCOUNT},
        )
        self.assert_json_error(result, 'Default custom field cannot be updated.')

        result = self.client_delete(f"/json/realm/profile_fields/{field.id}")
        self.assert_json_success(result)

    def test_create_external_account_field(self) -> None:
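        # field_data must supply a recognized 'subtype'; the 'custom' subtype
        # additionally requires a well-formed 'url_pattern' URL containing the
        # %(username)s placeholder.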
        self.login('iago')
        realm = get_realm('zulip')
        data: Dict[str, Union[str, int, Dict[str, str]]] = {}
        data["name"] = "Twitter"
        data["field_type"] = CustomProfileField.EXTERNAL_ACCOUNT

        data['field_data'] = 'invalid'
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "Bad value for 'field_data': invalid")

        data['field_data'] = ujson.dumps({})
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "subtype key is missing from field_data")

        data["field_data"] = ujson.dumps({
            'subtype': '',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["subtype"] cannot be blank.')

        data["field_data"] = ujson.dumps({
            'subtype': '123',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Invalid external account type')

        non_default_external_account = 'linkedin'
        data["field_data"] = ujson.dumps({
            'subtype': non_default_external_account,
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Invalid external account type')

        data["field_data"] = ujson.dumps({
            'subtype': 'twitter',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

        twitter_field = CustomProfileField.objects.get(name="Twitter", realm=realm)
        self.assertEqual(twitter_field.field_type, CustomProfileField.EXTERNAL_ACCOUNT)
        self.assertEqual(twitter_field.name, "Twitter")
        self.assertEqual(ujson.loads(twitter_field.field_data)['subtype'], 'twitter')

        data['name'] = 'Reddit'
        data["field_data"] = ujson.dumps({
            'subtype': 'custom',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Custom external account must define url pattern')

        data["field_data"] = ujson.dumps({
            'subtype': 'custom',
            'url_pattern': 123,
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["url_pattern"] is not a string')

        data["field_data"] = ujson.dumps({
            'subtype': 'custom',
            'url_pattern': 'invalid',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Malformed URL pattern.')

        data["field_data"] = ujson.dumps({
            'subtype': 'custom',
            'url_pattern': 'https://www.reddit.com/%(username)s/user/%(username)s',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Malformed URL pattern.')

        data["field_data"] = ujson.dumps({
            'subtype': 'custom',
            'url_pattern': 'reddit.com/%(username)s',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["url_pattern"] is not a URL')

        data["field_data"] = ujson.dumps({
            'subtype': 'custom',
            'url_pattern': 'https://www.reddit.com/user/%(username)s',
        })
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

        custom_field = CustomProfileField.objects.get(name="Reddit", realm=realm)
        self.assertEqual(custom_field.field_type, CustomProfileField.EXTERNAL_ACCOUNT)
        self.assertEqual(custom_field.name, "Reddit")
        field_data = ujson.loads(custom_field.field_data)
        self.assertEqual(field_data['subtype'], 'custom')
        self.assertEqual(field_data['url_pattern'], 'https://www.reddit.com/user/%(username)s')

        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "A field with that label already exists.")

    def test_create_field_of_type_user(self) -> None:
        self.login('iago')
        data = {"name": "Your mentor",
                "field_type": CustomProfileField.USER,
                }
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

    def test_not_realm_admin(self) -> None:
        self.login('hamlet')
        result = self.client_post("/json/realm/profile_fields")
        self.assert_json_error(result, 'Must be an organization administrator')
        result = self.client_delete("/json/realm/profile_fields/1")
        self.assert_json_error(result, 'Must be an organization administrator')

class DeleteCustomProfileFieldTest(CustomProfileFieldTestCase):
    def test_delete(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        field = CustomProfileField.objects.get(name="Phone number", realm=realm)
        result = self.client_delete("/json/realm/profile_fields/100")
        self.assert_json_error(result, 'Field id 100 not found.')

        self.assertTrue(self.custom_field_exists_in_realm(field.id))
        result = self.client_delete(
            f"/json/realm/profile_fields/{field.id}")
        self.assert_json_success(result)
        self.assertFalse(self.custom_field_exists_in_realm(field.id))

    def test_delete_field_value(self) -> None:
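        # DELETE /json/users/me/profile_data clears the requesting user's
        # values for the given field ids; unknown ids are an error, and
        # deleting an already-cleared value succeeds as a no-op.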
        iago = self.example_user("iago")
        self.login_user(iago)
        realm = get_realm("zulip")

        invalid_field_id = 1234
        result = self.client_delete("/json/users/me/profile_data", {
            'data': ujson.dumps([invalid_field_id]),
        })
        self.assert_json_error(result,
                               f'Field id {invalid_field_id} not found.')

        field = CustomProfileField.objects.get(name="Mentor", realm=realm)
        data: List[Dict[str, Union[int, str, List[int]]]] = [
            {'id': field.id, 'value': [self.example_user("aaron").id]},
        ]
        do_update_user_custom_profile_data_if_changed(iago, data)

        iago_value = CustomProfileFieldValue.objects.get(user_profile=iago, field=field)
        converter = field.FIELD_CONVERTERS[field.field_type]
        self.assertEqual([self.example_user("aaron").id], converter(iago_value.value))

        result = self.client_delete("/json/users/me/profile_data", {
            'data': ujson.dumps([field.id]),
        })
        self.assert_json_success(result)

        # Deleting an already-removed value should not throw an exception.
        result = self.client_delete("/json/users/me/profile_data", {
            'data': ujson.dumps([field.id]),
        })
        self.assert_json_success(result)

    def test_delete_internals(self) -> None:
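        # do_remove_realm_custom_profile_field deletes the field along with
        # every user's stored value for it.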
        user_profile = self.example_user('iago')
        realm = user_profile.realm
        field = CustomProfileField.objects.get(name="Phone number", realm=realm)
        data: List[Dict[str, Union[int, str, List[int]]]] = [
            {'id': field.id, 'value': '123456'},
        ]
        do_update_user_custom_profile_data_if_changed(user_profile, data)

        self.assertTrue(self.custom_field_exists_in_realm(field.id))
        self.assertEqual(user_profile.customprofilefieldvalue_set.count(), self.original_count)

        do_remove_realm_custom_profile_field(realm, field)

        self.assertFalse(self.custom_field_exists_in_realm(field.id))
        self.assertEqual(user_profile.customprofilefieldvalue_set.count(), self.original_count - 1)

class UpdateCustomProfileFieldTest(CustomProfileFieldTestCase):
    def test_update(self) -> None:
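        # PATCH /json/realm/profile_fields/<id> applies the same name, hint,
        # and field_data validation as creation, and edits the field in place
        # without changing the total number of fields.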
        self.login('iago')
        realm = get_realm('zulip')
        result = self.client_patch(
            "/json/realm/profile_fields/100",
            info={'name': 'Phone Number',
                  'field_type': CustomProfileField.SHORT_TEXT},
        )
        self.assert_json_error(result, 'Field id 100 not found.')

        field = CustomProfileField.objects.get(name="Phone number", realm=realm)
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': '',
                  'field_type': CustomProfileField.SHORT_TEXT},
        )
        self.assert_json_error(result, 'Label cannot be blank.')

        self.assertEqual(CustomProfileField.objects.count(), self.original_count)
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'New phone number',
                  'field_type': CustomProfileField.SHORT_TEXT})
        self.assert_json_success(result)
        field = CustomProfileField.objects.get(id=field.id, realm=realm)
        self.assertEqual(CustomProfileField.objects.count(), self.original_count)
        self.assertEqual(field.name, 'New phone number')
        self.assertIs(field.hint, '')
        self.assertEqual(field.field_type, CustomProfileField.SHORT_TEXT)

        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': '*' * 41,
                  'field_type': CustomProfileField.SHORT_TEXT})
        msg = "name is too long (limit: 40 characters)"
        self.assert_json_error(result, msg)

        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'New phone number',
                  'hint': '*' * 81,
                  'field_type': CustomProfileField.SHORT_TEXT})
        msg = "hint is too long (limit: 80 characters)"
        self.assert_json_error(result, msg)

        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'New phone number',
                  'hint': 'New contact number',
                  'field_type': CustomProfileField.SHORT_TEXT})
        self.assert_json_success(result)

        field = CustomProfileField.objects.get(id=field.id, realm=realm)
        self.assertEqual(CustomProfileField.objects.count(), self.original_count)
        self.assertEqual(field.name, 'New phone number')
        self.assertEqual(field.hint, 'New contact number')
        self.assertEqual(field.field_type, CustomProfileField.SHORT_TEXT)

        field = CustomProfileField.objects.get(name="Favorite editor", realm=realm)
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'Favorite editor',
                  'field_data': 'invalid'})
        self.assert_json_error(result, "Bad value for 'field_data': invalid")

        field_data = ujson.dumps({
            'vim': 'Vim',
            'emacs': {'order': '2', 'text': 'Emacs'},
        })
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'Favorite editor',
                  'field_data': field_data})
        self.assert_json_error(result, "field_data is not a dict")

        field_data = ujson.dumps({
            'vim': {'order': '1', 'text': 'Vim'},
            'emacs': {'order': '2', 'text': 'Emacs'},
            'notepad': {'order': '3', 'text': 'Notepad'},
        })
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={'name': 'Favorite editor',
                  'field_data': field_data})
        self.assert_json_success(result)

    def test_update_is_aware_of_uniqueness(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        field_1 = try_add_realm_custom_profile_field(realm, "Phone",
                                                     CustomProfileField.SHORT_TEXT)

        field_2 = try_add_realm_custom_profile_field(realm, "Phone 1",
                                                     CustomProfileField.SHORT_TEXT)

        self.assertTrue(self.custom_field_exists_in_realm(field_1.id))
        self.assertTrue(self.custom_field_exists_in_realm(field_2.id))
        result = self.client_patch(
            f"/json/realm/profile_fields/{field_2.id}",
            info={'name': 'Phone', 'field_type': CustomProfileField.SHORT_TEXT})
        self.assert_json_error(
            result, 'A field with that label already exists.')

    def assert_error_update_invalid_value(self, field_name: str, new_value: object, error_msg: str) -> None:
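        # Helper: set `new_value` for the named field on the logged-in user's
        # own profile and assert that the request fails with `error_msg`.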
        self.login('iago')
        realm = get_realm('zulip')
        field = CustomProfileField.objects.get(name=field_name, realm=realm)

        # Update value of field
        result = self.client_patch("/json/users/me/profile_data",
                                   {'data': ujson.dumps([{"id": field.id, "value": new_value}])})
        self.assert_json_error(result, error_msg)

    def test_update_invalid_field(self) -> None:
        self.login('iago')
        data = [{'id': 1234, 'value': '12'}]
        result = self.client_patch("/json/users/me/profile_data", {
            'data': ujson.dumps(data),
        })
        self.assert_json_error(result,
                               "Field id 1234 not found.")
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_update_invalid_short_text(self) -> None:
        field_name = "Phone number"
        self.assert_error_update_invalid_value(field_name, 't' * 201,
                                               f"{field_name} is too long (limit: 50 characters)")

    def test_update_invalid_date(self) -> None:
        field_name = "Birthday"
        self.assert_error_update_invalid_value(field_name, "a-b-c",
                                               f"{field_name} is not a date")
        self.assert_error_update_invalid_value(field_name, 123,
                                               f"{field_name} is not a string")

    def test_update_invalid_url(self) -> None:
        field_name = "Favorite website"
        self.assert_error_update_invalid_value(field_name, "not URL",
                                               f"{field_name} is not a URL")

    def test_update_invalid_user_field(self) -> None:
        field_name = "Mentor"
        invalid_user_id = 1000
        self.assert_error_update_invalid_value(field_name, [invalid_user_id],
                                               f"Invalid user ID: {invalid_user_id}")

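    # End-to-end update covering one field of every built-in type. The request
    # body is a JSON-encoded list of {"id": <field id>, "value": <new value>}
    # dicts, e.g. [{"id": 1, "value": "vim"}] (the id shown here is
    # illustrative only).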
    def test_update_profile_data_successfully(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        fields = [
            ('Phone number', '*short* text data'),
            ('Biography', '~~short~~ **long** text data'),
            ('Favorite food', 'long short text data'),
            ('Favorite editor', 'vim'),
            ('Birthday', '1909-3-5'),
            ('Favorite website', 'https://zulip.com'),
            ('Mentor', [self.example_user("cordelia").id]),
            ('GitHub', 'zulip-mobile'),
        ]

        data = []
        for i, field_value in enumerate(fields):
            name, value = field_value
            field = CustomProfileField.objects.get(name=name, realm=realm)
            data.append({
                'id': field.id,
                'value': value,
                'field': field,
            })

        # Update value of field
        result = self.client_patch(
            "/json/users/me/profile_data",
            {"data": ujson.dumps([{"id": f["id"], "value": f["value"]} for f in data])},
        )
        self.assert_json_success(result)

        iago = self.example_user('iago')
        expected_value = {f['id']: f['value'] for f in data}
        expected_rendered_value: Dict[Union[int, float, str, None], Union[str, None]] = {}
        for f in data:
            if f['field'].is_renderable():
                expected_rendered_value[f['id']] = markdown_convert(f['value'])
            else:
                expected_rendered_value[f['id']] = None

        for field_dict in iago.profile_data:
            self.assertEqual(field_dict['value'], expected_value[field_dict['id']])
            self.assertEqual(field_dict['rendered_value'], expected_rendered_value[field_dict['id']])
            for k in ['id', 'type', 'name', 'field_data']:
                self.assertIn(k, field_dict)

        # Update value of one field.
        field = CustomProfileField.objects.get(name='Biography', realm=realm)
        data = [{
            'id': field.id,
            'value': 'foobar',
        }]

        result = self.client_patch("/json/users/me/profile_data",
                                   {'data': ujson.dumps(data)})
        self.assert_json_success(result)
        for field_dict in iago.profile_data:
            if field_dict['id'] == field.id:
                self.assertEqual(field_dict['value'], 'foobar')

    def test_update_invalid_choice_field(self) -> None:
        field_name = "Favorite editor"
        self.assert_error_update_invalid_value(field_name, "foobar",
                                               f"'foobar' is not a valid choice for '{field_name}'.")

    def test_update_choice_field_successfully(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        field = CustomProfileField.objects.get(name='Favorite editor', realm=realm)
        data = [{
            'id': field.id,
            'value': 'emacs',
        }]

        result = self.client_patch("/json/users/me/profile_data",
                                   {'data': ujson.dumps(data)})
        self.assert_json_success(result)

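    # A freshly created field has no value for a user, so both "value" and
    # "rendered_value" in profile_data should be None until the field is set.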
    def test_null_value_and_rendered_value(self) -> None:
        self.login('iago')
        realm = get_realm("zulip")

        quote = try_add_realm_custom_profile_field(
            realm=realm,
            name="Quote",
            hint="Saying or phrase which you are known for.",
            field_type=CustomProfileField.SHORT_TEXT,
        )

        iago = self.example_user("iago")
        iago_profile_quote = iago.profile_data[-1]
        value = iago_profile_quote["value"]
        rendered_value = iago_profile_quote["rendered_value"]
        self.assertIsNone(value)
        self.assertIsNone(rendered_value)

        update_dict: Dict[str, Union[int, str, List[int]]] = {
            "id": quote.id,
            "value": "***beware*** of jealousy...",
        }
        do_update_user_custom_profile_data_if_changed(iago, [update_dict])

        iago_profile_quote = self.example_user("iago").profile_data[-1]
        value = iago_profile_quote["value"]
        rendered_value = iago_profile_quote["rendered_value"]
        self.assertIsNotNone(value)
        self.assertIsNotNone(rendered_value)
        self.assertEqual("<p><strong><em>beware</em></strong> of jealousy...</p>", rendered_value)

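    # do_update_user_custom_profile_data_if_changed() should skip the notify
    # event when the submitted value matches what is already stored; the mock
    # below verifies that no event is sent for the second, identical update.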
    def test_do_update_value_not_changed(self) -> None:
        iago = self.example_user("iago")
        self.login_user(iago)
        realm = get_realm("zulip")

        # Set field value:
        field = CustomProfileField.objects.get(name="Mentor", realm=realm)
        data: List[Dict[str, Union[int, str, List[int]]]] = [
            {'id': field.id, 'value': [self.example_user("aaron").id]},
        ]
        do_update_user_custom_profile_data_if_changed(iago, data)

        with mock.patch("zerver.lib.actions.notify_user_update_custom_profile_data") as mock_notify:
            # Attempting to "update" the field value, when it wouldn't actually change,
            # if always_notify is disabled, shouldn't trigger notify.
            do_update_user_custom_profile_data_if_changed(iago, data)
            mock_notify.assert_not_called()

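# Listing of profile fields via GET /json/realm/profile_fields, plus the
# profile_data exposed through the users endpoints.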
class ListCustomProfileFieldTest(CustomProfileFieldTestCase):
    def test_list(self) -> None:
        self.login('iago')
        result = self.client_get("/json/realm/profile_fields")
        self.assert_json_success(result)
        self.assertEqual(200, result.status_code)
        content = result.json()
        self.assertEqual(len(content["custom_fields"]), self.original_count)

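    # The fields are re-ordered to the reverse of their current order, so the
    # listing is expected to come back sorted by descending field id.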
    def test_list_order(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        order = (
            CustomProfileField.objects.filter(realm=realm)
            .order_by('-order')
            .values_list('order', flat=True)
        )
        try_reorder_realm_custom_profile_fields(realm, order)
        result = self.client_get("/json/realm/profile_fields")
        content = result.json()
        self.assertListEqual(content["custom_fields"],
                             sorted(content["custom_fields"], key=lambda x: -x["id"]))

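    # include_custom_profile_fields=true should add profile_data for human
    # users but not for bots, and should not add per-user database queries;
    # the query count asserted below acts as a regression check.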
    def test_get_custom_profile_fields_from_api(self) -> None:
        iago = self.example_user("iago")
        test_bot = self.create_test_bot("foo-bot", iago)
        self.login_user(iago)
        assert test_bot

        url = "/json/users?client_gravatar=false&include_custom_profile_fields=true"
        with queries_captured() as queries:
            response = self.client_get(url)

        self.assertEqual(len(queries), 4)

        self.assertEqual(response.status_code, 200)
        raw_users_data = response.json()["members"]

        iago_raw_data = None
        test_bot_raw_data = None

        for user_dict in raw_users_data:
            if user_dict["user_id"] == iago.id:
                iago_raw_data = user_dict
                continue
            if user_dict["user_id"] == test_bot.id:
                test_bot_raw_data = user_dict
                continue

        if (not iago_raw_data) or (not test_bot_raw_data):
            raise AssertionError("Could not find required data from the response.")

        expected_keys_for_iago = {
            "delivery_email",
            "email", "user_id", "avatar_url", "avatar_version", "is_admin", "is_guest", "is_bot", "is_owner",
            "full_name", "timezone", "is_active", "date_joined", "profile_data"}
        self.assertEqual(set(iago_raw_data.keys()), expected_keys_for_iago)
        self.assertNotEqual(iago_raw_data["profile_data"], {})

        expected_keys_for_test_bot = {
            "delivery_email",
            "email", "user_id", "avatar_url", "avatar_version", "is_admin", "is_guest", "is_bot", "is_owner",
            "full_name", "timezone", "is_active", "date_joined", "bot_type", "bot_owner_id"}
        self.assertEqual(set(test_bot_raw_data.keys()), expected_keys_for_test_bot)
        self.assertEqual(test_bot_raw_data["bot_type"], 1)
        self.assertEqual(test_bot_raw_data["bot_owner_id"], iago_raw_data["user_id"])

        url = "/json/users?client_gravatar=false"
        response = self.client_get(url)
        self.assertEqual(response.status_code, 200)
        raw_users_data = response.json()["members"]
        for user_dict in raw_users_data:
            with self.assertRaises(KeyError):
                user_dict["profile_data"]

    def test_get_custom_profile_fields_from_api_for_single_user(self) -> None:
        self.login('iago')
        expected_keys = {
            "result", "msg", "max_message_id", "user_id", "avatar_url",
            "full_name", "email", "is_bot", "is_admin", "is_owner", "profile_data",
            "avatar_version", "timezone", "delivery_email", "is_active", "is_guest",
            "date_joined"}

        url = "/json/users/me"
        response = self.client_get(url)
        self.assertEqual(response.status_code, 200)
        raw_user_data = response.json()
        self.assertEqual(set(raw_user_data.keys()), expected_keys)

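# The reorder endpoint takes a JSON list of field ids; a field's position in
# that list becomes its new "order" value, which the tests below verify via
# field.id == order[field.order].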
class ReorderCustomProfileFieldTest(CustomProfileFieldTestCase):
    def test_reorder(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        order = (
            CustomProfileField.objects.filter(realm=realm)
            .order_by('-order')
            .values_list('order', flat=True)
        )
        result = self.client_patch("/json/realm/profile_fields",
                                   info={'order': ujson.dumps(order)})
        self.assert_json_success(result)
        fields = CustomProfileField.objects.filter(realm=realm).order_by('order')
        for field in fields:
            self.assertEqual(field.id, order[field.order])

    def test_reorder_duplicates(self) -> None:
        self.login('iago')
        realm = get_realm('zulip')
        order = (
            CustomProfileField.objects.filter(realm=realm)
            .order_by('-order')
            .values_list('order', flat=True)
        )
        order = list(order)
        order.append(4)
        result = self.client_patch("/json/realm/profile_fields",
                                   info={'order': ujson.dumps(order)})
        self.assert_json_success(result)
        fields = CustomProfileField.objects.filter(realm=realm).order_by('order')
        for field in fields:
            self.assertEqual(field.id, order[field.order])

    def test_reorder_unauthorized(self) -> None:
        self.login('hamlet')
        realm = get_realm('zulip')
        order = (
            CustomProfileField.objects.filter(realm=realm)
            .order_by('-order')
            .values_list('order', flat=True)
        )
        result = self.client_patch("/json/realm/profile_fields",
                                   info={'order': ujson.dumps(order)})
        self.assert_json_error(result, "Must be an organization administrator")

    def test_reorder_invalid(self) -> None:
        self.login('iago')
        order = [100, 200, 300]
        result = self.client_patch("/json/realm/profile_fields",
                                   info={'order': ujson.dumps(order)})
        self.assert_json_error(
            result, 'Invalid order mapping.')
        order = [1, 2]
        result = self.client_patch("/json/realm/profile_fields",
                                   info={'order': ujson.dumps(order)})
        self.assert_json_error(
            result, 'Invalid order mapping.')