2020-06-13 05:24:42 +02:00
|
|
|
from typing import List, Optional, Sequence
|
2017-11-01 10:04:16 +01:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
|
|
|
from django.utils.translation import ugettext as _
|
|
|
|
|
2019-11-16 15:56:40 +01:00
|
|
|
from zerver.decorator import require_member_or_admin, require_user_group_edit_permission
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.actions import (
|
|
|
|
bulk_add_members_to_user_group,
|
|
|
|
check_add_user_group,
|
|
|
|
check_delete_user_group,
|
|
|
|
do_update_user_group_description,
|
|
|
|
do_update_user_group_name,
|
|
|
|
remove_members_from_user_group,
|
|
|
|
)
|
2017-11-02 07:53:08 +01:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
|
|
|
from zerver.lib.response import json_error, json_success
|
|
|
|
from zerver.lib.user_groups import (
|
|
|
|
access_user_group_by_id,
|
|
|
|
get_memberships_of_users,
|
|
|
|
get_user_group_members,
|
|
|
|
user_groups_in_realm_serialized,
|
|
|
|
)
|
2017-11-01 10:04:16 +01:00
|
|
|
from zerver.lib.users import user_ids_to_users
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.validator import check_int, check_list
|
2019-02-02 23:53:22 +01:00
|
|
|
from zerver.models import UserProfile
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.views.streams import FuncKwargPair, compose_views
|
|
|
|
|
2017-11-01 10:04:16 +01:00
|
|
|
|
2019-11-16 15:56:40 +01:00
|
|
|
@require_user_group_edit_permission
@has_request_variables
def add_user_group(request: HttpRequest, user_profile: UserProfile,
                   name: str=REQ(),
                   members: Sequence[int]=REQ(validator=check_list(check_int), default=[]),
                   description: str=REQ()) -> HttpResponse:
    """Create a new user group in the requester's realm.

    The initial membership is given as a list of user ids; each id must
    belong to the requester's realm (user_ids_to_users enforces this).
    """
    member_profiles = user_ids_to_users(members, user_profile.realm)
    # Validation (name uniqueness, etc.) and creation both happen here.
    check_add_user_group(user_profile.realm, name, member_profiles, description)
    return json_success()
|
2017-11-02 07:53:08 +01:00
|
|
|
|
2019-06-18 16:43:22 +02:00
|
|
|
@require_member_or_admin
@has_request_variables
def get_user_group(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    """Return all user groups in the requester's realm, serialized for the API."""
    return json_success(
        {"user_groups": user_groups_in_realm_serialized(user_profile.realm)})
|
|
|
|
|
2019-11-16 15:56:40 +01:00
|
|
|
@require_user_group_edit_permission
@has_request_variables
def edit_user_group(request: HttpRequest, user_profile: UserProfile,
                    user_group_id: int=REQ(validator=check_int, path_only=True),
                    name: Optional[str]=REQ(default=None),
                    description: Optional[str]=REQ(default=None),
                    ) -> HttpResponse:
    """Update the name and/or description of an existing user group.

    Bug fix: the two fields previously defaulted to "", so a request that
    supplied only one of them would clobber the other with the empty
    string (e.g. a description-only edit renamed the group to "").  They
    now default to None, and only fields actually supplied are applied.
    """
    if not name and not description:
        # Neither field supplied (or both explicitly empty): nothing to do.
        return json_error(_("No new data supplied"))

    # Raises if the group doesn't exist or the user may not modify it.
    user_group = access_user_group_by_id(user_group_id, user_profile)

    # Only rename when a non-empty name was supplied and it differs.
    if name and name != user_group.name:
        do_update_user_group_name(user_group, name)

    # An explicit empty description is a valid way to clear it, so
    # distinguish "omitted" (None) from "supplied" here.
    if description is not None and description != user_group.description:
        do_update_user_group_description(user_group, description)

    return json_success()
|
2017-11-02 08:15:14 +01:00
|
|
|
|
2019-11-16 15:56:40 +01:00
|
|
|
@require_user_group_edit_permission
@has_request_variables
def delete_user_group(request: HttpRequest, user_profile: UserProfile,
                      user_group_id: int=REQ(validator=check_int, path_only=True)) -> HttpResponse:
    """Delete the user group with the given id.

    Access checking and the deletion itself are both delegated to
    check_delete_user_group, which raises on a bad id or missing
    permission.
    """
    check_delete_user_group(user_group_id, user_profile)
    return json_success()
|
2017-11-02 08:53:30 +01:00
|
|
|
|
2019-11-16 15:56:40 +01:00
|
|
|
@require_user_group_edit_permission
@has_request_variables
def update_user_group_backend(request: HttpRequest, user_profile: UserProfile,
                              user_group_id: int=REQ(validator=check_int, path_only=True),
                              delete: Sequence[int]=REQ(validator=check_list(check_int), default=[]),
                              add: Sequence[int]=REQ(validator=check_list(check_int), default=[]),
                              ) -> HttpResponse:
    """Add and/or remove members of a user group in a single request.

    Delegates to the add/remove backends via compose_views, which runs
    both operations and merges their responses.
    """
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    # Additions are applied before removals, matching the order below.
    add_kwargs = dict(user_group_id=user_group_id, members=add)
    delete_kwargs = dict(user_group_id=user_group_id, members=delete)
    method_kwarg_pairs: List[FuncKwargPair] = [
        (add_members_to_group_backend, add_kwargs),
        (remove_members_from_group_backend, delete_kwargs),
    ]
    return compose_views(request, user_profile, method_kwarg_pairs)
|
|
|
|
|
2017-11-27 09:28:57 +01:00
|
|
|
def add_members_to_group_backend(request: HttpRequest, user_profile: UserProfile,
                                 user_group_id: int, members: List[int]) -> HttpResponse:
    """Add the given user ids as members of the given user group.

    Raises JsonableError if any requested user is already a member.

    Bug fix: the membership loop previously reused the name
    `user_profile`, shadowing the acting-user parameter; the loop
    variable and the local list are renamed so the requester's profile
    is never clobbered.
    """
    if not members:
        # No-op request; mirror the success response without any lookups.
        return json_success()

    user_group = access_user_group_by_id(user_group_id, user_profile)
    member_profiles = user_ids_to_users(members, user_profile.realm)
    existing_member_ids = set(get_memberships_of_users(user_group, member_profiles))

    for member_profile in member_profiles:
        if member_profile.id in existing_member_ids:
            raise JsonableError(_("User {user_id} is already a member of this group").format(
                user_id=member_profile.id,
            ))

    bulk_add_members_to_user_group(user_group, member_profiles)
    return json_success()
|
|
|
|
|
2017-11-27 09:28:57 +01:00
|
|
|
def remove_members_from_group_backend(request: HttpRequest, user_profile: UserProfile,
                                      user_group_id: int, members: List[int]) -> HttpResponse:
    """Remove the given user ids from the given user group.

    Raises JsonableError if any requested user is not currently a member.
    """
    if not members:
        return json_success()

    member_profiles = user_ids_to_users(members, user_profile.realm)
    user_group = access_user_group_by_id(user_group_id, user_profile)
    current_member_ids = get_user_group_members(user_group)

    # Reject the whole request if any id isn't actually a member.
    for member_id in members:
        if member_id not in current_member_ids:
            raise JsonableError(_("There is no member '{}' in this user group").format(member_id))

    remove_members_from_user_group(user_group, member_profiles)
    return json_success()
|