import random
from datetime import timedelta
from typing import Any, Dict, List, Mapping, Optional, Sequence, Set, Union
from unittest import mock
from urllib.parse import urlencode

import orjson
from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now as timezone_now

from zerver.decorator import JsonableError
from zerver.lib import cache
from zerver.lib.actions import (
    bulk_add_subscriptions,
    bulk_get_subscriber_user_ids,
    bulk_remove_subscriptions,
    can_access_stream_user_ids,
    create_stream_if_needed,
    do_add_default_stream,
    do_add_streams_to_default_stream_group,
    do_change_default_stream_group_description,
    do_change_default_stream_group_name,
    do_change_plan_type,
    do_change_stream_post_policy,
    do_change_subscription_property,
    do_change_user_role,
    do_create_default_stream_group,
    do_create_realm,
    do_deactivate_stream,
    do_deactivate_user,
    do_get_streams,
    do_remove_default_stream,
    do_remove_default_stream_group,
    do_remove_streams_from_default_stream_group,
    do_set_realm_property,
    ensure_stream,
    gather_subscriptions,
    gather_subscriptions_helper,
    get_average_weekly_stream_traffic,
    get_default_streams_for_realm,
    get_stream,
    lookup_default_stream_groups,
    round_to_2_significant_digits,
    validate_user_access_to_subscribers_helper,
)
from zerver.lib.message import aggregate_unread_data, get_raw_unread_data
from zerver.lib.response import json_error, json_success
from zerver.lib.stream_recipient import StreamRecipientMap
from zerver.lib.stream_subscription import (
    get_active_subscriptions_for_stream_id,
    num_subscribers_for_stream_id,
)
from zerver.lib.streams import (
    access_stream_by_id,
    access_stream_by_name,
    can_access_stream_history,
    create_streams_if_needed,
    filter_stream_authorization,
    list_to_streams,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    get_subscription,
    queries_captured,
    reset_emails_in_zulip_realm,
    tornado_redirected_to_list,
)
from zerver.models import (
    DefaultStream,
    DefaultStreamGroup,
    Message,
    Realm,
    Recipient,
    Stream,
    Subscription,
    UserMessage,
    UserProfile,
    active_non_guest_user_ids,
    flush_per_request_caches,
    get_client,
    get_default_stream_groups,
    get_realm,
    get_user,
    get_user_profile_by_id_in_realm,
)
from zerver.views.streams import compose_views


class TestMiscStuff(ZulipTestCase):
    def test_empty_results(self) -> None:
        # These are essentially just tests to ensure line
        # coverage for codepaths that won't ever really be
        # called in practice.
        user_profile = self.example_user('cordelia')

        result = bulk_get_subscriber_user_ids(
            stream_dicts=[],
            user_profile=user_profile,
            sub_dict={},
            stream_recipient=StreamRecipientMap(),
        )
        self.assertEqual(result, {})

        streams = do_get_streams(
            user_profile=user_profile,
            include_public=False,
            include_subscribed=False,
            include_all_active=False,
            include_default=False,
        )
        self.assertEqual(streams, [])


class TestCreateStreams(ZulipTestCase):
    def test_creating_streams(self) -> None:
        stream_names = ['new1', 'new2', 'new3']
        stream_descriptions = ['des1', 'des2', 'des3']

        realm = get_realm('zulip')

        # Test stream creation events.
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            ensure_stream(realm, "Public stream", invite_only=False)
        self.assert_length(events, 1)

        self.assertEqual(events[0]['event']['type'], 'stream')
        self.assertEqual(events[0]['event']['op'], 'create')
        # Send public stream creation event to all active users.
        self.assertEqual(events[0]['users'], active_non_guest_user_ids(realm.id))
        self.assertEqual(events[0]['event']['streams'][0]['name'], "Public stream")

        events = []
        with tornado_redirected_to_list(events):
            ensure_stream(realm, "Private stream", invite_only=True)
        self.assert_length(events, 1)

        self.assertEqual(events[0]['event']['type'], 'stream')
        self.assertEqual(events[0]['event']['op'], 'create')
        # Send private stream creation event to only realm admins.
        self.assertEqual(len(events[0]['users']), 2)
        self.assertTrue(self.example_user("iago").id in events[0]['users'])
        self.assertTrue(self.example_user("desdemona").id in events[0]['users'])
        self.assertEqual(events[0]['event']['streams'][0]['name'], "Private stream")

        new_streams, existing_streams = create_streams_if_needed(
            realm,
            [{"name": stream_name,
              "description": stream_description,
              "invite_only": True,
              "stream_post_policy": Stream.STREAM_POST_POLICY_ADMINS,
              "message_retention_days": -1}
             for (stream_name, stream_description) in zip(stream_names, stream_descriptions)])

        self.assertEqual(len(new_streams), 3)
        self.assertEqual(len(existing_streams), 0)

        actual_stream_names = {stream.name for stream in new_streams}
        self.assertEqual(actual_stream_names, set(stream_names))
        actual_stream_descriptions = {stream.description for stream in new_streams}
        self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
        for stream in new_streams:
            self.assertTrue(stream.invite_only)
            self.assertTrue(stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS)
            self.assertTrue(stream.message_retention_days == -1)

        new_streams, existing_streams = create_streams_if_needed(
            realm,
            [{"name": stream_name,
              "description": stream_description,
              "invite_only": True}
             for (stream_name, stream_description) in zip(stream_names, stream_descriptions)])

        self.assertEqual(len(new_streams), 0)
        self.assertEqual(len(existing_streams), 3)

        actual_stream_names = {stream.name for stream in existing_streams}
        self.assertEqual(actual_stream_names, set(stream_names))
        actual_stream_descriptions = {stream.description for stream in existing_streams}
        self.assertEqual(actual_stream_descriptions, set(stream_descriptions))
        for stream in existing_streams:
            self.assertTrue(stream.invite_only)

    def test_create_api_multiline_description(self) -> None:
        user = self.example_user("hamlet")
        realm = user.realm
        self.login_user(user)
        post_data = {'subscriptions': orjson.dumps([{"name": 'new_stream',
                                                     "description": "multi\nline\ndescription"}]).decode(),
                     'invite_only': orjson.dumps(False).decode()}
        result = self.api_post(user, "/api/v1/users/me/subscriptions", post_data,
                               subdomain="zulip")
        self.assert_json_success(result)
        stream = get_stream("new_stream", realm)
        self.assertEqual(stream.description, 'multi line description')

    def test_history_public_to_subscribers_on_stream_creation(self) -> None:
        realm = get_realm('zulip')

        stream_dicts: List[Mapping[str, Any]] = [
            {
                "name": "publicstream",
                "description": "Public stream with public history",
            },
            {
                "name": "webpublicstream",
                "description": "Web public stream",
                "is_web_public": True
            },
            {
                "name": "privatestream",
                "description": "Private stream with non-public history",
                "invite_only": True,
            },
            {
                "name": "privatewithhistory",
                "description": "Private stream with public history",
                "invite_only": True,
                "history_public_to_subscribers": True,
            },
            {
                "name": "publictrywithouthistory",
                "description": "Public stream without public history (disallowed)",
                "invite_only": False,
                "history_public_to_subscribers": False,
            },
        ]

        created, existing = create_streams_if_needed(realm, stream_dicts)

        self.assertEqual(len(created), 5)
        self.assertEqual(len(existing), 0)
        for stream in created:
            if stream.name == 'publicstream':
                self.assertTrue(stream.history_public_to_subscribers)
            if stream.name == 'webpublicstream':
                self.assertTrue(stream.history_public_to_subscribers)
            if stream.name == 'privatestream':
                self.assertFalse(stream.history_public_to_subscribers)
            if stream.name == 'privatewithhistory':
                self.assertTrue(stream.history_public_to_subscribers)
            if stream.name == 'publictrywithouthistory':
                self.assertTrue(stream.history_public_to_subscribers)

    def test_history_public_to_subscribers_zephyr_realm(self) -> None:
        realm = get_realm('zephyr')

        stream, created = create_stream_if_needed(realm, "private_stream", invite_only=True)
        self.assertTrue(created)
        self.assertTrue(stream.invite_only)
        self.assertFalse(stream.history_public_to_subscribers)

        stream, created = create_stream_if_needed(realm, "public_stream", invite_only=False)
        self.assertTrue(created)
        self.assertFalse(stream.invite_only)
        self.assertFalse(stream.history_public_to_subscribers)

    def test_auto_mark_stream_created_message_as_read_for_stream_creator(self) -> None:
        # This test relies on email == delivery_email for
        # convenience.
        reset_emails_in_zulip_realm()

        realm = Realm.objects.get(name='Zulip Dev')
        iago = self.example_user('iago')
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        aaron = self.example_user('aaron')

        # Establish a stream for notifications.
        announce_stream = ensure_stream(realm, "announce", False, "announcements here.")
        realm.notifications_stream_id = announce_stream.id
        realm.save(update_fields=['notifications_stream_id'])

        self.subscribe(iago, announce_stream.name)
        self.subscribe(hamlet, announce_stream.name)

        notification_bot = UserProfile.objects.get(full_name="Notification Bot")
        self.login_user(iago)

        initial_message_count = Message.objects.count()
        initial_usermessage_count = UserMessage.objects.count()

        data = {
            "subscriptions": '[{"name": "brand new stream", "description": ""}]',
            "history_public_to_subscribers": 'true',
            "invite_only": 'false',
            "announce": 'true',
            "principals": orjson.dumps([iago.id, aaron.id, cordelia.id, hamlet.id]).decode(),
            "stream_post_policy": '1',
        }

        response = self.client_post("/json/users/me/subscriptions", data)

        final_message_count = Message.objects.count()
        final_usermessage_count = UserMessage.objects.count()

        expected_response = {
            "result": "success",
            "msg": "",
            "subscribed": {
                "AARON@zulip.com": ["brand new stream"],
                "cordelia@zulip.com": ["brand new stream"],
                "hamlet@zulip.com": ["brand new stream"],
                "iago@zulip.com": ["brand new stream"],
            },
            "already_subscribed": {},
        }
        self.assertEqual(response.status_code, 200)
        self.assertEqual(orjson.loads(response.content.decode()), expected_response)

        # 2 messages should be created, one in announce and one in the new stream itself.
        self.assertEqual(final_message_count - initial_message_count, 2)
        # 4 UserMessages per subscriber: One for each of the subscribers, plus 1 for
        # each user in the notifications stream.
        announce_stream_subs = Subscription.objects.filter(recipient=announce_stream.recipient)
        self.assertEqual(final_usermessage_count - initial_usermessage_count,
                         4 + announce_stream_subs.count())

        def get_unread_stream_data(user: UserProfile) -> List[Dict[str, Any]]:
            raw_unread_data = get_raw_unread_data(user)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data['streams']

        stream_id = Stream.objects.get(name='brand new stream').id
        iago_unread_messages = get_unread_stream_data(iago)
        hamlet_unread_messages = get_unread_stream_data(hamlet)

        # The stream creation messages should be unread for Hamlet
        self.assertEqual(len(hamlet_unread_messages), 2)

        # According to the code in zerver/views/streams/add_subscriptions_backend
        # the notification stream message is sent first, then the new stream's message.
        self.assertEqual(hamlet_unread_messages[0]['sender_ids'][0], notification_bot.id)
        self.assertEqual(hamlet_unread_messages[1]['stream_id'], stream_id)

        # But it should be marked as read for Iago, the stream creator.
        self.assertEqual(len(iago_unread_messages), 0)


class RecipientTest(ZulipTestCase):
    def test_recipient(self) -> None:
        realm = get_realm('zulip')
        stream = get_stream('Verona', realm)
        recipient = Recipient.objects.get(
            type_id=stream.id,
            type=Recipient.STREAM,
        )
        self.assertEqual(str(recipient), f'<Recipient: Verona ({stream.id}, {Recipient.STREAM})>')


class StreamAdminTest(ZulipTestCase):
    def test_make_stream_public(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        self.make_stream('private_stream_1', invite_only=True)
        self.make_stream('private_stream_2', invite_only=True)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        params = {
            'stream_name': orjson.dumps('private_stream_1').decode(),
            'is_private': orjson.dumps(False).decode(),
        }
        stream_id = get_stream('private_stream_1', user_profile.realm).id
        result = self.client_patch(f"/json/streams/{stream_id}", params)
        self.assert_json_error(result, 'Invalid stream id')
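        # Even as a realm administrator, Hamlet cannot access a private
        # stream he is not subscribed to, so the update above is rejected
        # with "Invalid stream id".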

        stream = self.subscribe(user_profile, 'private_stream_1')
        self.assertFalse(stream.is_in_zephyr_realm)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        params = {
            'stream_name': orjson.dumps('private_stream_1').decode(),
            'is_private': orjson.dumps(False).decode(),
        }
        result = self.client_patch(f"/json/streams/{stream_id}", params)
        self.assert_json_success(result)

        realm = user_profile.realm
        stream = get_stream('private_stream_1', realm)
        self.assertFalse(stream.invite_only)
        self.assertTrue(stream.history_public_to_subscribers)

        do_change_user_role(user_profile, UserProfile.ROLE_MEMBER)
        params = {
            'stream_name': orjson.dumps('private_stream_2').decode(),
            'is_private': orjson.dumps(False).decode(),
        }
        stream = self.subscribe(user_profile, 'private_stream_2')
        result = self.client_patch(f"/json/streams/{stream.id}", params)
        self.assertTrue(stream.invite_only)
        self.assert_json_error(result, "Must be an organization or stream administrator")
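        # A regular member cannot make the change, but granting the stream
        # administrator role on the subscription allows it.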
        sub = get_subscription('private_stream_2', user_profile)
        do_change_subscription_property(user_profile, sub, stream, "role",
                                        Subscription.ROLE_STREAM_ADMINISTRATOR)
        result = self.client_patch(f"/json/streams/{stream.id}", params)
        self.assert_json_success(result)

        stream = get_stream('private_stream_2', realm)
        self.assertFalse(stream.invite_only)
        self.assertTrue(stream.history_public_to_subscribers)

    def test_make_stream_private(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        realm = user_profile.realm
        self.make_stream('public_stream_1', realm=realm)
        self.make_stream('public_stream_2')

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        params = {
            'stream_name': orjson.dumps('public_stream_1').decode(),
            'is_private': orjson.dumps(True).decode(),
        }
        stream_id = get_stream('public_stream_1', realm).id
        result = self.client_patch(f"/json/streams/{stream_id}", params)
        self.assert_json_success(result)

        stream = get_stream('public_stream_1', realm)
        self.assertTrue(stream.invite_only)
        self.assertFalse(stream.history_public_to_subscribers)

        do_change_user_role(user_profile, UserProfile.ROLE_MEMBER)
        params = {
            'stream_name': orjson.dumps('public_stream_2').decode(),
            'is_private': orjson.dumps(True).decode(),
        }
        stream = self.subscribe(user_profile, 'public_stream_2')
        result = self.client_patch(f"/json/streams/{stream.id}", params)
        self.assertFalse(stream.invite_only)
        self.assert_json_error(result, "Must be an organization or stream administrator")

        sub = get_subscription('public_stream_2', user_profile)
        do_change_subscription_property(user_profile, sub, stream, "role",
                                        Subscription.ROLE_STREAM_ADMINISTRATOR)
        result = self.client_patch(f"/json/streams/{stream.id}", params)
        self.assert_json_success(result)

        stream = get_stream('public_stream_2', realm)
        self.assertTrue(stream.invite_only)
        self.assertFalse(stream.history_public_to_subscribers)

    def test_make_stream_public_zephyr_mirror(self) -> None:
        user_profile = self.mit_user('starnine')
        self.login_user(user_profile)
        realm = user_profile.realm
        self.make_stream('target_stream', realm=realm, invite_only=True)
        self.subscribe(user_profile, 'target_stream')

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        params = {
            'stream_name': orjson.dumps('target_stream').decode(),
            'is_private': orjson.dumps(False).decode(),
        }
        stream_id = get_stream('target_stream', realm).id
        result = self.client_patch(f"/json/streams/{stream_id}", params,
                                   subdomain="zephyr")
        self.assert_json_success(result)
        stream = get_stream('target_stream', realm)
        self.assertFalse(stream.invite_only)
        self.assertFalse(stream.history_public_to_subscribers)

    def test_make_stream_private_with_public_history(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        realm = user_profile.realm
        self.make_stream('public_history_stream', realm=realm)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        params = {
            'stream_name': orjson.dumps('public_history_stream').decode(),
            'is_private': orjson.dumps(True).decode(),
            'history_public_to_subscribers': orjson.dumps(True).decode(),
        }
        stream_id = get_stream('public_history_stream', realm).id
        result = self.client_patch(f"/json/streams/{stream_id}", params)
        self.assert_json_success(result)
        stream = get_stream('public_history_stream', realm)
        self.assertTrue(stream.invite_only)
        self.assertTrue(stream.history_public_to_subscribers)

    def test_try_make_stream_public_with_private_history(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        realm = user_profile.realm
        self.make_stream('public_stream', realm=realm)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        params = {
            'stream_name': orjson.dumps('public_stream').decode(),
            'is_private': orjson.dumps(False).decode(),
            'history_public_to_subscribers': orjson.dumps(False).decode(),
        }
        stream_id = get_stream('public_stream', realm).id
        result = self.client_patch(f"/json/streams/{stream_id}", params)
        self.assert_json_success(result)
        stream = get_stream('public_stream', realm)
        self.assertFalse(stream.invite_only)
        self.assertTrue(stream.history_public_to_subscribers)

    def test_deactivate_stream_backend(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        stream = self.make_stream('new_stream_1')
        self.subscribe(user_profile, stream.name)
        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)

        result = self.client_delete(f'/json/streams/{stream.id}')
        self.assert_json_success(result)
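        # Deactivating a stream also deactivates its subscriptions, so
        # Hamlet should no longer have an active subscription to it.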
        subscription_exists = get_active_subscriptions_for_stream_id(stream.id).filter(
            user_profile=user_profile,
        ).exists()
        self.assertFalse(subscription_exists)

        do_change_user_role(user_profile, UserProfile.ROLE_MEMBER)
        stream = self.make_stream('new_stream_2')
        self.subscribe(user_profile, stream.name)
        sub = get_subscription(stream.name, user_profile)
        do_change_subscription_property(user_profile, sub, stream, "role",
                                        Subscription.ROLE_STREAM_ADMINISTRATOR)
        result = self.client_delete(f'/json/streams/{stream.id}')
        self.assert_json_success(result)
        subscription_exists = get_active_subscriptions_for_stream_id(stream.id).filter(
            user_profile=user_profile,
        ).exists()
        self.assertFalse(subscription_exists)

    def test_deactivate_stream_removes_default_stream(self) -> None:
        stream = self.make_stream('new_stream')
        do_add_default_stream(stream)
        self.assertEqual(1, DefaultStream.objects.filter(stream_id=stream.id).count())
        do_deactivate_stream(stream)
        self.assertEqual(0, DefaultStream.objects.filter(stream_id=stream.id).count())

    def test_deactivate_stream_removes_stream_from_default_stream_groups(self) -> None:
        realm = get_realm('zulip')
        streams_to_keep = []
        for stream_name in ["stream1", "stream2"]:
            stream = ensure_stream(realm, stream_name)
            streams_to_keep.append(stream)

        streams_to_remove = []
        stream = ensure_stream(realm, "stream3")
        streams_to_remove.append(stream)

        all_streams = streams_to_keep + streams_to_remove

        def get_streams(group: DefaultStreamGroup) -> List[Stream]:
            return list(group.streams.all().order_by('name'))

        group_name = "group1"
        description = "This is group1"
        do_create_default_stream_group(realm, group_name, description, all_streams)
        default_stream_groups = get_default_stream_groups(realm)
        self.assertEqual(get_streams(default_stream_groups[0]), all_streams)

        do_deactivate_stream(streams_to_remove[0])
        self.assertEqual(get_streams(default_stream_groups[0]), streams_to_keep)

    def test_vacate_private_stream_removes_default_stream(self) -> None:
        stream = self.make_stream('new_stream', invite_only=True)
        self.subscribe(self.example_user("hamlet"), stream.name)
        do_add_default_stream(stream)
        self.assertEqual(1, DefaultStream.objects.filter(stream_id=stream.id).count())
        self.unsubscribe(self.example_user("hamlet"), stream.name)
        self.assertEqual(0, DefaultStream.objects.filter(stream_id=stream.id).count())
        # Fetch stream again from database.
        stream = Stream.objects.get(id=stream.id)
        self.assertTrue(stream.deactivated)

    def test_deactivate_stream_backend_requires_existing_stream(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        self.make_stream('new_stream')
        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)

        result = self.client_delete('/json/streams/999999999')
        self.assert_json_error(result, 'Invalid stream id')

    def test_deactivate_stream_backend_requires_admin(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)
        stream = self.subscribe(user_profile, 'new_stream')
        sub = get_subscription('new_stream', user_profile)
        self.assertFalse(sub.is_stream_admin)

        result = self.client_delete(f'/json/streams/{stream.id}')
        self.assert_json_error(result, 'Must be an organization or stream administrator')

    def test_private_stream_live_updates(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)

        self.make_stream('private_stream', invite_only=True)
        self.subscribe(user_profile, 'private_stream')
        self.subscribe(self.example_user("cordelia"), 'private_stream')
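        # Prospero is deliberately left unsubscribed; the assertions below
        # check that he is not notified about this private stream's changes.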

        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            stream_id = get_stream('private_stream', user_profile.realm).id
            result = self.client_patch(f'/json/streams/{stream_id}',
                                       {'description': orjson.dumps('Test description').decode()})
        self.assert_json_success(result)
        # Should be just a description change event
        self.assert_length(events, 1)

        cordelia = self.example_user('cordelia')
        prospero = self.example_user('prospero')

        notified_user_ids = set(events[-1]['users'])
        self.assertIn(user_profile.id, notified_user_ids)
        self.assertIn(cordelia.id, notified_user_ids)
        self.assertNotIn(prospero.id, notified_user_ids)

        events = []
        with tornado_redirected_to_list(events):
            stream_id = get_stream('private_stream', user_profile.realm).id
            result = self.client_patch(f'/json/streams/{stream_id}',
                                       {'new_name': orjson.dumps('whatever').decode()})
        self.assert_json_success(result)
        # Should be a name event, an email address event and a notification event
        self.assert_length(events, 3)

        notified_user_ids = set(events[0]['users'])
        self.assertIn(user_profile.id, notified_user_ids)
        self.assertIn(cordelia.id, notified_user_ids)
        self.assertNotIn(prospero.id, notified_user_ids)

        notified_with_bot_users = events[-1]['users']
        notified_with_bot_user_ids = []
        notified_with_bot_user_ids.append(notified_with_bot_users[0]['id'])
        notified_with_bot_user_ids.append(notified_with_bot_users[1]['id'])
        self.assertIn(user_profile.id, notified_with_bot_user_ids)
        self.assertIn(cordelia.id, notified_with_bot_user_ids)
        self.assertNotIn(prospero.id, notified_with_bot_user_ids)

def test_rename_stream ( self ) - > None :
2017-05-07 19:39:30 +02:00
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
realm = user_profile . realm
2017-08-25 06:01:29 +02:00
stream = self . subscribe ( user_profile , ' stream_name1 ' )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2014-01-29 22:03:40 +01:00
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' stream_name1 ' ) . decode ( ) } )
2017-01-30 04:44:40 +01:00
self . assert_json_error ( result , " Stream already has that name! " )
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' Denmark ' ) . decode ( ) } )
2018-01-08 19:54:19 +01:00
self . assert_json_error ( result , " Stream name ' Denmark ' is already taken. " )
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' denmark ' ) . decode ( ) } )
2018-01-08 19:54:19 +01:00
self . assert_json_error ( result , " Stream name ' denmark ' is already taken. " )
2017-01-30 04:44:40 +01:00
2017-02-01 23:20:46 +01:00
# Do a rename that is case-only--this should succeed.
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' sTREAm_name1 ' ) . decode ( ) } )
2017-02-01 23:20:46 +01:00
self . assert_json_success ( result )
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            stream_id = get_stream('stream_name1', user_profile.realm).id
            result = self.client_patch(f'/json/streams/{stream_id}',
                                       {'new_name': orjson.dumps('stream_name2').decode()})
        self.assert_json_success(result)

        event = events[1]['event']
        self.assertEqual(event, dict(
            op='update',
            type='stream',
            property='name',
            value='stream_name2',
            stream_id=stream_id,
name = ' sTREAm_name1 ' ,
2014-01-29 22:03:40 +01:00
) )
2016-11-04 07:02:24 +01:00
notified_user_ids = set ( events [ 1 ] [ ' users ' ] )
2014-01-29 22:03:40 +01:00
2017-03-23 07:22:28 +01:00
self . assertRaises ( Stream . DoesNotExist , get_stream , ' stream_name1 ' , realm )
2016-09-28 09:07:09 +02:00
stream_name2_exists = get_stream ( ' stream_name2 ' , realm )
2014-01-29 22:03:40 +01:00
self . assertTrue ( stream_name2_exists )
2018-06-03 19:11:52 +02:00
self . assertEqual ( notified_user_ids , set ( active_non_guest_user_ids ( realm . id ) ) )
2016-11-04 07:02:24 +01:00
self . assertIn ( user_profile . id ,
notified_user_ids )
2017-05-07 17:21:26 +02:00
self . assertIn ( self . example_user ( ' prospero ' ) . id ,
2016-11-04 07:02:24 +01:00
notified_user_ids )
2018-06-03 19:11:52 +02:00
self . assertNotIn ( self . example_user ( ' polonius ' ) . id ,
notified_user_ids )
2016-11-04 07:02:24 +01:00
2016-09-28 09:07:09 +02:00
# Test case to handle a Unicode stream name change.
# NOTE: Encoding is needed when a Unicode string is passed as an argument.
with tornado_redirected_to_list ( events ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name2_exists . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' नया नाम ' ) . decode ( ) } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
# While querying, the system can handle Unicode strings.
2020-04-09 21:51:58 +02:00
stream_name_uni_exists = get_stream ( ' नया नाम ' , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_uni_exists )
# Test case to handle changing a Unicode stream name to a new name.
# NOTE: A Unicode string embedded in the URL is handled cleanly by the
# client_patch call; explicit URL encoding is not needed.
with tornado_redirected_to_list ( events ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name_uni_exists . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' नाम में क्या रक्खा हे ' ) . decode ( ) } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
# While querying, the system can handle Unicode strings.
2020-04-09 21:51:58 +02:00
self . assertRaises ( Stream . DoesNotExist , get_stream , ' नया नाम ' , realm )
2017-03-23 07:22:28 +01:00
2020-04-09 21:51:58 +02:00
stream_name_new_uni_exists = get_stream ( ' नाम में क्या रक्खा हे ' , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_new_uni_exists )
# Test case to change the name from one language to another.
with tornado_redirected_to_list ( events ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name_new_uni_exists . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' français ' ) . decode ( ) } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
2020-04-09 21:51:58 +02:00
stream_name_fr_exists = get_stream ( ' français ' , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_fr_exists )
# Test case to change the name to a mixed-language name.
with tornado_redirected_to_list ( events ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name_fr_exists . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' français name ' ) . decode ( ) } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
2020-04-09 21:51:58 +02:00
stream_name_mixed_exists = get_stream ( ' français name ' , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_mixed_exists )
2018-04-13 02:42:30 +02:00
# Test case for notified users in private streams.
stream_private = self . make_stream ( ' stream_private_name1 ' , realm = user_profile . realm , invite_only = True )
self . subscribe ( self . example_user ( ' cordelia ' ) , ' stream_private_name1 ' )
del events [ : ]
with tornado_redirected_to_list ( events ) :
stream_id = get_stream ( ' stream_private_name1 ' , realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' stream_private_name2 ' ) . decode ( ) } )
2018-04-13 02:42:30 +02:00
self . assert_json_success ( result )
notified_user_ids = set ( events [ 1 ] [ ' users ' ] )
self . assertEqual ( notified_user_ids , can_access_stream_user_ids ( stream_private ) )
self . assertIn ( self . example_user ( ' cordelia ' ) . id , notified_user_ids )
# An important corner case is that all organization admins are notified.
self . assertIn ( self . example_user ( ' iago ' ) . id , notified_user_ids )
# The current user, Hamlet, was made an admin and thus should be notified too.
self . assertIn ( user_profile . id , notified_user_ids )
self . assertNotIn ( self . example_user ( ' prospero ' ) . id ,
notified_user_ids )
2020-07-13 16:13:28 +02:00
# Test renaming of stream by stream admin.
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER )
new_stream = self . make_stream ( ' new_stream ' , realm = user_profile . realm )
self . subscribe ( user_profile , ' new_stream ' )
sub = get_subscription ( ' new_stream ' , user_profile )
do_change_subscription_property ( user_profile , sub , new_stream , " role " , Subscription . ROLE_STREAM_ADMINISTRATOR )
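# The user is now only a stream administrator (no longer a realm admin),
# which should still be sufficient to rename the stream.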
del events [ : ]
with tornado_redirected_to_list ( events ) :
result = self . client_patch ( f ' /json/streams/ { new_stream . id } ' ,
{ ' new_name ' : orjson . dumps ( ' stream_rename ' ) . decode ( ) } )
self . assert_json_success ( result )
self . assertEqual ( len ( events ) , 3 )
stream_rename_exists = get_stream ( ' stream_rename ' , realm )
self . assertTrue ( stream_rename_exists )
def test_rename_stream_requires_admin ( self ) - > None :
2017-05-07 19:39:30 +02:00
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2016-10-21 23:22:25 +02:00
self . make_stream ( ' stream_name1 ' )
2020-07-13 16:13:28 +02:00
self . subscribe ( user_profile , ' stream_name1 ' )
sub = get_subscription ( ' stream_name1 ' , user_profile )
self . assertFalse ( sub . is_stream_admin )
2014-01-29 22:03:40 +01:00
2017-01-13 15:50:17 +01:00
stream_id = get_stream ( ' stream_name1 ' , user_profile . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' stream_name2 ' ) . decode ( ) } )
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , ' Must be an organization or stream administrator ' )
2014-01-29 22:03:40 +01:00
2019-01-05 12:47:38 +01:00
def test_notify_on_stream_rename ( self ) - > None :
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2019-01-05 12:47:38 +01:00
self . make_stream ( ' stream_name1 ' )
stream = self . subscribe ( user_profile , ' stream_name1 ' )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' stream_name2 ' ) . decode ( ) } )
2019-01-05 12:47:38 +01:00
self . assert_json_success ( result )
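# Renaming the stream should also make Notification Bot post an
# announcement of the change to the stream itself.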
# Inspect the notification message sent
message = self . get_last_message ( )
actual_stream = Stream . objects . get ( id = message . recipient . type_id )
2020-06-09 00:25:09 +02:00
message_content = f ' @_**King Hamlet| { user_profile . id } ** renamed stream **stream_name1** to **stream_name2**. '
2019-01-05 12:47:38 +01:00
self . assertEqual ( actual_stream . name , ' stream_name2 ' )
2019-07-24 07:38:56 +02:00
self . assertEqual ( actual_stream . realm_id , user_profile . realm_id )
2019-01-05 12:47:38 +01:00
self . assertEqual ( message . recipient . type , Recipient . STREAM )
self . assertEqual ( message . content , message_content )
self . assertEqual ( message . sender . email , ' notification-bot@zulip.com ' )
2019-07-24 07:38:56 +02:00
self . assertEqual ( message . sender . realm , get_realm ( settings . SYSTEM_BOT_REALM ) )
2019-01-05 12:47:38 +01:00
2018-02-12 16:02:19 +01:00
def test_realm_admin_can_update_unsub_private_stream ( self ) - > None :
iago = self . example_user ( ' iago ' )
2020-03-12 14:17:25 +01:00
hamlet = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( iago )
2020-03-09 21:41:26 +01:00
result = self . common_subscribe_to_streams ( iago , [ " private_stream " ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ hamlet . id ] ) . decode ( ) ) ,
2018-02-12 16:02:19 +01:00
invite_only = True )
self . assert_json_success ( result )
stream_id = get_stream ( ' private_stream ' , iago . realm ) . id
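# Iago, a realm admin, is not subscribed to this private stream, but realm
# admins can still rename it and edit its description.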
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_name ' : orjson . dumps ( ' new_private_stream ' ) . decode ( ) } )
2018-02-12 16:02:19 +01:00
self . assert_json_success ( result )
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' new_description ' : orjson . dumps ( ' new description ' ) . decode ( ) } )
2018-02-12 16:02:19 +01:00
self . assert_json_success ( result )
2018-04-03 00:36:31 +02:00
# But cannot change stream type.
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' stream_name ' : orjson . dumps ( ' private_stream ' ) . decode ( ) ,
' is_private ' : orjson . dumps ( True ) . decode ( ) } )
2018-02-12 16:02:19 +01:00
self . assert_json_error ( result , " Invalid stream id " )
2020-07-13 16:13:28 +02:00
def test_non_admin_cannot_access_unsub_private_stream ( self ) - > None :
iago = self . example_user ( ' iago ' )
hamlet = self . example_user ( ' hamlet ' )
self . login_user ( hamlet )
result = self . common_subscribe_to_streams ( hamlet , [ " private_stream_1 " ] ,
dict ( principals = orjson . dumps ( [ iago . id ] ) . decode ( ) ) ,
invite_only = True )
self . assert_json_success ( result )
stream_id = get_stream ( ' private_stream_1 ' , hamlet . realm ) . id
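# Hamlet is neither a realm admin nor subscribed to this private stream, so
# every request below is rejected with a generic 'Invalid stream id' error.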
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
{ ' new_name ' : orjson . dumps ( ' private_stream_2 ' ) . decode ( ) } )
self . assert_json_error ( result , " Invalid stream id " )
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
{ ' new_description ' : orjson . dumps ( ' new description ' ) . decode ( ) } )
self . assert_json_error ( result , " Invalid stream id " )
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
{ ' stream_name ' : orjson . dumps ( ' private_stream_1 ' ) . decode ( ) ,
' is_private ' : orjson . dumps ( True ) . decode ( ) } )
self . assert_json_error ( result , " Invalid stream id " )
result = self . client_delete ( f ' /json/streams/ { stream_id } ' )
self . assert_json_error ( result , " Invalid stream id " )
2017-11-05 10:51:25 +01:00
def test_change_stream_description ( self ) - > None :
2018-04-30 08:59:51 +02:00
user_profile = self . example_user ( ' iago ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
realm = user_profile . realm
2017-08-25 06:01:29 +02:00
self . subscribe ( user_profile , ' stream_name1 ' )
2014-01-29 22:03:40 +01:00
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2014-01-29 22:03:40 +01:00
with tornado_redirected_to_list ( events ) :
2017-01-13 15:50:17 +01:00
stream_id = get_stream ( ' stream_name1 ' , realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' description ' : orjson . dumps ( ' Test description ' ) . decode ( ) } )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
event = events [ 0 ] [ ' event ' ]
self . assertEqual ( event , dict (
op = ' update ' ,
type = ' stream ' ,
property = ' description ' ,
value = ' Test description ' ,
2019-01-11 13:48:22 +01:00
rendered_description = ' <p>Test description</p> ' ,
2017-03-05 01:50:25 +01:00
stream_id = stream_id ,
2020-04-10 05:23:40 +02:00
name = ' stream_name1 ' ,
2014-01-29 22:03:40 +01:00
) )
2016-11-04 07:02:24 +01:00
notified_user_ids = set ( events [ 0 ] [ ' users ' ] )
2014-01-29 22:03:40 +01:00
2017-01-13 15:50:17 +01:00
stream = get_stream ( ' stream_name1 ' , realm )
2018-06-03 19:11:52 +02:00
self . assertEqual ( notified_user_ids , set ( active_non_guest_user_ids ( realm . id ) ) )
2016-11-04 07:02:24 +01:00
self . assertIn ( user_profile . id ,
notified_user_ids )
2017-05-07 17:21:26 +02:00
self . assertIn ( self . example_user ( ' prospero ' ) . id ,
2016-11-04 07:02:24 +01:00
notified_user_ids )
2018-06-03 19:11:52 +02:00
self . assertNotIn ( self . example_user ( ' polonius ' ) . id ,
notified_user_ids )
2016-11-04 07:02:24 +01:00
2014-01-29 22:03:40 +01:00
self . assertEqual ( ' Test description ' , stream . description )
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' description ' : orjson . dumps ( ' a ' * 1025 ) . decode ( ) } )
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " description is too long (limit: { Stream . MAX_DESCRIPTION_LENGTH } characters) " ,
)
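# Stream descriptions are single-line; any newlines sent by the client are
# replaced with spaces, as checked below.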
2018-04-30 08:59:51 +02:00
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' description ' : orjson . dumps ( ' a \n multi \n line \n description ' ) . decode ( ) } )
2019-02-20 21:09:21 +01:00
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , realm )
self . assertEqual ( stream . description , ' a multi line description ' )
2019-03-01 02:00:40 +01:00
# Verify that we don't render inline URL previews in this code path.
with self . settings ( INLINE_URL_EMBED_PREVIEW = True ) :
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' description ' : orjson . dumps ( ' See https://zulip.com/team ' ) . decode ( ) } )
2019-03-01 02:00:40 +01:00
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , realm )
2020-05-09 03:44:56 +02:00
self . assertEqual (
stream . rendered_description ,
2020-06-08 23:04:39 +02:00
' <p>See <a href= " https://zulip.com/team " >https://zulip.com/team</a></p> ' ,
2020-05-09 03:44:56 +02:00
)
2019-03-01 02:00:40 +01:00
2020-07-13 16:13:28 +02:00
# Test changing stream description by stream admin.
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER )
sub = get_subscription ( ' stream_name1 ' , user_profile )
do_change_subscription_property ( user_profile , sub , stream , " role " , Subscription . ROLE_STREAM_ADMINISTRATOR )
with tornado_redirected_to_list ( events ) :
stream_id = get_stream ( ' stream_name1 ' , realm ) . id
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
{ ' description ' : orjson . dumps ( ' Test description ' ) . decode ( ) } )
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , realm )
self . assertEqual ( stream . description , ' Test description ' )
def test_change_stream_description_requires_admin ( self ) - > None :
2017-05-07 19:39:30 +02:00
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
2020-07-13 16:13:28 +02:00
stream = self . subscribe ( user_profile , ' stream_name1 ' )
sub = get_subscription ( ' stream_name1 ' , user_profile )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER )
2020-07-13 16:13:28 +02:00
do_change_subscription_property ( user_profile , sub , stream , " role " , Subscription . ROLE_MEMBER )
2014-01-29 22:03:40 +01:00
2017-01-13 15:50:17 +01:00
stream_id = get_stream ( ' stream_name1 ' , user_profile . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' description ' : orjson . dumps ( ' Test description ' ) . decode ( ) } )
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , ' Must be an organization or stream administrator ' )
2014-01-29 22:03:40 +01:00
2020-02-04 21:50:55 +01:00
def test_change_to_stream_post_policy_admins ( self ) - > None :
2018-05-12 07:25:42 +02:00
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-05-12 07:25:42 +02:00
self . subscribe ( user_profile , ' stream_name1 ' )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2018-05-12 07:25:42 +02:00
stream_id = get_stream ( ' stream_name1 ' , user_profile . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' is_announcement_only ' : orjson . dumps ( True ) . decode ( ) } )
2018-05-12 07:25:42 +02:00
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , user_profile . realm )
2020-02-04 21:50:55 +01:00
self . assertTrue ( stream . stream_post_policy == Stream . STREAM_POST_POLICY_ADMINS )
2018-05-12 07:25:42 +02:00
2020-07-13 16:13:28 +02:00
def test_change_stream_post_policy_requires_admin ( self ) - > None :
2018-05-12 07:25:42 +02:00
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-05-12 07:25:42 +02:00
2020-07-13 16:13:28 +02:00
stream = self . subscribe ( user_profile , ' stream_name1 ' )
sub = get_subscription ( ' stream_name1 ' , user_profile )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER )
2020-07-13 16:13:28 +02:00
do_change_subscription_property ( user_profile , sub , stream , " role " , Subscription . ROLE_MEMBER )
2018-05-12 07:25:42 +02:00
2020-02-04 21:50:55 +01:00
do_set_realm_property ( user_profile . realm , ' waiting_period_threshold ' , 10 )
2020-04-07 20:43:04 +02:00
def test_non_admin ( how_old : int , is_new : bool , policy : int ) - > None :
user_profile . date_joined = timezone_now ( ) - timedelta ( days = how_old )
user_profile . save ( )
self . assertEqual ( user_profile . is_new_member , is_new )
stream_id = get_stream ( ' stream_name1 ' , user_profile . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' stream_post_policy ' : orjson . dumps ( policy ) . decode ( ) } )
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , ' Must be an organization or stream administrator ' )
2020-04-07 20:43:04 +02:00
policies = [ Stream . STREAM_POST_POLICY_ADMINS , Stream . STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS ]
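# Neither long-standing members nor new members may change the stream post
# policy without realm or stream administrator rights.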
for policy in policies :
test_non_admin ( how_old = 15 , is_new = False , policy = policy )
test_non_admin ( how_old = 5 , is_new = True , policy = policy )
2020-07-13 16:13:28 +02:00
do_change_subscription_property ( user_profile , sub , stream , " role " , Subscription . ROLE_STREAM_ADMINISTRATOR )
for policy in policies :
stream_id = get_stream ( ' stream_name1 ' , user_profile . realm ) . id
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
{ ' stream_post_policy ' : orjson . dumps ( policy ) . decode ( ) } )
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , user_profile . realm )
self . assertEqual ( stream . stream_post_policy , policy )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2020-04-07 20:43:04 +02:00
for policy in policies :
stream_id = get_stream ( ' stream_name1 ' , user_profile . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f ' /json/streams/ { stream_id } ' ,
2020-08-07 01:09:47 +02:00
{ ' stream_post_policy ' : orjson . dumps ( policy ) . decode ( ) } )
2020-04-07 20:43:04 +02:00
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , user_profile . realm )
self . assertEqual ( stream . stream_post_policy , policy )
2018-05-12 07:25:42 +02:00
2020-06-14 18:57:02 +02:00
def test_change_stream_message_retention_days ( self ) - > None :
user_profile = self . example_user ( ' desdemona ' )
self . login_user ( user_profile )
realm = user_profile . realm
do_change_plan_type ( realm , Realm . LIMITED )
stream = self . subscribe ( user_profile , ' stream_name1 ' )
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( 2 ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Available on Zulip Standard. Upgrade to access. " )
do_change_plan_type ( realm , Realm . SELF_HOSTED )
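# Once the realm is no longer on the LIMITED plan, a per-stream retention
# period can be set; the resulting stream 'update' event is verified below.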
events : List [ Mapping [ str , Any ] ] = [ ]
with tornado_redirected_to_list ( events ) :
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( 2 ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
event = events [ 0 ] [ ' event ' ]
self . assertEqual ( event , dict (
op = ' update ' ,
type = ' stream ' ,
property = ' message_retention_days ' ,
value = 2 ,
stream_id = stream . id ,
name = ' stream_name1 ' ,
) )
notified_user_ids = set ( events [ 0 ] [ ' users ' ] )
stream = get_stream ( ' stream_name1 ' , realm )
self . assertEqual ( notified_user_ids , set ( active_non_guest_user_ids ( realm . id ) ) )
self . assertIn ( user_profile . id , notified_user_ids )
self . assertIn ( self . example_user ( ' prospero ' ) . id , notified_user_ids )
self . assertNotIn ( self . example_user ( ' polonius ' ) . id , notified_user_ids )
self . assertEqual ( stream . message_retention_days , 2 )
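# The special value "forever" is stored and broadcast as -1, meaning
# messages in the stream are retained indefinitely.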
events = [ ]
with tornado_redirected_to_list ( events ) :
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( " forever " ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
event = events [ 0 ] [ ' event ' ]
self . assertEqual ( event , dict (
op = ' update ' ,
type = ' stream ' ,
property = ' message_retention_days ' ,
value = - 1 ,
stream_id = stream . id ,
name = ' stream_name1 ' ,
) )
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , realm )
self . assertEqual ( stream . message_retention_days , - 1 )
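# The special value "realm_default" clears the per-stream setting (stored
# as None), so the realm-level retention policy applies.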
events = [ ]
with tornado_redirected_to_list ( events ) :
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( " realm_default " ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
event = events [ 0 ] [ ' event ' ]
self . assertEqual ( event , dict (
op = ' update ' ,
type = ' stream ' ,
property = ' message_retention_days ' ,
value = None ,
stream_id = stream . id ,
name = ' stream_name1 ' ,
) )
stream = get_stream ( ' stream_name1 ' , realm )
self . assertEqual ( stream . message_retention_days , None )
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( " invalid " ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Bad value for ' message_retention_days ' : invalid " )
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( - 1 ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Bad value for ' message_retention_days ' : -1 " )
2020-06-21 10:50:01 +02:00
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( 0 ) . decode ( ) } )
2020-06-21 10:50:01 +02:00
self . assert_json_error ( result , " Bad value for ' message_retention_days ' : 0 " )
2020-06-14 18:57:02 +02:00
def test_change_stream_message_retention_days_requires_realm_owner ( self ) - > None :
user_profile = self . example_user ( ' iago ' )
self . login_user ( user_profile )
realm = user_profile . realm
stream = self . subscribe ( user_profile , ' stream_name1 ' )
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( 2 ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Must be an organization owner " )
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_OWNER )
result = self . client_patch ( f ' /json/streams/ { stream . id } ' ,
2020-08-07 01:09:47 +02:00
{ ' message_retention_days ' : orjson . dumps ( 2 ) . decode ( ) } )
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
stream = get_stream ( ' stream_name1 ' , realm )
self . assertEqual ( stream . message_retention_days , 2 )
def test_stream_message_retention_days_on_stream_creation ( self ) - > None :
"""
Only admins can create streams with a message_retention_days
value other than None.
"""
admin = self . example_user ( ' iago ' )
streams_raw = [ {
' name ' : ' new_stream ' ,
' message_retention_days ' : 10 ,
} ]
with self . assertRaisesRegex ( JsonableError , " User cannot create stream with this settings. " ) :
list_to_streams ( streams_raw , admin , autocreate = True )
streams_raw = [ {
' name ' : ' new_stream ' ,
' message_retention_days ' : - 1 ,
} ]
with self . assertRaisesRegex ( JsonableError , " User cannot create stream with this settings. " ) :
list_to_streams ( streams_raw , admin , autocreate = True )
streams_raw = [ {
' name ' : ' new_stream ' ,
' message_retention_days ' : None ,
} ]
result = list_to_streams ( streams_raw , admin , autocreate = True )
self . assert_length ( result [ 0 ] , 0 )
self . assert_length ( result [ 1 ] , 1 )
self . assertEqual ( result [ 1 ] [ 0 ] . name , ' new_stream ' )
self . assertEqual ( result [ 1 ] [ 0 ] . message_retention_days , None )
owner = self . example_user ( ' desdemona ' )
realm = owner . realm
streams_raw = [
{ ' name ' : ' new_stream1 ' ,
' message_retention_days ' : 10 } ,
{ ' name ' : ' new_stream2 ' ,
' message_retention_days ' : - 1 } ,
{ ' name ' : ' new_stream3 ' } ,
]
do_change_plan_type ( realm , Realm . LIMITED )
with self . assertRaisesRegex ( JsonableError , " Available on Zulip Standard. Upgrade to access. " ) :
list_to_streams ( streams_raw , owner , autocreate = True )
do_change_plan_type ( realm , Realm . SELF_HOSTED )
result = list_to_streams ( streams_raw , owner , autocreate = True )
self . assert_length ( result [ 0 ] , 0 )
self . assert_length ( result [ 1 ] , 3 )
self . assertEqual ( result [ 1 ] [ 0 ] . name , ' new_stream1 ' )
self . assertEqual ( result [ 1 ] [ 0 ] . message_retention_days , 10 )
self . assertEqual ( result [ 1 ] [ 1 ] . name , ' new_stream2 ' )
self . assertEqual ( result [ 1 ] [ 1 ] . message_retention_days , - 1 )
self . assertEqual ( result [ 1 ] [ 2 ] . name , ' new_stream3 ' )
self . assertEqual ( result [ 1 ] [ 2 ] . message_retention_days , None )
2017-11-20 03:22:57 +01:00
def set_up_stream_for_deletion ( self , stream_name : str , invite_only : bool = False ,
subscribed : bool = True ) - > Stream :
2014-01-29 22:03:40 +01:00
"""
Create a stream for deletion by an administrator .
"""
2017-05-07 19:39:30 +02:00
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2016-10-21 23:22:25 +02:00
stream = self . make_stream ( stream_name , invite_only = invite_only )
2014-01-29 22:03:40 +01:00
# For testing deleting streams you aren't on.
if subscribed :
2017-08-25 06:01:29 +02:00
self . subscribe ( user_profile , stream_name )
2014-01-29 22:03:40 +01:00
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2014-01-29 22:03:40 +01:00
return stream
2017-11-05 10:51:25 +01:00
def delete_stream ( self , stream : Stream ) - > None :
2014-01-29 22:03:40 +01:00
"""
Delete the stream and assess the result .
"""
active_name = stream . name
2016-12-04 01:04:55 +01:00
realm = stream . realm
2016-12-30 11:42:59 +01:00
stream_id = stream . id
2014-01-29 22:03:40 +01:00
2018-08-11 17:28:52 +02:00
# Simulate that a stream by the same name has already been
# deactivated, just to exercise our renaming logic:
ensure_stream ( realm , " !DEACTIVATED: " + active_name )
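# Because a stream named "!DEACTIVATED:<name>" already exists, the stream
# deactivated below ends up with an extra "!" prefix ("!!DEACTIVATED:<name>"),
# which is what the assertions later in this helper expect.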
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2014-01-29 22:03:40 +01:00
with tornado_redirected_to_list ( events ) :
2016-12-30 11:42:59 +01:00
result = self . client_delete ( ' /json/streams/ ' + str ( stream_id ) )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-02-15 17:38:44 +01:00
# We no longer send subscription events for stream deactivations.
sub_events = [ e for e in events if e [ ' event ' ] [ ' type ' ] == ' subscription ' ]
self . assertEqual ( sub_events , [ ] )
stream_events = [ e for e in events if e [ ' event ' ] [ ' type ' ] == ' stream ' ]
self . assertEqual ( len ( stream_events ) , 1 )
event = stream_events [ 0 ] [ ' event ' ]
self . assertEqual ( event [ ' op ' ] , ' delete ' )
self . assertEqual ( event [ ' streams ' ] [ 0 ] [ ' stream_id ' ] , stream . id )
2014-01-29 22:03:40 +01:00
with self . assertRaises ( Stream . DoesNotExist ) :
2017-01-04 05:30:48 +01:00
Stream . objects . get ( realm = get_realm ( " zulip " ) , name = active_name )
2014-01-29 22:03:40 +01:00
# A deleted stream has its name changed, is deactivated, is made
# invite-only, and has no subscribers.
2018-08-11 17:28:52 +02:00
deactivated_stream_name = " !!DEACTIVATED: " + active_name
2017-01-13 15:50:17 +01:00
deactivated_stream = get_stream ( deactivated_stream_name , realm )
2014-01-29 22:03:40 +01:00
self . assertTrue ( deactivated_stream . deactivated )
self . assertTrue ( deactivated_stream . invite_only )
self . assertEqual ( deactivated_stream . name , deactivated_stream_name )
subscribers = self . users_subscribed_to_stream (
2017-01-24 07:06:13 +01:00
deactivated_stream_name , realm )
2014-01-29 22:03:40 +01:00
self . assertEqual ( subscribers , [ ] )
# It doesn't show up in the list of public streams anymore.
2016-07-28 00:38:45 +02:00
result = self . client_get ( " /json/streams?include_subscribed=false " )
2017-08-17 08:45:20 +02:00
public_streams = [ s [ " name " ] for s in result . json ( ) [ " streams " ] ]
2014-01-29 22:03:40 +01:00
self . assertNotIn ( active_name , public_streams )
self . assertNotIn ( deactivated_stream_name , public_streams )
# Even if you could guess the new name, you can't subscribe to it.
2016-07-28 00:30:22 +02:00
result = self . client_post (
2015-11-30 21:39:40 +01:00
" /json/users/me/subscriptions " ,
2020-08-07 01:09:47 +02:00
{ " subscriptions " : orjson . dumps ( [ { " name " : deactivated_stream_name } ] ) . decode ( ) } )
2014-01-29 22:03:40 +01:00
self . assert_json_error (
2020-06-10 06:41:04 +02:00
result , f " Unable to access stream ( { deactivated_stream_name } ). " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_you_must_be_realm_admin ( self ) - > None :
2017-08-22 21:41:08 +02:00
"""
You must be an administrator of a stream's realm in order to delete it.
"""
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2017-08-22 21:41:08 +02:00
other_realm = Realm . objects . create ( string_id = ' other ' )
stream = self . make_stream ( ' other_realm_stream ' , realm = other_realm )
result = self . client_delete ( ' /json/streams/ ' + str ( stream . id ) )
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , ' Invalid stream id ' )
2017-08-22 21:41:08 +02:00
# Even becoming a realm admin doesn't help us for an out-of-realm
# stream.
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2017-08-22 21:41:08 +02:00
result = self . client_delete ( ' /json/streams/ ' + str ( stream . id ) )
self . assert_json_error ( result , ' Invalid stream id ' )
2017-11-05 10:51:25 +01:00
def test_delete_public_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
When an administrator deletes a public stream , that stream is not
visible to users at all anymore .
"""
stream = self . set_up_stream_for_deletion ( " newstream " )
self . delete_stream ( stream )
2017-11-05 10:51:25 +01:00
def test_delete_private_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Administrators can delete private streams they are on .
"""
stream = self . set_up_stream_for_deletion ( " newstream " , invite_only = True )
self . delete_stream ( stream )
2017-11-05 10:51:25 +01:00
def test_delete_streams_youre_not_on ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-08-22 21:41:08 +02:00
Administrators can delete streams they aren't on, including
private streams in their realm.
2014-01-29 22:03:40 +01:00
"""
pub_stream = self . set_up_stream_for_deletion (
" pubstream " , subscribed = False )
2017-02-15 17:38:44 +01:00
self . delete_stream ( pub_stream )
2014-01-29 22:03:40 +01:00
priv_stream = self . set_up_stream_for_deletion (
" privstream " , subscribed = False , invite_only = True )
2017-08-22 21:41:08 +02:00
self . delete_stream ( priv_stream )
2014-01-29 22:03:40 +01:00
2020-06-06 18:37:47 +02:00
def attempt_unsubscribe_of_principal ( self , query_count : int , target_users : List [ UserProfile ] ,
2020-07-13 16:13:28 +02:00
is_realm_admin : bool = False , is_stream_admin : bool = False ,
is_subbed : bool = True , invite_only : bool = False ,
2020-06-06 18:37:47 +02:00
target_users_subbed : bool = True , using_legacy_emails : bool = False ,
2020-06-13 03:34:01 +02:00
other_sub_users : Sequence [ UserProfile ] = [ ] ) - > HttpResponse :
2016-06-04 19:50:38 +02:00
2014-01-30 22:50:51 +01:00
# Set up the main user, who is in most cases an admin.
2020-07-13 16:13:28 +02:00
if is_realm_admin :
2018-03-14 02:20:31 +01:00
user_profile = self . example_user ( ' iago ' )
else :
user_profile = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-30 22:50:51 +01:00
# Set up the stream.
2020-04-09 21:51:58 +02:00
stream_name = " hümbüǵ "
2016-10-21 23:22:25 +02:00
self . make_stream ( stream_name , invite_only = invite_only )
2014-01-30 22:50:51 +01:00
# Set up the principals to be unsubscribed.
2020-07-02 03:13:26 +02:00
principals : List [ Union [ str , int ] ] = [ ]
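# Principals may be referred to either by user ID or, for legacy clients,
# by email address.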
2020-06-06 18:37:47 +02:00
for user in target_users :
2020-06-02 15:45:45 +02:00
if using_legacy_emails :
2020-06-06 18:37:47 +02:00
principals . append ( user . email )
2020-06-02 15:45:45 +02:00
else :
2020-06-06 18:37:47 +02:00
principals . append ( user . id )
2014-01-30 22:50:51 +01:00
# Subscribe the admin and/or principal as specified in the flags.
if is_subbed :
2020-07-13 16:13:28 +02:00
stream = self . subscribe ( user_profile , stream_name )
if is_stream_admin :
sub = get_subscription ( stream_name , user_profile )
do_change_subscription_property ( user_profile , sub , stream , " role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR )
2020-06-06 18:37:47 +02:00
if target_users_subbed :
for user in target_users :
self . subscribe ( user , stream_name )
2020-06-13 03:34:01 +02:00
for user in other_sub_users :
self . subscribe ( user , stream_name )
2014-01-30 22:50:51 +01:00
2017-10-29 21:03:11 +01:00
with queries_captured ( ) as queries :
result = self . client_delete (
" /json/users/me/subscriptions " ,
2020-08-07 01:09:47 +02:00
{ " subscriptions " : orjson . dumps ( [ stream_name ] ) . decode ( ) ,
" principals " : orjson . dumps ( principals ) . decode ( ) } )
2017-10-29 21:03:11 +01:00
self . assert_length ( queries , query_count )
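# Asserting the exact query count helps catch changes that add extra
# database queries to the unsubscribe code path.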
2014-01-30 22:50:51 +01:00
2014-01-31 18:08:23 +01:00
# If the removal succeeded, then assert that the target users are no longer subscribed.
if result . status_code not in [ 400 ] :
2020-06-06 18:37:47 +02:00
subbed_users = self . users_subscribed_to_stream ( stream_name , user_profile . realm )
for user in target_users :
self . assertNotIn ( user , subbed_users )
2014-01-30 22:50:51 +01:00
return result
2017-11-05 10:51:25 +01:00
def test_cant_remove_others_from_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
If you ' re not an admin, you can ' t remove other people from streams .
"""
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 5 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = False ,
is_stream_admin = False , is_subbed = True , invite_only = False , target_users_subbed = True )
2014-01-30 22:50:51 +01:00
self . assert_json_error (
2020-07-13 16:13:28 +02:00
result , " Must be an organization or stream administrator " )
2014-01-30 22:50:51 +01:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_others_from_public_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
2020-07-13 16:13:28 +02:00
If you ' re a realm admin, you can remove people from public streams, even
2014-01-30 22:50:51 +01:00
those you aren ' t on.
"""
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 22 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = True ,
is_subbed = True , invite_only = False , target_users_subbed = True )
2014-01-30 22:50:51 +01:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 1 )
2019-10-13 05:30:34 +02:00
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
2014-01-30 22:50:51 +01:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_multiple_users_from_stream ( self ) - > None :
2020-06-02 15:45:45 +02:00
"""
2020-07-13 16:13:28 +02:00
If you're a realm admin, you can remove multiple users from a stream.
2020-06-02 15:45:45 +02:00
"""
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 31 , target_users = [ self . example_user ( ' cordelia ' ) , self . example_user ( ' prospero ' ) ] ,
is_realm_admin = True , is_subbed = True , invite_only = False , target_users_subbed = True )
2020-06-02 15:45:45 +02:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 2 )
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_others_from_subbed_private_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
2020-07-13 16:13:28 +02:00
If you ' re a realm admin, you can remove other people from private streams you
2014-01-30 22:50:51 +01:00
are on .
"""
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 22 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = True ,
is_subbed = True , invite_only = True , target_users_subbed = True )
2014-01-30 22:50:51 +01:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 1 )
2019-10-13 05:30:34 +02:00
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
2014-01-30 22:50:51 +01:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_others_from_unsubbed_private_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
2020-07-13 16:13:28 +02:00
If you ' re a realm admin, you can remove people from private
2014-01-30 22:50:51 +01:00
streams you aren ' t on.
"""
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 22 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = True ,
is_subbed = False , invite_only = True , target_users_subbed = True , other_sub_users = [ self . example_user ( " othello " ) ] )
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 1 )
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
def test_stream_admin_remove_others_from_public_stream ( self ) - > None :
"""
You can remove others from public streams you ' re a stream administrator of.
"""
result = self . attempt_unsubscribe_of_principal (
query_count = 22 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = False ,
is_stream_admin = True , is_subbed = True , invite_only = False , target_users_subbed = True )
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 1 )
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
def test_stream_admin_remove_multiple_users_from_stream ( self ) - > None :
"""
You can remove multiple users from public streams you ' re a stream administrator of.
"""
result = self . attempt_unsubscribe_of_principal (
query_count = 31 , target_users = [ self . example_user ( ' cordelia ' ) , self . example_user ( ' prospero ' ) ] ,
is_realm_admin = False , is_stream_admin = True , is_subbed = True , invite_only = False ,
target_users_subbed = True )
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 2 )
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
def test_stream_admin_remove_others_from_private_stream ( self ) - > None :
"""
You can remove others from private streams you ' re a stream administrator of.
"""
result = self . attempt_unsubscribe_of_principal (
query_count = 22 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = False ,
is_stream_admin = True , is_subbed = True , invite_only = True , target_users_subbed = True )
2018-02-20 18:56:01 +01:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 1 )
2019-10-13 05:30:34 +02:00
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
2014-01-30 22:50:51 +01:00
2020-04-09 19:07:57 +02:00
def test_cant_remove_others_from_stream_legacy_emails ( self ) - > None :
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 5 , is_realm_admin = False , is_stream_admin = False , is_subbed = True ,
invite_only = False , target_users = [ self . example_user ( ' cordelia ' ) ] , target_users_subbed = True ,
2020-06-06 18:37:47 +02:00
using_legacy_emails = True )
2020-04-09 19:07:57 +02:00
self . assert_json_error (
2020-07-13 16:13:28 +02:00
result , " Must be an organization or stream administrator " )
2020-04-09 19:07:57 +02:00
def test_admin_remove_others_from_stream_legacy_emails ( self ) - > None :
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 22 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = True ,
is_subbed = True , invite_only = False , target_users_subbed = True , using_legacy_emails = True )
2020-04-09 19:07:57 +02:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 1 )
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
2020-06-02 15:45:45 +02:00
def test_admin_remove_multiple_users_from_stream_legacy_emails ( self ) - > None :
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 31 , target_users = [ self . example_user ( ' cordelia ' ) , self . example_user ( ' prospero ' ) ] ,
is_realm_admin = True , is_subbed = True , invite_only = False , target_users_subbed = True ,
using_legacy_emails = True )
2020-06-02 15:45:45 +02:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 2 )
self . assertEqual ( len ( json [ " not_removed " ] ) , 0 )
2019-05-06 16:34:31 +02:00
def test_create_stream_policy_setting ( self ) - > None :
2016-05-12 10:28:00 +02:00
"""
2020-04-02 21:53:20 +02:00
When realm . create_stream_policy setting is Realm . POLICY_MEMBERS_ONLY then
2019-05-06 16:34:31 +02:00
test that any user can create a stream .
2016-11-29 08:57:35 +01:00
2020-04-02 21:53:20 +02:00
When realm . create_stream_policy setting is Realm . POLICY_ADMINS_ONLY then
2019-05-06 16:34:31 +02:00
test that only admins can create a stream .
2016-11-29 08:57:35 +01:00
2020-04-02 21:53:20 +02:00
When realm . create_stream_policy setting is Realm . POLICY_FULL_MEMBERS_ONLY then
2019-05-06 16:34:31 +02:00
test that admins and users with accounts older than the waiting period can create a stream .
2016-11-29 08:57:35 +01:00
"""
2017-05-07 19:39:30 +02:00
user_profile = self . example_user ( ' hamlet ' )
2017-05-29 19:54:06 +02:00
user_profile . date_joined = timezone_now ( )
user_profile . save ( )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER )
2016-05-12 10:28:00 +02:00
2019-05-06 16:34:31 +02:00
# Allow all members to create streams.
do_set_realm_property ( user_profile . realm , ' create_stream_policy ' ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_MEMBERS_ONLY )
2019-05-06 16:34:31 +02:00
# Set waiting period to 10 days.
2017-03-21 18:08:40 +01:00
do_set_realm_property ( user_profile . realm , ' waiting_period_threshold ' , 10 )
2016-11-29 08:57:35 +01:00
2019-05-06 16:34:31 +02:00
# Can successfully create a stream, despite the account being newer than the
# waiting period and the user not being an admin, because the create-stream
# policy allows all members.
stream_name = [ ' all_members ' ]
2020-03-09 21:41:26 +01:00
result = self . common_subscribe_to_streams ( user_profile , stream_name )
2019-05-06 16:34:31 +02:00
self . assert_json_success ( result )
# Allow only administrators to create streams.
do_set_realm_property ( user_profile . realm , ' create_stream_policy ' ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_ADMINS_ONLY )
2019-05-06 16:34:31 +02:00
# Cannot create stream because not an admin.
stream_name = [ ' admins_only ' ]
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( user_profile , stream_name , allow_fail = True )
2016-05-12 10:28:00 +02:00
self . assert_json_error ( result , ' User cannot create streams. ' )
2019-05-06 16:34:31 +02:00
# Make current user an admin.
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2016-11-29 08:57:35 +01:00
2019-05-06 16:34:31 +02:00
# Can successfully create stream as user is now an admin.
stream_name = [ ' admins_only ' ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( user_profile , stream_name )
2019-05-06 16:34:31 +02:00
# Allow users older than the waiting period to create streams.
do_set_realm_property ( user_profile . realm , ' create_stream_policy ' ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_FULL_MEMBERS_ONLY )
2019-05-06 16:34:31 +02:00
# Can successfully create stream despite being under waiting period because user is admin.
stream_name = [ ' waiting_period_as_admin ' ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( user_profile , stream_name )
2019-05-06 16:34:31 +02:00
# Make current user no longer an admin.
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER )
2019-05-06 16:34:31 +02:00
# Cannot create stream because user is not an admin and is not older than the waiting
# period.
stream_name = [ ' waiting_period ' ]
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( user_profile , stream_name , allow_fail = True )
2019-05-06 16:34:31 +02:00
self . assert_json_error ( result , ' User cannot create streams. ' )
# Make the user's account 11 days old.
user_profile . date_joined = timezone_now ( ) - timedelta ( days = 11 )
user_profile . save ( )
# Can successfully create stream now that account is old enough.
stream_name = [ ' waiting_period ' ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( user_profile , stream_name )
2016-05-12 10:28:00 +02:00
2019-04-08 19:23:00 +02:00
def test_invite_to_stream_by_invite_period_threshold ( self ) - > None :
"""
Non-admin users whose account age is greater than or equal to the
invite-to-stream threshold should be able to invite others to a stream.
"""
hamlet_user = self . example_user ( ' hamlet ' )
hamlet_user . date_joined = timezone_now ( )
hamlet_user . save ( )
cordelia_user = self . example_user ( ' cordelia ' )
cordelia_user . date_joined = timezone_now ( )
cordelia_user . save ( )
do_set_realm_property ( hamlet_user . realm , ' invite_to_stream_policy ' ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_FULL_MEMBERS_ONLY )
2020-04-09 19:07:57 +02:00
cordelia_user_id = cordelia_user . id
2019-04-08 19:23:00 +02:00
2020-03-06 18:40:46 +01:00
self . login_user ( hamlet_user )
2020-05-21 00:13:06 +02:00
do_change_user_role ( hamlet_user , UserProfile . ROLE_REALM_ADMINISTRATOR )
2019-04-08 19:23:00 +02:00
# Hamlet creates a stream as an admin.
stream_name = [ ' waitingperiodtest ' ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( hamlet_user , stream_name )
2019-04-08 19:23:00 +02:00
# Can only invite users to the stream if the inviter's account is at least ten days old.
2020-05-21 00:13:06 +02:00
do_change_user_role ( hamlet_user , UserProfile . ROLE_MEMBER )
2019-04-08 19:23:00 +02:00
do_set_realm_property ( hamlet_user . realm , ' waiting_period_threshold ' , 10 )
# Attempt and fail to invite Cordelia to the stream.
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams (
hamlet_user ,
stream_name ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ cordelia_user_id ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
)
2019-04-08 19:23:00 +02:00
self . assert_json_error ( result ,
" Your account is too new to modify other users ' subscriptions. " )
# Anyone can invite users.
do_set_realm_property ( hamlet_user . realm , ' waiting_period_threshold ' , 0 )
# Attempt and succeed to invite Cordelia to the stream.
2020-08-07 01:09:47 +02:00
self . common_subscribe_to_streams ( hamlet_user , stream_name , { " principals " : orjson . dumps ( [ cordelia_user_id ] ) . decode ( ) } )
2019-04-08 19:23:00 +02:00
# Set the threshold to 20 days.
do_set_realm_property ( hamlet_user . realm , ' waiting_period_threshold ' , 20 )
# Make Hamlet's account 21 days old.
hamlet_user . date_joined = timezone_now ( ) - timedelta ( days = 21 )
hamlet_user . save ( )
# Unsubscribe Cordelia.
self . unsubscribe ( cordelia_user , stream_name [ 0 ] )
# Attempt and succeed to invite Aaron to the stream..
2020-08-07 01:09:47 +02:00
self . common_subscribe_to_streams ( hamlet_user , stream_name , { " principals " : orjson . dumps ( [ cordelia_user_id ] ) . decode ( ) } )
2019-04-08 19:23:00 +02:00
2017-11-05 10:51:25 +01:00
def test_remove_already_not_subbed ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
Trying to unsubscribe someone who already isn ' t subscribed to a stream
fails gracefully .
"""
result = self . attempt_unsubscribe_of_principal (
2020-07-13 16:13:28 +02:00
query_count = 12 , target_users = [ self . example_user ( ' cordelia ' ) ] , is_realm_admin = True ,
is_subbed = False , invite_only = False , target_users_subbed = False )
2014-01-30 22:50:51 +01:00
json = self . assert_json_success ( result )
self . assertEqual ( len ( json [ " removed " ] ) , 0 )
2019-10-13 05:30:34 +02:00
self . assertEqual ( len ( json [ " not_removed " ] ) , 1 )
2014-01-30 22:50:51 +01:00
2017-11-05 10:51:25 +01:00
def test_remove_invalid_user ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
Trying to unsubscribe an invalid user from a stream fails gracefully .
"""
2020-03-06 18:40:46 +01:00
admin = self . example_user ( ' iago ' )
self . login_user ( admin )
self . assertTrue ( admin . is_realm_admin )
2014-01-30 22:50:51 +01:00
2020-04-09 21:51:58 +02:00
stream_name = " hümbüǵ "
2016-10-21 23:22:25 +02:00
self . make_stream ( stream_name )
2014-01-30 22:50:51 +01:00
2016-12-23 02:37:10 +01:00
result = self . client_delete ( " /json/users/me/subscriptions " ,
2020-08-07 01:09:47 +02:00
{ " subscriptions " : orjson . dumps ( [ stream_name ] ) . decode ( ) ,
" principals " : orjson . dumps ( [ 99 ] ) . decode ( ) } )
2014-01-30 22:50:51 +01:00
self . assert_json_error (
result ,
2020-04-09 19:07:57 +02:00
" User not authorized to execute queries on behalf of ' 99 ' " ,
2016-04-28 01:23:45 +02:00
status_code = 403 )
2014-01-30 22:50:51 +01:00
2016-08-23 02:08:42 +02:00
class DefaultStreamTest ( ZulipTestCase ) :
2018-05-11 01:39:38 +02:00
def get_default_stream_names ( self , realm : Realm ) - > Set [ str ] :
2017-09-17 00:34:13 +02:00
streams = get_default_streams_for_realm ( realm . id )
2014-01-29 22:03:40 +01:00
stream_names = [ s . name for s in streams ]
return set ( stream_names )
2017-11-05 10:51:25 +01:00
def test_add_and_remove_default_stream ( self ) - > None :
2017-01-04 05:30:48 +01:00
realm = get_realm ( " zulip " )
2018-03-21 22:05:21 +01:00
stream = ensure_stream ( realm , " Added Stream " )
2014-01-29 22:03:40 +01:00
orig_stream_names = self . get_default_stream_names ( realm )
2017-01-30 04:23:08 +01:00
do_add_default_stream ( stream )
2014-01-29 22:03:40 +01:00
new_stream_names = self . get_default_stream_names ( realm )
added_stream_names = new_stream_names - orig_stream_names
2020-04-09 21:51:58 +02:00
self . assertEqual ( added_stream_names , { ' Added Stream ' } )
2014-01-29 22:03:40 +01:00
# idempotency--2nd call to do_add_default_stream should be a noop
2017-01-30 04:23:08 +01:00
do_add_default_stream ( stream )
2014-01-29 22:03:40 +01:00
self . assertEqual ( self . get_default_stream_names ( realm ) , new_stream_names )
# start removing
2017-01-30 04:25:40 +01:00
do_remove_default_stream ( stream )
2014-01-29 22:03:40 +01:00
self . assertEqual ( self . get_default_stream_names ( realm ) , orig_stream_names )
# idempotency--2nd call to do_remove_default_stream should be a noop
2017-01-30 04:25:40 +01:00
do_remove_default_stream ( stream )
2014-01-29 22:03:40 +01:00
self . assertEqual ( self . get_default_stream_names ( realm ) , orig_stream_names )
2017-11-05 10:51:25 +01:00
def test_api_calls ( self ) - > None :
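# Exercises POST and DELETE /json/default_streams as a realm admin, and checks
# that GET /json/streams reports the is_default flag correctly.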
user_profile = self . example_user ( ' hamlet ' )
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-08-15 20:45:55 +02:00
2014-01-29 22:03:40 +01:00
stream_name = ' stream ADDED via api '
2020-03-22 20:29:49 +01:00
stream = ensure_stream ( user_profile . realm , stream_name )
2020-04-13 13:57:07 +02:00
result = self . client_post ( ' /json/default_streams ' , dict ( stream_id = stream . id ) )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
self . assertTrue ( stream_name in self . get_default_stream_names ( user_profile . realm ) )
2018-08-15 20:45:55 +02:00
# look for it
self . subscribe ( user_profile , stream_name )
payload = dict (
include_public = ' true ' ,
include_default = ' true ' ,
)
result = self . client_get ( ' /json/streams ' , payload )
self . assert_json_success ( result )
streams = result . json ( ) [ ' streams ' ]
default_streams = {
stream [ ' name ' ]
for stream in streams
if stream [ ' is_default ' ]
}
self . assertEqual ( default_streams , { stream_name } )
other_streams = {
stream [ ' name ' ]
for stream in streams
if not stream [ ' is_default ' ]
}
self . assertTrue ( len ( other_streams ) > 0 )
2014-01-29 22:03:40 +01:00
# and remove it
2020-03-22 20:29:49 +01:00
result = self . client_delete ( ' /json/default_streams ' , dict ( stream_id = stream . id ) )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
self . assertFalse ( stream_name in self . get_default_stream_names ( user_profile . realm ) )
2018-08-07 14:13:58 +02:00
# Test admin can't add unsubscribed private stream
stream_name = " private_stream "
2020-03-22 20:29:49 +01:00
stream = self . make_stream ( stream_name , invite_only = True )
2018-08-07 14:13:58 +02:00
self . subscribe ( self . example_user ( ' iago ' ) , stream_name )
2020-04-13 13:57:07 +02:00
result = self . client_post ( ' /json/default_streams ' , dict ( stream_id = stream . id ) )
self . assert_json_error ( result , " Invalid stream id " )
2018-08-07 14:13:58 +02:00
self . subscribe ( user_profile , stream_name )
2020-04-13 13:57:07 +02:00
result = self . client_post ( ' /json/default_streams ' , dict ( stream_id = stream . id ) )
2018-08-07 14:13:58 +02:00
self . assert_json_success ( result )
self . assertTrue ( stream_name in self . get_default_stream_names ( user_profile . realm ) )
# Test admin can remove unsubscribed private stream
self . unsubscribe ( user_profile , stream_name )
2020-03-22 20:29:49 +01:00
result = self . client_delete ( ' /json/default_streams ' , dict ( stream_id = stream . id ) )
2018-08-07 14:13:58 +02:00
self . assert_json_success ( result )
self . assertFalse ( stream_name in self . get_default_stream_names ( user_profile . realm ) )
2014-01-29 22:03:40 +01:00
2020-07-24 17:50:36 +02:00
def test_guest_user_access_to_streams ( self ) - > None :
user_profile = self . example_user ( " polonius " )
self . login_user ( user_profile )
self . assertEqual ( user_profile . role , UserProfile . ROLE_GUEST )
# Get all the streams that Polonius has access to (subscribed + web public streams)
result = self . client_get ( ' /json/streams?include_web_public=true ' )
streams = result . json ( ) [ ' streams ' ]
subscribed , unsubscribed , never_subscribed = gather_subscriptions_helper ( user_profile )
self . assertEqual ( len ( streams ) ,
len ( subscribed ) + len ( unsubscribed ) + len ( never_subscribed ) )
expected_streams = subscribed + unsubscribed + never_subscribed
stream_names = [
stream [ ' name ' ]
for stream in streams
]
expected_stream_names = [
stream [ ' name ' ]
for stream in expected_streams
]
self . assertEqual ( set ( stream_names ) , set ( expected_stream_names ) )
2017-11-01 18:20:34 +01:00
class DefaultStreamGroupTest ( ZulipTestCase ) :
def test_create_update_and_remove_default_stream_group ( self ) - > None :
realm = get_realm ( " zulip " )
# Test creating new default stream group
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
streams = [ ]
for stream_name in [ " stream1 " , " stream2 " , " stream3 " ] :
2018-03-21 22:05:21 +01:00
stream = ensure_stream ( realm , stream_name )
2017-11-01 18:20:34 +01:00
streams . append ( stream )
2017-12-01 07:57:54 +01:00
def get_streams ( group : DefaultStreamGroup ) - > List [ Stream ] :
return list ( group . streams . all ( ) . order_by ( ' name ' ) )
2017-11-01 18:20:34 +01:00
group_name = " group1 "
2017-11-14 20:51:34 +01:00
description = " This is group1 "
do_create_default_stream_group ( realm , group_name , description , streams )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-11-14 20:51:34 +01:00
self . assertEqual ( default_stream_groups [ 0 ] . description , description )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , streams )
2017-11-01 18:20:34 +01:00
# Test adding streams to existing default stream group
2017-11-14 20:33:09 +01:00
group = lookup_default_stream_groups ( [ " group1 " ] , realm ) [ 0 ]
2017-11-01 18:20:34 +01:00
new_stream_names = [ " stream4 " , " stream5 " ]
new_streams = [ ]
for new_stream_name in new_stream_names :
2018-03-21 22:05:21 +01:00
new_stream = ensure_stream ( realm , new_stream_name )
2017-11-01 18:20:34 +01:00
new_streams . append ( new_stream )
streams . append ( new_stream )
2017-11-14 20:33:09 +01:00
do_add_streams_to_default_stream_group ( realm , group , new_streams )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , streams )
2017-11-01 18:20:34 +01:00
# Test removing streams from existing default stream group
2017-11-14 20:33:09 +01:00
do_remove_streams_from_default_stream_group ( realm , group , new_streams )
2017-11-01 18:20:34 +01:00
remaining_streams = streams [ 0 : 3 ]
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , remaining_streams )
2017-11-01 18:20:34 +01:00
2017-11-14 20:51:34 +01:00
# Test changing default stream group description
new_description = " group1 new description "
do_change_default_stream_group_description ( realm , group , new_description )
default_stream_groups = get_default_stream_groups ( realm )
self . assertEqual ( default_stream_groups [ 0 ] . description , new_description )
self . assert_length ( default_stream_groups , 1 )
2017-11-14 21:06:02 +01:00
# Test changing default stream group name
new_group_name = " new group1 "
do_change_default_stream_group_name ( realm , group , new_group_name )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , new_group_name )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , remaining_streams )
2017-11-14 21:06:02 +01:00
2017-11-01 18:20:34 +01:00
# Test removing default stream group
2017-11-14 20:33:09 +01:00
do_remove_default_stream_group ( realm , group )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
# Test creating a default stream group which contains a default stream
do_add_default_stream ( remaining_streams [ 0 ] )
2017-11-17 07:00:53 +01:00
with self . assertRaisesRegex (
JsonableError , " ' stream1 ' is a default stream and cannot be added to ' new group1 ' " ) :
2017-11-14 21:06:02 +01:00
do_create_default_stream_group ( realm , new_group_name , " This is group1 " , remaining_streams )
2017-11-01 18:20:34 +01:00
def test_api_calls ( self ) - > None :
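# Walks through the default stream group API: POST /json/default_stream_groups/create,
# PATCH /json/default_stream_groups/<id>/streams with op=add or op=remove,
# PATCH /json/default_stream_groups/<id> for renaming and description changes,
# and DELETE /json/default_stream_groups/<id>.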
self . login ( ' hamlet ' )
2017-11-01 18:20:34 +01:00
user_profile = self . example_user ( ' hamlet ' )
realm = user_profile . realm
2020-05-21 00:13:06 +02:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR )
2017-11-01 18:20:34 +01:00
# Test creating new default stream group
stream_names = [ " stream1 " , " stream2 " , " stream3 " ]
group_name = " group1 "
2017-11-14 20:51:34 +01:00
description = " This is group1 "
2017-11-01 18:20:34 +01:00
streams = [ ]
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
for stream_name in stream_names :
2018-03-21 22:05:21 +01:00
stream = ensure_stream ( realm , stream_name )
2017-11-01 18:20:34 +01:00
streams . append ( stream )
2017-11-14 20:33:09 +01:00
result = self . client_post ( ' /json/default_stream_groups/create ' ,
2017-11-14 20:51:34 +01:00
{ " group_name " : group_name , " description " : description ,
2020-08-07 01:09:47 +02:00
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) } )
2017-11-01 18:20:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-11-14 20:51:34 +01:00
self . assertEqual ( default_stream_groups [ 0 ] . description , description )
2017-11-18 00:31:54 +01:00
self . assertEqual ( list ( default_stream_groups [ 0 ] . streams . all ( ) . order_by ( " id " ) ) , streams )
2017-11-01 18:20:34 +01:00
2018-08-22 16:54:28 +02:00
# Try creating a default stream group with the same name again.
result = self . client_post ( ' /json/default_stream_groups/create ' ,
{ " group_name " : group_name , " description " : description ,
2020-08-07 01:09:47 +02:00
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) } )
2018-08-22 16:54:28 +02:00
self . assert_json_error ( result , " Default stream group ' group1 ' already exists " )
2017-11-01 18:20:34 +01:00
# Test adding streams to existing default stream group
2017-11-14 20:33:09 +01:00
group_id = default_stream_groups [ 0 ] . id
2017-11-01 18:20:34 +01:00
new_stream_names = [ " stream4 " , " stream5 " ]
new_streams = [ ]
for new_stream_name in new_stream_names :
2018-03-21 22:05:21 +01:00
new_stream = ensure_stream ( realm , new_stream_name )
2017-11-01 18:20:34 +01:00
new_streams . append ( new_stream )
streams . append ( new_stream )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-01 18:20:34 +01:00
self . assert_json_error ( result , " Missing ' op ' argument " )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " invalid " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , ' Invalid value for " op " . Specify one of " add " or " remove " . ' )
result = self . client_patch ( " /json/default_stream_groups/12345/streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , " Default stream group with id ' 12345 ' does not exist. " )
2017-11-01 18:20:34 +01:00
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " , { " op " : " add " } )
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , " Missing ' stream_names ' argument " )
2017-11-01 18:20:34 +01:00
do_add_default_stream ( new_streams [ 0 ] )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-01 18:20:34 +01:00
self . assert_json_error ( result , " ' stream4 ' is a default stream and cannot be added to ' group1 ' " )
do_remove_default_stream ( new_streams [ 0 ] )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-14 20:33:09 +01:00
self . assert_json_success ( result )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2018-01-31 08:49:53 +01:00
self . assertEqual ( list ( default_stream_groups [ 0 ] . streams . all ( ) . order_by ( ' name ' ) ) , streams )
2017-11-01 18:20:34 +01:00
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-17 07:00:53 +01:00
self . assert_json_error ( result ,
" Stream ' stream4 ' is already present in default stream group ' group1 ' " )
2017-11-01 18:20:34 +01:00
# Test removing streams from default stream group
2017-11-14 20:33:09 +01:00
result = self . client_patch ( " /json/default_stream_groups/12345/streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " remove " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , " Default stream group with id ' 12345 ' does not exist. " )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " remove " , " stream_names " : orjson . dumps ( [ " random stream name " ] ) . decode ( ) } )
2017-11-01 18:20:34 +01:00
self . assert_json_error ( result , " Invalid stream name ' random stream name ' " )
streams . remove ( new_streams [ 0 ] )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " remove " , " stream_names " : orjson . dumps ( [ new_stream_names [ 0 ] ] ) . decode ( ) } )
2017-11-01 18:20:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2018-01-31 08:49:53 +01:00
self . assertEqual ( list ( default_stream_groups [ 0 ] . streams . all ( ) . order_by ( ' name ' ) ) , streams )
2017-11-01 18:20:34 +01:00
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " ,
2020-08-07 01:09:47 +02:00
{ " op " : " remove " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } )
2017-11-01 18:20:34 +01:00
self . assert_json_error ( result , " Stream ' stream4 ' is not present in default stream group ' group1 ' " )
2017-11-14 20:51:34 +01:00
# Test changing description of default stream group
new_description = " new group1 description "
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } " ,
2017-11-14 20:51:34 +01:00
{ " group_name " : group_name , " op " : " change " } )
2017-11-14 21:06:02 +01:00
self . assert_json_error ( result , ' You must pass " new_description " or " new_group_name " . ' )
2017-11-14 20:51:34 +01:00
result = self . client_patch ( " /json/default_stream_groups/12345 " ,
2020-08-07 01:09:47 +02:00
{ " op " : " change " , " new_description " : orjson . dumps ( new_description ) . decode ( ) } )
2017-11-14 20:51:34 +01:00
self . assert_json_error ( result , " Default stream group with id ' 12345 ' does not exist. " )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } " ,
2017-11-14 20:51:34 +01:00
{ " group_name " : group_name ,
" op " : " change " ,
2020-08-07 01:09:47 +02:00
" new_description " : orjson . dumps ( new_description ) . decode ( ) } )
2017-11-14 20:51:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
self . assertEqual ( default_stream_groups [ 0 ] . description , new_description )
2017-11-14 21:06:02 +01:00
# Test changing name of default stream group
new_group_name = " new group1 "
do_create_default_stream_group ( realm , " group2 " , " " , [ ] )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } " ,
2020-08-07 01:09:47 +02:00
{ " op " : " change " , " new_group_name " : orjson . dumps ( " group2 " ) . decode ( ) } )
2017-11-14 21:06:02 +01:00
self . assert_json_error ( result , " Default stream group ' group2 ' already exists " )
new_group = lookup_default_stream_groups ( [ " group2 " ] , realm ) [ 0 ]
do_remove_default_stream_group ( realm , new_group )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } " ,
2020-08-07 01:09:47 +02:00
{ " op " : " change " , " new_group_name " : orjson . dumps ( group_name ) . decode ( ) } )
2017-11-14 21:06:02 +01:00
self . assert_json_error ( result , " This default stream group is already named ' group1 ' " )
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } " ,
2020-08-07 01:09:47 +02:00
{ " op " : " change " , " new_group_name " : orjson . dumps ( new_group_name ) . decode ( ) } )
2017-11-14 21:06:02 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , new_group_name )
self . assertEqual ( default_stream_groups [ 0 ] . description , new_description )
2017-11-01 18:20:34 +01:00
# Test deleting a default stream group
2020-06-09 00:25:09 +02:00
result = self . client_delete ( f ' /json/default_stream_groups/ { group_id } ' )
2017-11-01 18:20:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
2020-06-09 00:25:09 +02:00
result = self . client_delete ( f ' /json/default_stream_groups/ { group_id } ' )
self . assert_json_error ( result , f " Default stream group with id ' { group_id } ' does not exist. " )
2017-11-01 18:20:34 +01:00
2018-02-04 19:50:47 +01:00
def test_invalid_default_stream_group_name ( self ) - > None :
2020-03-06 18:40:46 +01:00
self . login ( ' iago ' )
2018-02-04 19:50:47 +01:00
user_profile = self . example_user ( ' iago ' )
realm = user_profile . realm
stream_names = [ " stream1 " , " stream2 " , " stream3 " ]
description = " This is group1 "
streams = [ ]
for stream_name in stream_names :
2018-03-21 22:05:21 +01:00
stream = ensure_stream ( realm , stream_name )
2018-02-04 19:50:47 +01:00
streams . append ( stream )
result = self . client_post ( ' /json/default_stream_groups/create ' ,
{ " group_name " : " " , " description " : description ,
2020-08-07 01:09:47 +02:00
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) } )
2018-02-04 19:50:47 +01:00
self . assert_json_error ( result , " Invalid default stream group name ' ' " )
result = self . client_post ( ' /json/default_stream_groups/create ' ,
{ " group_name " : ' x ' * 100 , " description " : description ,
2020-08-07 01:09:47 +02:00
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) } )
2018-02-04 19:50:47 +01:00
self . assert_json_error ( result , " Default stream group name too long (limit: {} characters) "
2020-04-09 21:51:58 +02:00
. format ( DefaultStreamGroup . MAX_NAME_LENGTH ) )
2018-02-04 19:50:47 +01:00
result = self . client_post ( ' /json/default_stream_groups/create ' ,
{ " group_name " : " abc \000 " , " description " : description ,
2020-08-07 01:09:47 +02:00
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) } )
2018-02-04 19:50:47 +01:00
self . assert_json_error ( result , " Default stream group name ' abc \000 ' contains NULL (0x00) characters. " )
2018-08-16 16:17:20 +02:00
# Also test that lookup_default_stream_groups raises an
# error if we pass it a bad name. This function is used
# during registration, but it's a bit heavy to do a full
# test of that.
with self . assertRaisesRegex ( JsonableError , ' Invalid default stream group invalid-name ' ) :
lookup_default_stream_groups ( [ ' invalid-name ' ] , realm )
2016-08-23 02:08:42 +02:00
class SubscriptionPropertiesTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_set_stream_color ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
A POST request to /api/v1/users/me/subscriptions/properties with stream_id and
color data sets the stream color, and for that stream only. Also, make sure that
any invalid hex color codes are bounced.
"""
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2014-01-29 22:03:40 +01:00
2017-05-08 16:23:43 +02:00
old_subs , _ = gather_subscriptions ( test_user )
2014-01-29 22:03:40 +01:00
sub = old_subs [ 0 ]
2017-05-09 07:01:42 +02:00
stream_id = sub [ ' stream_id ' ]
2017-07-11 21:51:31 +02:00
new_color = " #ffffff " # TODO: ensure that this is different from old_color
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " color " ,
" stream_id " : stream_id ,
" value " : " #ffffff " } ] ) . decode ( ) } )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
new_subs = gather_subscriptions ( test_user ) [ 0 ]
2014-01-29 22:03:40 +01:00
found_sub = None
for sub in new_subs :
2017-05-09 07:01:42 +02:00
if sub [ ' stream_id ' ] == stream_id :
2014-01-29 22:03:40 +01:00
found_sub = sub
break
2017-05-24 04:21:29 +02:00
assert ( found_sub is not None )
2014-01-29 22:03:40 +01:00
self . assertEqual ( found_sub [ ' color ' ] , new_color )
new_subs . remove ( found_sub )
for sub in old_subs :
2017-05-09 07:01:42 +02:00
if sub [ ' stream_id ' ] == stream_id :
2014-01-29 22:03:40 +01:00
found_sub = sub
break
old_subs . remove ( found_sub )
self . assertEqual ( old_subs , new_subs )
2019-01-14 07:50:23 +01:00
invalid_color = " 3ffrff "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " color " ,
" stream_id " : stream_id ,
" value " : invalid_color } ] ) . decode ( ) } )
2019-01-14 07:50:23 +01:00
self . assert_json_error ( result , " color is not a valid hex color code " )
2017-11-05 10:51:25 +01:00
def test_set_color_missing_stream_id ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Updating the color property requires a `stream_id` key.
"""
2017-05-09 07:01:42 +02:00
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " color " ,
" value " : " #ffffff " } ] ) . decode ( ) } )
2014-02-14 21:55:20 +01:00
self . assert_json_error (
2017-05-09 07:01:42 +02:00
result , " stream_id key is missing from subscription_data[0] " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_set_color_unsubscribed_stream_id ( self ) - > None :
2016-08-18 12:33:06 +02:00
"""
Updating the color property requires a subscribed stream .
"""
2020-03-10 11:48:26 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2016-08-18 12:33:06 +02:00
2020-03-10 11:48:26 +01:00
subscribed , unsubscribed , never_subscribed = gather_subscriptions_helper ( test_user )
2017-05-09 07:01:42 +02:00
not_subbed = unsubscribed + never_subscribed
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " color " ,
" stream_id " : not_subbed [ 0 ] [ " stream_id " ] ,
" value " : " #ffffff " } ] ) . decode ( ) } )
2016-08-18 12:33:06 +02:00
self . assert_json_error (
2020-06-13 08:59:37 +02:00
result , " Not subscribed to stream id {} " . format ( not_subbed [ 0 ] [ " stream_id " ] ) )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_set_color_missing_color ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Updating the color property requires a color .
"""
2017-05-08 16:23:43 +02:00
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2017-05-08 16:23:43 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " color " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2014-02-14 21:55:20 +01:00
self . assert_json_error (
result , " value key is missing from subscription_data[0] " )
2014-01-29 22:03:40 +01:00
2019-11-26 02:37:12 +01:00
def test_set_stream_wildcard_mentions_notify ( self ) - > None :
"""
A POST request to /api/v1/users/me/subscriptions/properties with wildcard_mentions_notify
sets the property.
"""
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2019-11-26 02:37:12 +01:00
subs = gather_subscriptions ( test_user ) [ 0 ]
sub = subs [ 0 ]
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " wildcard_mentions_notify " ,
" stream_id " : sub [ " stream_id " ] ,
" value " : True } ] ) . decode ( ) } )
2019-11-26 02:37:12 +01:00
self . assert_json_success ( result )
updated_sub = get_subscription ( sub [ ' name ' ] , test_user )
self . assertIsNotNone ( updated_sub )
self . assertEqual ( updated_sub . wildcard_mentions_notify , True )
2017-11-05 10:51:25 +01:00
def test_set_pin_to_top ( self ) - > None :
2016-07-01 07:26:09 +02:00
"""
A POST request to /api/v1/users/me/subscriptions/properties with stream_id and
pin_to_top data pins the stream.
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-01 07:26:09 +02:00
2020-03-10 11:48:26 +01:00
old_subs , _ = gather_subscriptions ( user )
2016-07-01 07:26:09 +02:00
sub = old_subs [ 0 ]
2017-05-09 07:01:42 +02:00
stream_id = sub [ ' stream_id ' ]
2016-07-01 07:26:09 +02:00
new_pin_to_top = not sub [ ' pin_to_top ' ]
2020-03-10 11:48:26 +01:00
result = self . api_post ( user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " pin_to_top " ,
" stream_id " : stream_id ,
" value " : new_pin_to_top } ] ) . decode ( ) } )
2016-07-01 07:26:09 +02:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
updated_sub = get_subscription ( sub [ ' name ' ] , user )
2016-07-01 07:26:09 +02:00
self . assertIsNotNone ( updated_sub )
self . assertEqual ( updated_sub . pin_to_top , new_pin_to_top )
2018-08-02 23:46:05 +02:00
def test_change_is_muted ( self ) - > None :
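# Verifies that toggling "is_muted" (and its legacy alias "in_home_view", whose
# boolean value has the opposite sense) updates the Subscription row, and that
# the events sent to clients still use the legacy 'in_home_view' property.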
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2018-08-02 23:46:05 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
sub = Subscription . objects . get ( recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user )
self . assertEqual ( sub . is_muted , False )
events : List [ Mapping [ str , Any ] ] = [ ]
2018-08-02 23:46:05 +02:00
property_name = " is_muted "
with tornado_redirected_to_list ( events ) :
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : True ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
self . assert_length ( events , 1 )
self . assertEqual ( events [ 0 ] [ ' event ' ] [ ' property ' ] , ' in_home_view ' )
self . assertEqual ( events [ 0 ] [ ' event ' ] [ ' value ' ] , False )
sub = Subscription . objects . get ( recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user )
self . assertEqual ( sub . is_muted , True )
events = [ ]
legacy_property_name = ' in_home_view '
with tornado_redirected_to_list ( events ) :
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : legacy_property_name ,
" value " : True ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
self . assert_length ( events , 1 )
self . assertEqual ( events [ 0 ] [ ' event ' ] [ ' property ' ] , ' in_home_view ' )
self . assertEqual ( events [ 0 ] [ ' event ' ] [ ' value ' ] , True )
self . assert_json_success ( result )
sub = Subscription . objects . get ( recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user )
self . assertEqual ( sub . is_muted , False )
events = [ ]
with tornado_redirected_to_list ( events ) :
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : legacy_property_name ,
" value " : False ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
self . assert_length ( events , 1 )
self . assertEqual ( events [ 0 ] [ ' event ' ] [ ' property ' ] , ' in_home_view ' )
self . assertEqual ( events [ 0 ] [ ' event ' ] [ ' value ' ] , False )
sub = Subscription . objects . get ( recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user )
self . assertEqual ( sub . is_muted , True )
2017-11-05 10:51:25 +01:00
def test_set_subscription_property_incorrect ( self ) - > None :
2016-07-16 18:50:41 +02:00
"""
Trying to set a property incorrectly returns a JSON error .
"""
2017-05-08 16:23:43 +02:00
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2017-05-08 16:23:43 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2016-07-16 18:50:41 +02:00
2018-08-02 23:46:05 +02:00
property_name = " is_muted "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2018-08-02 23:46:05 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a boolean ' )
2018-08-02 23:46:05 +02:00
2016-07-16 18:50:41 +02:00
property_name = " in_home_view "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a boolean ' )
2016-07-16 18:50:41 +02:00
property_name = " desktop_notifications "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a boolean ' )
2016-07-16 18:50:41 +02:00
property_name = " audible_notifications "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a boolean ' )
2016-07-16 18:50:41 +02:00
2017-08-17 16:55:32 +02:00
property_name = " push_notifications "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2017-08-17 16:55:32 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a boolean ' )
2017-08-17 16:55:32 +02:00
2017-11-21 04:35:26 +01:00
property_name = " email_notifications "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2017-11-21 04:35:26 +01:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a boolean ' )
2017-11-21 04:35:26 +01:00
2019-11-26 02:37:12 +01:00
property_name = " wildcard_mentions_notify "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2019-11-26 02:37:12 +01:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f " { property_name } is not a boolean " )
2019-11-26 02:37:12 +01:00
2016-07-16 18:50:41 +02:00
property_name = " color "
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : property_name ,
" value " : False ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f ' { property_name } is not a string ' )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_json_subscription_property_invalid_stream ( self ) - > None :
2020-03-10 11:48:26 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2016-07-24 16:45:20 +02:00
2017-05-09 07:01:42 +02:00
stream_id = 1000
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " is_muted " ,
" stream_id " : stream_id ,
" value " : False } ] ) . decode ( ) } )
2017-05-09 07:01:42 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2016-07-24 16:45:20 +02:00
2017-11-05 10:51:25 +01:00
def test_set_invalid_property ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Trying to set an invalid property returns a JSON error .
"""
2017-05-08 16:23:43 +02:00
test_user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2017-05-08 16:23:43 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2020-03-10 11:48:26 +01:00
result = self . api_post ( test_user , " /api/v1/users/me/subscriptions/properties " ,
2020-08-07 01:09:47 +02:00
{ " subscription_data " : orjson . dumps ( [ { " property " : " bad " ,
" value " : " bad " ,
" stream_id " : subs [ 0 ] [ " stream_id " ] } ] ) . decode ( ) } )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result ,
" Unknown subscription property: bad " )
2016-08-23 02:08:42 +02:00
class SubscriptionRestApiTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_basic_add_delete ( self ) - > None :
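# PATCH /api/v1/users/me/subscriptions accepts JSON-encoded "add" and/or
# "delete" lists; here we add a stream and then remove it again.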
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
# add
request = {
2020-08-07 01:09:47 +02:00
' add ' : orjson . dumps ( [ { ' name ' : ' my_test_stream_1 ' } ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
streams = self . get_streams ( user )
2014-01-29 22:03:40 +01:00
self . assertTrue ( ' my_test_stream_1 ' in streams )
# now delete the same stream
request = {
2020-08-07 01:09:47 +02:00
' delete ' : orjson . dumps ( [ ' my_test_stream_1 ' ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
streams = self . get_streams ( user )
2014-01-29 22:03:40 +01:00
self . assertTrue ( ' my_test_stream_1 ' not in streams )
2019-01-10 15:03:15 +01:00
def test_add_with_color ( self ) - > None :
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2019-01-10 15:03:15 +01:00
# add with color proposition
request = {
2020-08-07 01:09:47 +02:00
' add ' : orjson . dumps ( [ { ' name ' : ' my_test_stream_2 ' , ' color ' : ' #afafaf ' } ] ) . decode ( ) ,
2019-01-10 15:03:15 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2019-01-10 15:03:15 +01:00
self . assert_json_success ( result )
# incorrect color format
request = {
2020-08-07 01:09:47 +02:00
' subscriptions ' : orjson . dumps ( [ { ' name ' : ' my_test_stream_3 ' , ' color ' : ' #0g0g0g ' } ] ) . decode ( ) ,
2019-01-10 15:03:15 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_post ( user , " /api/v1/users/me/subscriptions " , request )
2019-01-10 15:03:15 +01:00
self . assert_json_error ( result , ' subscriptions[0][ " color " ] is not a valid hex color code ' )
2017-11-05 10:51:25 +01:00
def test_api_valid_property ( self ) - > None :
2017-05-09 22:29:59 +02:00
"""
Trying to set valid json returns success message .
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2017-05-09 22:29:59 +02:00
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-10 11:48:26 +01:00
subs = gather_subscriptions ( user ) [ 0 ]
2020-06-13 08:59:37 +02:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions/ {} " . format ( subs [ 0 ] [ " stream_id " ] ) ,
2017-12-14 19:02:31 +01:00
{ ' property ' : ' color ' , ' value ' : ' #c2c2c2 ' } )
2017-05-09 22:29:59 +02:00
self . assert_json_success ( result )
2017-11-05 10:51:25 +01:00
def test_api_invalid_property ( self ) - > None :
2017-05-09 22:29:59 +02:00
"""
Trying to set an invalid property returns a JSON error .
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2017-05-09 22:29:59 +02:00
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-10 11:48:26 +01:00
subs = gather_subscriptions ( user ) [ 0 ]
2017-05-09 22:29:59 +02:00
2020-06-13 08:59:37 +02:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions/ {} " . format ( subs [ 0 ] [ " stream_id " ] ) ,
2017-12-14 19:02:31 +01:00
{ ' property ' : ' invalid ' , ' value ' : ' somevalue ' } )
2017-05-09 22:29:59 +02:00
self . assert_json_error ( result ,
" Unknown subscription property: invalid " )
2017-11-05 10:51:25 +01:00
def test_api_invalid_stream_id ( self ) - > None :
2017-05-09 22:29:59 +02:00
"""
Trying to set an invalid stream id returns a JSON error .
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions/121 " ,
2018-08-02 23:46:05 +02:00
{ ' property ' : ' is_muted ' , ' value ' : ' somevalue ' } )
2017-05-09 22:29:59 +02:00
self . assert_json_error ( result ,
" Invalid stream id " )
2017-11-05 10:51:25 +01:00
def test_bad_add_parameters ( self ) - > None :
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def check_for_error ( val : Any , expected_message : str ) - > None :
2014-01-29 22:03:40 +01:00
request = {
2020-08-07 01:09:47 +02:00
' add ' : orjson . dumps ( val ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result , expected_message )
check_for_error ( [ ' foo ' ] , ' add[0] is not a dict ' )
check_for_error ( [ { ' bogus ' : ' foo ' } ] , ' name key is missing from add[0] ' )
check_for_error ( [ { ' name ' : { } } ] , ' add[0][ " name " ] is not a string ' )
2017-11-05 10:51:25 +01:00
def test_bad_principals ( self ) - > None :
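# "principals" is expected to be a JSON list of user identifiers (user IDs in
# the other tests here); a list containing a dict should be rejected by the
# validator.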
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
request = {
2020-08-07 01:09:47 +02:00
' add ' : orjson . dumps ( [ { ' name ' : ' my_new_stream ' } ] ) . decode ( ) ,
' principals ' : orjson . dumps ( [ { } ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2020-04-09 19:07:57 +02:00
self . assert_json_error ( result , ' principals is not an allowed_type ' )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_bad_delete_parameters ( self ) - > None :
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
request = {
2020-08-07 01:09:47 +02:00
' delete ' : orjson . dumps ( [ { ' name ' : ' my_test_stream_1 ' } ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result , " delete[0] is not a string " )
2017-11-05 10:51:25 +01:00
def test_add_or_delete_not_specified ( self ) - > None :
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-16 18:50:41 +02:00
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , { } )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
' Nothing to do. Specify at least one of " add " or " delete " . ' )
2017-11-05 10:51:25 +01:00
def test_patch_enforces_valid_stream_name_check ( self ) - > None :
2016-07-16 18:50:41 +02:00
"""
The only way to force an error is with an empty string.
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-16 18:50:41 +02:00
invalid_stream_name = " "
request = {
2020-08-07 01:09:47 +02:00
' delete ' : orjson . dumps ( [ invalid_stream_name ] ) . decode ( ) ,
2016-07-16 18:50:41 +02:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f " Invalid stream name ' { invalid_stream_name } ' " )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_stream_name_too_long ( self ) - > None :
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-16 18:50:41 +02:00
long_stream_name = " a " * 61
request = {
2020-08-07 01:09:47 +02:00
' delete ' : orjson . dumps ( [ long_stream_name ] ) . decode ( ) ,
2016-07-16 18:50:41 +02:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2016-07-16 18:50:41 +02:00
self . assert_json_error ( result ,
2018-01-08 19:54:19 +01:00
" Stream name too long (limit: 60 characters). " )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_stream_name_contains_null ( self ) - > None :
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2017-04-26 01:28:22 +02:00
stream_name = " abc \000 "
request = {
2020-08-07 01:09:47 +02:00
' delete ' : orjson . dumps ( [ stream_name ] ) . decode ( ) ,
2017-04-26 01:28:22 +02:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2017-04-26 01:28:22 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f " Stream name ' { stream_name } ' contains NULL (0x00) characters. " )
2017-04-26 01:28:22 +02:00
2017-11-05 10:51:25 +01:00
def test_compose_views_rollback ( self ) - > None :
2016-09-12 17:21:49 +02:00
'''
The compose_views() function is used under the hood by
update_subscriptions_backend. It's a pretty simple method in terms of
control flow, but it uses a Django rollback, which may make it brittle
code when we upgrade Django. We test the function's rollback logic
here with a simple scenario to avoid false positives related to
subscription complications.
'''
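# Roughly, compose_views is assumed to run each (method, kwargs) pair inside a
# single atomic transaction and to raise JsonableError, rolling back every
# method's database writes, as soon as any method returns an error response.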
user_profile = self . example_user ( ' hamlet ' )
2016-09-12 17:21:49 +02:00
user_profile . full_name = ' Hamlet '
user_profile . save ( )
2017-11-05 10:51:25 +01:00
def method1 ( req : HttpRequest , user_profile : UserProfile ) - > HttpResponse :
2016-09-12 17:21:49 +02:00
user_profile . full_name = ' Should not be committed '
user_profile . save ( )
2016-10-21 07:34:04 +02:00
return json_success ( )
2016-09-12 17:21:49 +02:00
2017-11-05 10:51:25 +01:00
def method2 ( req : HttpRequest , user_profile : UserProfile ) - > HttpResponse :
2017-03-09 09:03:21 +01:00
return json_error ( ' random failure ' )
2016-09-12 17:21:49 +02:00
with self . assertRaises ( JsonableError ) :
compose_views ( None , user_profile , [ ( method1 , { } ) , ( method2 , { } ) ] )
2017-05-07 17:21:26 +02:00
user_profile = self . example_user ( ' hamlet ' )
2016-09-12 17:21:49 +02:00
self . assertEqual ( user_profile . full_name , ' Hamlet ' )
2016-08-23 02:08:42 +02:00
class SubscriptionAPITest ( ZulipTestCase ) :
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def setUp ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
All tests will be logged in as hamlet . Also save various useful values
as attributes that tests can access .
"""
2019-10-19 20:47:00 +02:00
super ( ) . setUp ( )
2017-05-07 21:25:59 +02:00
self . user_profile = self . example_user ( ' hamlet ' )
self . test_email = self . user_profile . email
2017-10-07 16:00:39 +02:00
self . test_user = self . user_profile
2020-03-06 18:40:46 +01:00
self . login_user ( self . user_profile )
2017-07-12 12:32:14 +02:00
self . test_realm = self . user_profile . realm
2020-03-09 21:41:26 +01:00
self . streams = self . get_streams ( self . user_profile )
2014-01-29 22:03:40 +01:00
2018-05-11 01:39:38 +02:00
def make_random_stream_names ( self , existing_stream_names : List [ str ] ) - > List [ str ] :
2014-01-29 22:03:40 +01:00
"""
Helper function to make up random stream names. It takes
existing_stream_names and randomly appends a digit to the end of each,
but avoids names of streams that already exist in the realm.
"""
random_streams = [ ]
2017-07-12 12:32:14 +02:00
all_stream_names = [ stream . name for stream in Stream . objects . filter ( realm = self . test_realm ) ]
2014-01-29 22:03:40 +01:00
for stream in existing_stream_names :
random_stream = stream + str ( random . randint ( 0 , 9 ) )
2016-05-10 01:55:43 +02:00
if random_stream not in all_stream_names :
2014-01-29 22:03:40 +01:00
random_streams . append ( random_stream )
return random_streams
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_list ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling /api/v1/users/me/subscriptions should successfully return your subscriptions.
"""
2020-03-10 11:48:26 +01:00
result = self . api_get ( self . test_user , " /api/v1/users/me/subscriptions " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
self . assertIn ( " subscriptions " , json )
for stream in json [ ' subscriptions ' ] :
2017-09-27 10:11:59 +02:00
self . assertIsInstance ( stream [ ' name ' ] , str )
self . assertIsInstance ( stream [ ' color ' ] , str )
2014-01-29 22:03:40 +01:00
self . assertIsInstance ( stream [ ' invite_only ' ] , bool )
2017-03-23 07:22:28 +01:00
# check that the stream name corresponds to an actual
# stream; will throw Stream.DoesNotExist if it doesn't
2017-07-12 12:32:14 +02:00
get_stream ( stream [ ' name ' ] , self . test_realm )
2014-01-29 22:03:40 +01:00
list_streams = [ stream [ ' name ' ] for stream in json [ " subscriptions " ] ]
# also check that this matches the list of your subscriptions
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( list_streams ) , sorted ( self . streams ) )
2014-01-29 22:03:40 +01:00
2018-05-11 01:39:38 +02:00
def helper_check_subs_before_and_after_add ( self , subscriptions : List [ str ] ,
2017-11-20 03:22:57 +01:00
other_params : Dict [ str , Any ] ,
2018-05-11 01:39:38 +02:00
subscribed : List [ str ] ,
already_subscribed : List [ str ] ,
email : str , new_subs : List [ str ] ,
2017-11-20 03:22:57 +01:00
realm : Realm ,
invite_only : bool = False ) - > None :
2014-01-29 22:03:40 +01:00
"""
Check result of adding subscriptions .
You can add subscriptions for yourself or possibly many
principals , which is why e - mails map to subscriptions in the
result .
The result json is of the form
{ " msg " : " " ,
" result " : " success " ,
2017-05-25 01:44:04 +02:00
" already_subscribed " : { self . example_email ( " iago " ) : [ " Venice " , " Verona " ] } ,
" subscribed " : { self . example_email ( " iago " ) : [ " Venice8 " ] } }
2014-01-29 22:03:40 +01:00
"""
2020-03-09 21:41:26 +01:00
result = self . common_subscribe_to_streams ( self . test_user , subscriptions ,
2014-01-29 22:03:40 +01:00
other_params , invite_only = invite_only )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( subscribed ) , sorted ( json [ " subscribed " ] [ email ] ) )
self . assertEqual ( sorted ( already_subscribed ) , sorted ( json [ " already_subscribed " ] [ email ] ) )
2020-03-09 21:41:26 +01:00
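# Also check the database: the invitee's full subscription list should now
# match new_subs.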
user = get_user ( email , realm )
new_streams = self . get_streams ( user )
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( new_streams ) , sorted ( new_subs ) )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_add ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-04-13 02:05:48 +02:00
Calling POST / json / users / me / subscriptions should successfully add
streams , and should determine which are new subscriptions vs
which were already subscribed . We add 2 new streams to the
list of subscriptions and confirm the right number of events
are generated .
2014-01-29 22:03:40 +01:00
"""
self . assertNotEqual ( len ( self . streams ) , 0 ) # necessary for full test coverage
2020-04-09 21:51:58 +02:00
add_streams = [ " Verona2 " , " Denmark5 " ]
2014-01-29 22:03:40 +01:00
self . assertNotEqual ( len ( add_streams ) , 0 ) # necessary for full test coverage
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2014-01-29 22:03:40 +01:00
with tornado_redirected_to_list ( events ) :
self . helper_check_subs_before_and_after_add ( self . streams + add_streams , { } ,
2017-07-12 13:07:48 +02:00
add_streams , self . streams , self . test_email ,
self . streams + add_streams , self . test_realm )
2017-04-27 00:03:21 +02:00
self . assert_length ( events , 8 )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_add_with_announce ( self ) - > None :
2016-06-24 20:10:27 +02:00
"""
Calling POST / json / users / me / subscriptions should successfully add
streams , and should determine which are new subscriptions vs
which were already subscribed . We add 2 new streams to the
list of subscriptions and confirm the right number of events
are generated .
"""
self . assertNotEqual ( len ( self . streams ) , 0 )
2020-04-09 21:51:58 +02:00
add_streams = [ " Verona2 " , " Denmark5 " ]
2016-06-24 20:10:27 +02:00
self . assertNotEqual ( len ( add_streams ) , 0 )
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2016-06-24 20:10:27 +02:00
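# With announce=true and a notifications stream configured below, the new
# streams should be announced there, producing one extra event (9 vs. 8).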
other_params = {
' announce ' : ' true ' ,
}
2017-07-12 22:17:24 +02:00
notifications_stream = get_stream ( self . streams [ 0 ] , self . test_realm )
2017-09-17 19:53:38 +02:00
self . test_realm . notifications_stream_id = notifications_stream . id
2017-07-12 22:17:24 +02:00
self . test_realm . save ( )
2016-06-24 20:10:27 +02:00
# Delete the UserProfile from the cache so the realm change will be
# picked up
cache . cache_delete ( cache . user_profile_by_email_cache_key ( self . test_email ) )
with tornado_redirected_to_list ( events ) :
self . helper_check_subs_before_and_after_add ( self . streams + add_streams , other_params ,
2017-07-12 13:07:48 +02:00
add_streams , self . streams , self . test_email ,
self . streams + add_streams , self . test_realm )
2017-04-27 00:03:21 +02:00
self . assertEqual ( len ( events ) , 9 )
2016-06-24 20:10:27 +02:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_notifies_pm ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions should notify when a new stream is created .
2014-01-29 22:03:40 +01:00
"""
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
2014-01-29 22:03:40 +01:00
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( invitee ) [ 0 ]
2016-06-04 19:50:38 +02:00
invite_streams = self . make_random_stream_names ( [ current_stream ] ) [ : 1 ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2014-01-29 22:03:40 +01:00
invitee ,
invite_streams ,
extra_post_data = {
' announce ' : ' true ' ,
2020-08-07 01:09:47 +02:00
' principals ' : orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
} ,
)
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_notifies_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions should notify when a new stream is created .
2014-01-29 22:03:40 +01:00
"""
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
2014-01-29 22:03:40 +01:00
invitee_full_name = ' Iago '
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( invitee ) [ 0 ]
2016-06-04 19:50:38 +02:00
invite_streams = self . make_random_stream_names ( [ current_stream ] ) [ : 1 ]
2014-01-29 22:03:40 +01:00
2017-07-12 12:32:14 +02:00
notifications_stream = get_stream ( current_stream , self . test_realm )
2017-09-17 19:53:38 +02:00
self . test_realm . notifications_stream_id = notifications_stream . id
2017-07-12 12:32:14 +02:00
self . test_realm . save ( )
2014-01-29 22:03:40 +01:00
# Delete the UserProfile from the cache so the realm change will be
# picked up
2020-03-09 21:41:26 +01:00
cache . cache_delete ( cache . user_profile_by_email_cache_key ( invitee . email ) )
2014-01-29 22:03:40 +01:00
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2014-01-29 22:03:40 +01:00
invitee ,
invite_streams ,
extra_post_data = dict (
announce = ' true ' ,
2020-08-07 01:09:47 +02:00
principals = orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
) ,
)
2017-04-27 00:03:21 +02:00
msg = self . get_second_to_last_message ( )
2014-01-29 22:03:40 +01:00
self . assertEqual ( msg . recipient . type , Recipient . STREAM )
2017-05-08 17:42:50 +02:00
self . assertEqual ( msg . sender_id , self . notification_bot ( ) . id )
2020-06-13 08:59:37 +02:00
expected_msg = f " @_** { invitee_full_name } | { invitee . id } ** created a new stream #** { invite_streams [ 0 ] } **. "
2014-01-29 22:03:40 +01:00
self . assertEqual ( msg . content , expected_msg )
2017-11-05 10:51:25 +01:00
def test_successful_cross_realm_notification ( self ) - > None :
2017-01-18 23:19:18 +01:00
"""
Calling POST / json / users / me / subscriptions in a new realm
should notify with a proper new stream link
"""
2017-08-24 04:52:34 +02:00
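# Create a separate realm so we can check that the announcement message
# links to the new stream in that realm.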
realm = do_create_realm ( " testrealm " , " Test Realm " )
2017-01-18 23:19:18 +01:00
2019-02-23 23:14:31 +01:00
notifications_stream = Stream . objects . get ( name = ' general ' , realm = realm )
2017-01-18 23:19:18 +01:00
realm . notifications_stream = notifications_stream
realm . save ( )
invite_streams = [ " cross_stream " ]
2017-05-07 17:21:26 +02:00
user = self . example_user ( ' AARON ' )
2017-01-18 23:19:18 +01:00
user . realm = realm
user . save ( )
# Delete the UserProfile from the cache so the realm change will be
# picked up
cache . cache_delete ( cache . user_profile_by_email_cache_key ( user . email ) )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
user ,
2017-01-18 23:19:18 +01:00
invite_streams ,
extra_post_data = dict (
2020-04-10 05:23:40 +02:00
announce = ' true ' ,
2017-01-18 23:19:18 +01:00
) ,
2017-08-26 00:58:13 +02:00
subdomain = " testrealm " ,
2017-01-18 23:19:18 +01:00
)
2017-04-27 00:03:21 +02:00
msg = self . get_second_to_last_message ( )
2017-01-18 23:19:18 +01:00
self . assertEqual ( msg . recipient . type , Recipient . STREAM )
2017-05-08 17:42:50 +02:00
self . assertEqual ( msg . sender_id , self . notification_bot ( ) . id )
2017-01-18 23:19:18 +01:00
stream_id = Stream . objects . latest ( ' id ' ) . id
2020-06-13 08:59:37 +02:00
expected_rendered_msg = f ' <p><span class= " user-mention silent " data-user-id= " { user . id } " > { user . full_name } </span> created a new stream <a class= " stream " data-stream-id= " { stream_id } " href= " /#narrow/stream/ { stream_id } - { invite_streams [ 0 ] } " ># { invite_streams [ 0 ] } </a>.</p> '
2017-01-18 23:19:18 +01:00
self . assertEqual ( msg . rendered_content , expected_rendered_msg )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_notifies_with_escaping ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions should notify when a new stream is created .
2014-01-29 22:03:40 +01:00
"""
invitee_full_name = ' Iago '
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( ' iago ' )
2014-01-29 22:03:40 +01:00
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( invitee ) [ 0 ]
2017-07-12 12:32:14 +02:00
notifications_stream = get_stream ( current_stream , self . test_realm )
2017-09-17 19:53:38 +02:00
self . test_realm . notifications_stream_id = notifications_stream . id
2017-07-12 12:32:14 +02:00
self . test_realm . save ( )
2016-01-24 03:39:44 +01:00
2014-01-29 22:03:40 +01:00
invite_streams = [ ' strange ) \\ test ' ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2014-01-29 22:03:40 +01:00
invitee ,
invite_streams ,
extra_post_data = {
' announce ' : ' true ' ,
2020-08-07 01:09:47 +02:00
' principals ' : orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
} ,
)
2017-04-27 00:03:21 +02:00
msg = self . get_second_to_last_message ( )
2017-05-08 17:42:50 +02:00
self . assertEqual ( msg . sender_id , self . notification_bot ( ) . id )
2020-06-13 08:59:37 +02:00
expected_msg = f " @_** { invitee_full_name } | { invitee . id } ** created a new stream #** { invite_streams [ 0 ] } **. "
2014-01-29 22:03:40 +01:00
self . assertEqual ( msg . content , expected_msg )
2017-11-05 10:51:25 +01:00
def test_non_ascii_stream_subscription ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Subscribing to a stream name with non - ASCII characters succeeds .
"""
2020-04-09 21:51:58 +02:00
self . helper_check_subs_before_and_after_add ( self . streams + [ " hümbüǵ " ] , { } ,
[ " hümbüǵ " ] , self . streams , self . test_email ,
self . streams + [ " hümbüǵ " ] , self . test_realm )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_too_long ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions on a stream whose name is > 60
2014-01-29 22:03:40 +01:00
characters should return a JSON error .
"""
# character limit is 60 characters
long_stream_name = " a " * 61
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( self . test_user , [ long_stream_name ] , allow_fail = True )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result ,
2018-01-08 19:54:19 +01:00
" Stream name too long (limit: 60 characters). " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_stream_with_null ( self ) - > None :
2017-04-26 01:28:22 +02:00
"""
Calling POST / json / users / me / subscriptions on a stream whose name contains
null characters should return a JSON error .
"""
stream_name = " abc \000 "
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( self . test_user , [ stream_name ] , allow_fail = True )
2017-04-26 01:28:22 +02:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f " Stream name ' { stream_name } ' contains NULL (0x00) characters. " )
2017-04-26 01:28:22 +02:00
2017-11-05 10:51:25 +01:00
def test_user_settings_for_adding_streams ( self ) - > None :
2016-07-27 22:09:33 +02:00
with mock . patch ( ' zerver.models.UserProfile.can_create_streams ' , return_value = False ) :
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( self . test_user , [ ' stream1 ' ] , allow_fail = True )
2016-05-19 03:46:33 +02:00
self . assert_json_error ( result , ' User cannot create streams. ' )
2016-07-27 22:09:33 +02:00
with mock . patch ( ' zerver.models.UserProfile.can_create_streams ' , return_value = True ) :
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( self . test_user , [ ' stream2 ' ] )
2014-01-29 22:03:40 +01:00
2016-05-19 03:46:33 +02:00
# User should still be able to subscribe to an existing stream
2016-07-27 22:09:33 +02:00
with mock . patch ( ' zerver.models.UserProfile.can_create_streams ' , return_value = False ) :
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( self . test_user , [ ' stream2 ' ] )
2014-01-29 22:03:40 +01:00
2018-07-30 00:59:45 +02:00
def test_can_create_streams ( self ) - > None :
othello = self . example_user ( ' othello ' )
2019-10-05 02:35:07 +02:00
othello . role = UserProfile . ROLE_REALM_ADMINISTRATOR
2018-07-30 00:59:45 +02:00
self . assertTrue ( othello . can_create_streams ( ) )
2019-10-05 02:35:07 +02:00
othello . role = UserProfile . ROLE_MEMBER
2020-04-02 21:53:20 +02:00
othello . realm . create_stream_policy = Realm . POLICY_ADMINS_ONLY
2018-07-30 00:59:45 +02:00
self . assertFalse ( othello . can_create_streams ( ) )
2020-04-02 21:53:20 +02:00
othello . realm . create_stream_policy = Realm . POLICY_MEMBERS_ONLY
2019-10-05 02:35:07 +02:00
othello . role = UserProfile . ROLE_GUEST
2018-07-30 00:59:45 +02:00
self . assertFalse ( othello . can_create_streams ( ) )
2019-10-05 02:35:07 +02:00
othello . role = UserProfile . ROLE_MEMBER
2018-07-30 00:59:45 +02:00
othello . realm . waiting_period_threshold = 1000
2020-04-02 21:53:20 +02:00
othello . realm . create_stream_policy = Realm . POLICY_FULL_MEMBERS_ONLY
2018-07-30 00:59:45 +02:00
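# With POLICY_FULL_MEMBERS_ONLY, a member can create streams only after
# being joined longer than the realm's waiting_period_threshold (in days).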
othello . date_joined = timezone_now ( ) - timedelta ( days = ( othello . realm . waiting_period_threshold - 1 ) )
self . assertFalse ( othello . can_create_streams ( ) )
othello . date_joined = timezone_now ( ) - timedelta ( days = ( othello . realm . waiting_period_threshold + 1 ) )
self . assertTrue ( othello . can_create_streams ( ) )
2018-07-30 01:25:13 +02:00
def test_user_settings_for_subscribing_other_users ( self ) - > None :
"""
2019-04-08 19:23:00 +02:00
You can ' t subscribe other people to streams if you are a guest or your account is not old
enough .
2018-07-30 01:25:13 +02:00
"""
2019-04-08 19:23:00 +02:00
user_profile = self . example_user ( " cordelia " )
2020-04-09 19:07:57 +02:00
invitee_user_id = user_profile . id
2019-04-08 19:23:00 +02:00
realm = user_profile . realm
2018-07-30 01:25:13 +02:00
2020-04-02 21:53:20 +02:00
do_set_realm_property ( realm , " create_stream_policy " , Realm . POLICY_MEMBERS_ONLY )
2019-04-08 19:23:00 +02:00
do_set_realm_property ( realm , " invite_to_stream_policy " ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_ADMINS_ONLY )
2019-04-08 19:23:00 +02:00
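# With invite_to_stream_policy set to admins only, a regular member may not
# subscribe another user.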
result = self . common_subscribe_to_streams (
2020-06-17 23:49:33 +02:00
self . test_user ,
[ ' stream1 ' ] ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
)
2019-04-08 19:23:00 +02:00
self . assert_json_error (
result , " Only administrators can modify other users ' subscriptions. " )
do_set_realm_property ( realm , " invite_to_stream_policy " ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_MEMBERS_ONLY )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-08-07 01:09:47 +02:00
self . test_user , [ ' stream2 ' ] , { " principals " : orjson . dumps ( [
self . test_user . id , invitee_user_id ] ) . decode ( ) } )
2019-04-08 19:23:00 +02:00
self . unsubscribe ( user_profile , " stream2 " )
do_set_realm_property ( realm , " invite_to_stream_policy " ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_FULL_MEMBERS_ONLY )
2019-04-08 19:23:00 +02:00
do_set_realm_property ( realm , " waiting_period_threshold " , 100000 )
result = self . common_subscribe_to_streams (
2020-06-17 23:49:33 +02:00
self . test_user ,
[ ' stream2 ' ] ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
)
2019-04-08 19:23:00 +02:00
self . assert_json_error (
result , " Your account is too new to modify other users ' subscriptions. " )
do_set_realm_property ( realm , " waiting_period_threshold " , 0 )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-08-07 01:09:47 +02:00
self . test_user , [ ' stream2 ' ] , { " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } )
2018-07-30 01:25:13 +02:00
def test_can_subscribe_other_users ( self ) - > None :
"""
2019-04-08 19:23:00 +02:00
You can ' t subscribe other people to streams if you are a guest or your account is not old
enough .
2018-07-30 01:25:13 +02:00
"""
othello = self . example_user ( ' othello ' )
2020-05-21 00:13:06 +02:00
do_change_user_role ( othello , UserProfile . ROLE_REALM_ADMINISTRATOR )
2018-07-30 01:25:13 +02:00
self . assertTrue ( othello . can_subscribe_other_users ( ) )
2020-05-21 00:13:06 +02:00
do_change_user_role ( othello , UserProfile . ROLE_MEMBER )
do_change_user_role ( othello , UserProfile . ROLE_GUEST )
2018-07-30 01:25:13 +02:00
self . assertFalse ( othello . can_subscribe_other_users ( ) )
2020-05-21 00:13:06 +02:00
do_change_user_role ( othello , UserProfile . ROLE_MEMBER )
2019-04-08 19:23:00 +02:00
do_set_realm_property ( othello . realm , " waiting_period_threshold " , 1000 )
do_set_realm_property ( othello . realm , " invite_to_stream_policy " ,
2020-04-02 21:53:20 +02:00
Realm . POLICY_FULL_MEMBERS_ONLY )
2018-07-30 01:25:13 +02:00
othello . date_joined = timezone_now ( ) - timedelta ( days = ( othello . realm . waiting_period_threshold - 1 ) )
self . assertFalse ( othello . can_subscribe_other_users ( ) )
othello . date_joined = timezone_now ( ) - timedelta ( days = ( othello . realm . waiting_period_threshold + 1 ) )
self . assertTrue ( othello . can_subscribe_other_users ( ) )
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_invalid_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions on a stream whose name is invalid ( as
2014-01-29 22:03:40 +01:00
defined by valid_stream_name in zerver / views . py ) should return a JSON
error .
"""
# currently, the only invalid name is the empty string
invalid_stream_name = " "
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( self . test_user , [ invalid_stream_name ] , allow_fail = True )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result ,
2020-06-10 06:41:04 +02:00
f " Invalid stream name ' { invalid_stream_name } ' " )
2014-01-29 22:03:40 +01:00
2020-04-09 19:07:57 +02:00
def assert_adding_subscriptions_for_principal ( self , invitee_data : Union [ str , int ] , invitee_realm : Realm ,
2018-05-11 01:39:38 +02:00
streams : List [ str ] , invite_only : bool = False ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions on behalf of another principal ( for
2014-01-29 22:03:40 +01:00
whom you have permission to add subscriptions ) should successfully add
those subscriptions and send a message to the subscribee notifying
them .
"""
2020-04-09 19:07:57 +02:00
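# The principal may be passed either as an email address or as a user ID.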
if isinstance ( invitee_data , str ) :
other_profile = get_user ( invitee_data , invitee_realm )
else :
other_profile = get_user_profile_by_id_in_realm ( invitee_data , invitee_realm )
2020-03-09 21:41:26 +01:00
current_streams = self . get_streams ( other_profile )
2014-01-29 22:03:40 +01:00
self . assertIsInstance ( other_profile , UserProfile )
self . assertNotEqual ( len ( current_streams ) , 0 ) # necessary for full test coverage
self . assertNotEqual ( len ( streams ) , 0 ) # necessary for full test coverage
streams_to_sub = streams [ : 1 ] # just add one, to make the message easier to check
streams_to_sub . extend ( current_streams )
self . helper_check_subs_before_and_after_add ( streams_to_sub ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invitee_data ] ) . decode ( ) } , streams [ : 1 ] ,
2020-04-09 19:07:57 +02:00
current_streams , other_profile . email , streams_to_sub ,
2017-07-12 13:07:48 +02:00
invitee_realm , invite_only = invite_only )
2017-04-27 00:03:21 +02:00
# verify that a welcome message was sent to the stream
2016-04-13 23:59:08 +02:00
msg = self . get_last_message ( )
2017-04-27 00:03:21 +02:00
self . assertEqual ( msg . recipient . type , msg . recipient . STREAM )
2020-04-09 21:51:58 +02:00
self . assertEqual ( msg . topic_name ( ) , ' stream events ' )
2019-07-11 18:32:38 +02:00
self . assertEqual ( msg . sender . email , settings . NOTIFICATION_BOT )
2020-06-30 14:12:27 +02:00
self . assertIn ( f " Stream created by @_** { self . test_user . full_name } | { self . test_user . id } ** " , msg . content )
2017-04-27 00:03:21 +02:00
2017-11-05 10:51:25 +01:00
def test_multi_user_subscription ( self ) - > None :
2017-10-07 16:00:39 +02:00
user1 = self . example_user ( " cordelia " )
user2 = self . example_user ( " iago " )
2017-01-04 05:30:48 +01:00
realm = get_realm ( " zulip " )
2014-01-29 22:03:40 +01:00
streams_to_sub = [ ' multi_user_stream ' ]
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2017-11-30 08:43:12 +01:00
flush_per_request_caches ( )
2014-01-29 22:03:40 +01:00
with tornado_redirected_to_list ( events ) :
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2014-01-29 22:03:40 +01:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ) ,
2017-01-24 06:34:26 +01:00
)
2020-06-29 23:31:25 +02:00
self . assert_length ( queries , 40 )
2014-01-29 22:03:40 +01:00
2017-05-16 01:32:50 +02:00
self . assert_length ( events , 7 )
2015-11-01 17:14:31 +01:00
for ev in [ x for x in events if x [ ' event ' ] [ ' type ' ] not in ( ' message ' , ' stream ' ) ] :
2020-06-12 17:26:50 +02:00
if ev [ ' event ' ] [ ' op ' ] == ' add ' :
2016-07-12 23:57:16 +02:00
self . assertEqual (
2017-01-24 07:06:13 +01:00
set ( ev [ ' event ' ] [ ' subscriptions ' ] [ 0 ] [ ' subscribers ' ] ) ,
2020-04-10 05:23:40 +02:00
{ user1 . id , user2 . id } ,
2016-07-12 23:57:16 +02:00
)
else :
# Check "peer_add" events for streams users were
# never subscribed to, in order for the neversubscribed
# structure to stay up-to-date.
self . assertEqual ( ev [ ' event ' ] [ ' op ' ] , ' peer_add ' )
2014-01-29 22:03:40 +01:00
stream = get_stream ( ' multi_user_stream ' , realm )
2017-10-29 15:40:07 +01:00
self . assertEqual ( num_subscribers_for_stream_id ( stream . id ) , 2 )
2014-01-29 22:03:40 +01:00
# Now add ourselves
events = [ ]
with tornado_redirected_to_list ( events ) :
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2017-01-24 07:06:13 +01:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ self . test_user . id ] ) . decode ( ) ) ,
2014-01-29 22:03:40 +01:00
)
2019-12-06 00:15:59 +01:00
self . assert_length ( queries , 14 )
2014-01-29 22:03:40 +01:00
2016-09-25 21:30:10 +02:00
self . assert_length ( events , 2 )
2014-01-29 22:03:40 +01:00
add_event , add_peer_event = events
2014-02-06 21:21:21 +01:00
self . assertEqual ( add_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
2014-01-29 22:03:40 +01:00
self . assertEqual ( add_event [ ' event ' ] [ ' op ' ] , ' add ' )
2017-05-23 20:57:59 +02:00
self . assertEqual ( add_event [ ' users ' ] , [ get_user ( self . test_email , self . test_realm ) . id ] )
2014-01-29 22:03:40 +01:00
self . assertEqual (
2017-01-24 07:06:13 +01:00
set ( add_event [ ' event ' ] [ ' subscriptions ' ] [ 0 ] [ ' subscribers ' ] ) ,
2020-04-10 05:23:40 +02:00
{ user1 . id , user2 . id , self . test_user . id } ,
2014-01-29 22:03:40 +01:00
)
2018-06-03 19:11:52 +02:00
self . assertNotIn ( self . example_user ( ' polonius ' ) . id , add_peer_event [ ' users ' ] )
2020-05-17 18:46:14 +02:00
self . assertEqual ( len ( add_peer_event [ ' users ' ] ) , 11 )
2014-02-06 21:21:21 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
2014-01-29 22:03:40 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' op ' ] , ' peer_add ' )
2016-10-31 20:18:32 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' user_id ' ] , self . user_profile . id )
2014-01-29 22:03:40 +01:00
stream = get_stream ( ' multi_user_stream ' , realm )
2017-10-29 15:40:07 +01:00
self . assertEqual ( num_subscribers_for_stream_id ( stream . id ) , 3 )
2014-01-29 22:03:40 +01:00
2016-10-20 00:50:09 +02:00
# Finally, add othello.
2014-01-29 22:03:40 +01:00
events = [ ]
2017-05-07 21:25:59 +02:00
user_profile = self . example_user ( ' othello ' )
email3 = user_profile . email
2017-10-07 16:00:39 +02:00
user3 = user_profile
2017-05-23 20:57:59 +02:00
realm3 = user_profile . realm
2014-01-29 22:03:40 +01:00
stream = get_stream ( ' multi_user_stream ' , realm )
with tornado_redirected_to_list ( events ) :
2016-10-20 00:50:09 +02:00
bulk_add_subscriptions ( [ stream ] , [ user_profile ] )
2014-01-29 22:03:40 +01:00
2016-09-25 21:30:10 +02:00
self . assert_length ( events , 2 )
2014-01-29 22:03:40 +01:00
add_event , add_peer_event = events
2014-02-06 21:21:21 +01:00
self . assertEqual ( add_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
2014-01-29 22:03:40 +01:00
self . assertEqual ( add_event [ ' event ' ] [ ' op ' ] , ' add ' )
2017-05-23 20:57:59 +02:00
self . assertEqual ( add_event [ ' users ' ] , [ get_user ( email3 , realm3 ) . id ] )
2014-01-29 22:03:40 +01:00
self . assertEqual (
2017-01-24 07:06:13 +01:00
set ( add_event [ ' event ' ] [ ' subscriptions ' ] [ 0 ] [ ' subscribers ' ] ) ,
2020-04-10 05:23:40 +02:00
{ user1 . id , user2 . id , user3 . id , self . test_user . id } ,
2014-01-29 22:03:40 +01:00
)
2016-10-20 00:50:09 +02:00
# We don't send a peer_add event to othello
self . assertNotIn ( user_profile . id , add_peer_event [ ' users ' ] )
2018-06-03 19:11:52 +02:00
self . assertNotIn ( self . example_user ( ' polonius ' ) . id , add_peer_event [ ' users ' ] )
2020-05-17 18:46:14 +02:00
self . assertEqual ( len ( add_peer_event [ ' users ' ] ) , 11 )
2014-02-06 21:21:21 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
2014-01-29 22:03:40 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' op ' ] , ' peer_add ' )
2016-10-31 20:18:32 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' user_id ' ] , user_profile . id )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_private_stream_subscription ( self ) - > None :
2017-01-29 01:21:31 +01:00
realm = get_realm ( " zulip " )
# Create a private stream with Hamlet subscribed
stream_name = " private "
2018-03-21 22:05:21 +01:00
stream = ensure_stream ( realm , stream_name , invite_only = True )
2017-01-29 01:21:31 +01:00
2017-05-07 21:25:59 +02:00
existing_user_profile = self . example_user ( ' hamlet ' )
2017-01-29 01:21:31 +01:00
bulk_add_subscriptions ( [ stream ] , [ existing_user_profile ] )
# Now subscribe Cordelia to the stream, capturing events
2017-05-07 19:39:30 +02:00
user_profile = self . example_user ( ' cordelia ' )
2017-01-29 01:21:31 +01:00
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2017-01-29 01:21:31 +01:00
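# Subscribing Cordelia to the invite-only stream should generate three
# events: a stream create event for her, a subscription add event, and a
# peer_add event.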
with tornado_redirected_to_list ( events ) :
bulk_add_subscriptions ( [ stream ] , [ user_profile ] )
self . assert_length ( events , 3 )
create_event , add_event , add_peer_event = events
self . assertEqual ( create_event [ ' event ' ] [ ' type ' ] , ' stream ' )
self . assertEqual ( create_event [ ' event ' ] [ ' op ' ] , ' create ' )
2018-03-16 10:57:17 +01:00
self . assertEqual ( create_event [ ' users ' ] , [ user_profile . id ] )
2017-01-29 01:21:31 +01:00
self . assertEqual ( create_event [ ' event ' ] [ ' streams ' ] [ 0 ] [ ' name ' ] , stream_name )
self . assertEqual ( add_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
self . assertEqual ( add_event [ ' event ' ] [ ' op ' ] , ' add ' )
self . assertEqual ( add_event [ ' users ' ] , [ user_profile . id ] )
self . assertEqual (
set ( add_event [ ' event ' ] [ ' subscriptions ' ] [ 0 ] [ ' subscribers ' ] ) ,
2020-04-10 05:23:40 +02:00
{ user_profile . id , existing_user_profile . id } ,
2017-01-29 01:21:31 +01:00
)
2018-02-14 17:59:01 +01:00
# We don't send a peer_add event to the newly subscribed user, but we do
# send a peer_add event to all realm admins.
2017-01-29 01:21:31 +01:00
self . assertNotIn ( user_profile . id , add_peer_event [ ' users ' ] )
2020-05-17 18:46:14 +02:00
self . assertEqual ( len ( add_peer_event [ ' users ' ] ) , 3 )
2017-01-29 01:21:31 +01:00
self . assertEqual ( add_peer_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
self . assertEqual ( add_peer_event [ ' event ' ] [ ' op ' ] , ' peer_add ' )
self . assertEqual ( add_peer_event [ ' event ' ] [ ' user_id ' ] , user_profile . id )
2018-03-16 10:57:17 +01:00
# Do not send a stream creation event to realm admins, even if the admin
# is subscribed to the stream, since realm admins already receive the
# private stream creation event when the stream is created.
2018-03-21 22:05:21 +01:00
new_stream = ensure_stream ( realm , " private stream " , invite_only = True )
2018-03-16 10:57:17 +01:00
events = [ ]
with tornado_redirected_to_list ( events ) :
bulk_add_subscriptions ( [ new_stream ] , [ self . example_user ( " iago " ) ] )
2020-05-17 18:46:14 +02:00
self . assert_length ( events , 3 )
create_event , add_event , add_peer_event = events
2018-03-16 10:57:17 +01:00
self . assertEqual ( create_event [ ' event ' ] [ ' type ' ] , ' stream ' )
self . assertEqual ( create_event [ ' event ' ] [ ' op ' ] , ' create ' )
self . assertEqual ( create_event [ ' users ' ] , [ ] )
self . assertEqual ( add_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
self . assertEqual ( add_event [ ' event ' ] [ ' op ' ] , ' add ' )
self . assertEqual ( add_event [ ' users ' ] , [ self . example_user ( " iago " ) . id ] )
2020-05-17 18:46:14 +02:00
self . assertEqual ( len ( add_peer_event [ ' users ' ] ) , 1 )
self . assertEqual ( add_peer_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
self . assertEqual ( add_peer_event [ ' event ' ] [ ' op ' ] , ' peer_add ' )
self . assertEqual ( add_peer_event [ ' event ' ] [ ' user_id ' ] , self . example_user ( " iago " ) . id )
2020-02-04 21:50:55 +01:00
def test_subscribe_to_stream_post_policy_admins_stream ( self ) - > None :
2019-01-24 09:16:35 +01:00
"""
Members can subscribe to streams where only admins can post
"""
member = self . example_user ( " AARON " )
2020-05-15 00:33:24 +02:00
stream = self . make_stream ( ' stream1 ' )
do_change_stream_post_policy ( stream , Stream . STREAM_POST_POLICY_ADMINS )
result = self . common_subscribe_to_streams ( member , [ " stream1 " ] )
2019-01-24 09:16:35 +01:00
self . assert_json_success ( result )
2020-05-15 00:33:24 +02:00
json = result . json ( )
self . assertEqual ( json [ " subscribed " ] , { member . email : [ " stream1 " ] } )
self . assertEqual ( json [ " already_subscribed " ] , { } )
2020-02-04 21:50:55 +01:00
def test_subscribe_to_stream_post_policy_restrict_new_members_stream ( self ) - > None :
"""
2020-05-15 00:33:24 +02:00
New members can subscribe to streams where they cannot post
2020-02-04 21:50:55 +01:00
"""
new_member_email = self . nonreg_email ( ' test ' )
self . register ( new_member_email , " test " )
new_member = self . nonreg_user ( ' test ' )
do_set_realm_property ( new_member . realm , ' waiting_period_threshold ' , 10 )
self . assertTrue ( new_member . is_new_member )
2020-05-15 00:33:24 +02:00
stream = self . make_stream ( ' stream1 ' )
do_change_stream_post_policy ( stream , Stream . STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS )
result = self . common_subscribe_to_streams ( new_member , [ " stream1 " ] )
self . assert_json_success ( result )
json = result . json ( )
self . assertEqual ( json [ " subscribed " ] , { new_member . email : [ " stream1 " ] } )
self . assertEqual ( json [ " already_subscribed " ] , { } )
2019-01-24 09:16:35 +01:00
2018-05-04 19:14:29 +02:00
def test_guest_user_subscribe ( self ) - > None :
2018-05-02 17:00:06 +02:00
""" Guest users cannot subscribe themselves to anything """
guest_user = self . example_user ( " polonius " )
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( guest_user , [ " Denmark " ] , allow_fail = True )
2018-05-04 19:14:29 +02:00
self . assert_json_error ( result , " Not allowed for guest users " )
2018-05-02 17:00:06 +02:00
2018-05-04 19:14:29 +02:00
# Verify the internal checks also block guest users.
stream = get_stream ( " Denmark " , guest_user . realm )
self . assertEqual ( filter_stream_authorization ( guest_user , [ stream ] ) ,
( [ ] , [ stream ] ) )
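# filter_stream_authorization splits the requested streams into
# (authorized, unauthorized); the guest is not authorized for this
# regular public stream.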
2018-06-12 17:34:59 +02:00
# Test UserProfile.can_create_streams for guest users.
streams_raw = [ {
' invite_only ' : False ,
' history_public_to_subscribers ' : None ,
' name ' : ' new_stream ' ,
2020-04-10 05:23:40 +02:00
' stream_post_policy ' : Stream . STREAM_POST_POLICY_EVERYONE ,
2018-06-12 17:34:59 +02:00
} ]
with self . assertRaisesRegex ( JsonableError , " User cannot create streams. " ) :
list_to_streams ( streams_raw , guest_user )
2018-05-04 19:14:29 +02:00
stream = self . make_stream ( ' private_stream ' , invite_only = True )
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( guest_user , [ " private_stream " ] , allow_fail = True )
2018-05-04 19:14:29 +02:00
self . assert_json_error ( result , " Not allowed for guest users " )
self . assertEqual ( filter_stream_authorization ( guest_user , [ stream ] ) ,
( [ ] , [ stream ] ) )
2018-05-02 17:00:06 +02:00
2020-07-24 05:30:58 +02:00
web_public_stream = self . make_stream ( ' web_public_stream ' , is_web_public = True )
public_stream = self . make_stream ( ' public_stream ' , invite_only = False )
private_stream = self . make_stream ( ' private_stream2 ' , invite_only = True )
# The check below should become a real subscription test as soon as the
# subscription endpoint allows guest users to subscribe to web-public
# streams. Guests are already authorized here; the decorator on
# "add_subscriptions_backend" still needs to be deleted.
#
# result = self.common_subscribe_to_streams(guest_user, ['web_public_stream'],
# is_web_public=True, allow_fail=True)
# self.assert_json_success(result)
streams_to_sub = [ web_public_stream , public_stream , private_stream ]
self . assertEqual ( filter_stream_authorization ( guest_user , streams_to_sub ) ,
( [ web_public_stream ] , [ public_stream , private_stream ] ) )
2017-11-05 10:51:25 +01:00
def test_users_getting_add_peer_event ( self ) - > None :
2016-07-22 23:30:47 +02:00
"""
Check that the correct set of users receives the add_peer_event .
"""
streams_to_sub = [ ' multi_user_stream ' ]
2020-03-12 14:17:25 +01:00
othello = self . example_user ( ' othello ' )
cordelia = self . example_user ( ' cordelia ' )
iago = self . example_user ( ' iago ' )
2020-04-09 19:07:57 +02:00
orig_user_ids_to_subscribe = [ self . test_user . id , othello . id ]
2016-07-22 23:30:47 +02:00
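# Subscribe hamlet and othello first; they should later appear among the
# recipients of the peer_add events for the new subscribers.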
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2016-07-22 23:30:47 +02:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( orig_user_ids_to_subscribe ) . decode ( ) ) )
2016-07-22 23:30:47 +02:00
2020-04-09 19:07:57 +02:00
new_user_ids_to_subscribe = [ iago . id , cordelia . id ]
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2016-07-22 23:30:47 +02:00
with tornado_redirected_to_list ( events ) :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2017-01-24 07:06:13 +01:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( new_user_ids_to_subscribe ) . decode ( ) ) ,
2016-07-22 23:30:47 +02:00
)
add_peer_events = [ events [ 2 ] , events [ 3 ] ]
for add_peer_event in add_peer_events :
self . assertEqual ( add_peer_event [ ' event ' ] [ ' type ' ] , ' subscription ' )
self . assertEqual ( add_peer_event [ ' event ' ] [ ' op ' ] , ' peer_add ' )
event_sent_to_ids = add_peer_event [ ' users ' ]
2020-04-09 19:07:57 +02:00
for user_id in new_user_ids_to_subscribe :
2016-07-22 23:30:47 +02:00
# Make sure new users subscribed to the stream are not in the
# peer_add event recipient list
2020-04-09 19:07:57 +02:00
self . assertNotIn ( user_id , event_sent_to_ids )
for old_user in orig_user_ids_to_subscribe :
2016-07-22 23:30:47 +02:00
# Check that non-new users are in the peer_add event recipient list.
2020-04-09 19:07:57 +02:00
self . assertIn ( old_user , event_sent_to_ids )
2016-07-22 23:30:47 +02:00
2017-11-05 10:51:25 +01:00
def test_users_getting_remove_peer_event ( self ) - > None :
2016-10-20 20:12:39 +02:00
"""
Check that the users receiving the peer_remove event are correct
"""
2017-08-25 06:01:29 +02:00
user1 = self . example_user ( " othello " )
user2 = self . example_user ( " cordelia " )
user3 = self . example_user ( " hamlet " )
user4 = self . example_user ( " iago " )
2018-02-14 17:59:01 +01:00
user5 = self . example_user ( " AARON " )
2016-10-20 20:12:39 +02:00
2016-10-21 23:22:25 +02:00
stream1 = self . make_stream ( ' stream1 ' )
stream2 = self . make_stream ( ' stream2 ' )
private = self . make_stream ( ' private_stream ' , invite_only = True )
2016-10-20 20:12:39 +02:00
2017-08-25 06:01:29 +02:00
self . subscribe ( user1 , ' stream1 ' )
self . subscribe ( user2 , ' stream1 ' )
self . subscribe ( user3 , ' stream1 ' )
2016-10-20 20:12:39 +02:00
2017-08-25 06:01:29 +02:00
self . subscribe ( user2 , ' stream2 ' )
2016-10-20 20:12:39 +02:00
2017-08-25 06:01:29 +02:00
self . subscribe ( user1 , ' private_stream ' )
self . subscribe ( user2 , ' private_stream ' )
self . subscribe ( user3 , ' private_stream ' )
2016-10-20 20:12:39 +02:00
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2016-10-20 20:12:39 +02:00
with tornado_redirected_to_list ( events ) :
bulk_remove_subscriptions (
2018-03-14 00:13:21 +01:00
[ user1 , user2 ] ,
[ stream1 , stream2 , private ] ,
2020-04-10 05:23:40 +02:00
get_client ( " website " ) ,
2016-10-20 20:12:39 +02:00
)
peer_events = [ e for e in events
2016-12-03 00:04:17 +01:00
if e [ ' event ' ] . get ( ' op ' ) == ' peer_remove ' ]
2016-10-20 20:12:39 +02:00
notifications = set ( )
for event in peer_events :
for user_id in event [ ' users ' ] :
2020-06-12 16:54:01 +02:00
stream_id = event [ ' event ' ] [ ' stream_id ' ]
stream_name = Stream . objects . get ( id = stream_id ) . name
removed_user_id = event [ ' event ' ] [ ' user_id ' ]
notifications . add ( ( user_id , removed_user_id , stream_name ) )
2016-10-20 20:12:39 +02:00
# POSITIVE CASES FIRST
2016-11-08 15:04:18 +01:00
self . assertIn ( ( user3 . id , user1 . id , ' stream1 ' ) , notifications )
self . assertIn ( ( user4 . id , user1 . id , ' stream1 ' ) , notifications )
2016-10-20 20:12:39 +02:00
2016-11-08 15:04:18 +01:00
self . assertIn ( ( user3 . id , user2 . id , ' stream1 ' ) , notifications )
self . assertIn ( ( user4 . id , user2 . id , ' stream1 ' ) , notifications )
2016-10-20 20:12:39 +02:00
2016-11-08 15:04:18 +01:00
self . assertIn ( ( user1 . id , user2 . id , ' stream2 ' ) , notifications )
self . assertIn ( ( user3 . id , user2 . id , ' stream2 ' ) , notifications )
self . assertIn ( ( user4 . id , user2 . id , ' stream2 ' ) , notifications )
2016-10-20 20:12:39 +02:00
2016-11-08 15:04:18 +01:00
self . assertIn ( ( user3 . id , user1 . id , ' private_stream ' ) , notifications )
self . assertIn ( ( user3 . id , user2 . id , ' private_stream ' ) , notifications )
2016-10-20 20:12:39 +02:00
2018-02-14 17:59:01 +01:00
self . assertIn ( ( user4 . id , user1 . id , ' private_stream ' ) , notifications )
self . assertIn ( ( user4 . id , user2 . id , ' private_stream ' ) , notifications )
2016-10-20 20:12:39 +02:00
# NEGATIVE
# don't be notified if you are being removed yourself
2016-11-08 15:04:18 +01:00
self . assertNotIn ( ( user1 . id , user1 . id , ' stream1 ' ) , notifications )
2016-10-20 20:12:39 +02:00
# don't send false notifications for folks that weren't actually
# subscribed in the first place
2016-11-08 15:04:18 +01:00
self . assertNotIn ( ( user3 . id , user1 . id , ' stream2 ' ) , notifications )
2016-10-20 20:12:39 +02:00
# don't send notifications for random people
2016-11-08 15:04:18 +01:00
self . assertNotIn ( ( user3 . id , user4 . id , ' stream2 ' ) , notifications )
2016-10-20 20:12:39 +02:00
2018-02-14 17:59:01 +01:00
# don't send notifications to unsubscribed non-realm-admin users for private streams
self . assertNotIn ( ( user5 . id , user1 . id , ' private_stream ' ) , notifications )
2016-10-20 20:12:39 +02:00
2017-11-05 10:51:25 +01:00
def test_bulk_subscribe_MIT ( self ) - > None :
2018-08-21 19:20:31 +02:00
mit_user = self . mit_user ( ' starnine ' )
2017-03-04 09:19:37 +01:00
realm = get_realm ( " zephyr " )
2020-06-10 06:41:04 +02:00
stream_names = [ f " stream_ { i } " for i in range ( 40 ) ]
2018-08-21 19:20:31 +02:00
streams = [
2016-10-21 23:22:25 +02:00
self . make_stream ( stream_name , realm = realm )
2018-08-21 19:20:31 +02:00
for stream_name in stream_names ]
for stream in streams :
stream . is_in_zephyr_realm = True
stream . save ( )
2014-01-29 22:03:40 +01:00
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2014-01-29 22:03:40 +01:00
with tornado_redirected_to_list ( events ) :
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
mit_user ,
2018-08-21 19:20:31 +02:00
stream_names ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ mit_user . id ] ) . decode ( ) ) ,
2017-08-26 00:58:13 +02:00
subdomain = " zephyr " ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
2014-01-29 22:03:40 +01:00
)
2016-07-27 01:45:29 +02:00
# Make sure Zephyr mirroring realms such as MIT do not get
# any tornado subscription events
2016-09-25 21:30:10 +02:00
self . assert_length ( events , 0 )
2020-03-10 11:48:26 +01:00
self . assert_length ( queries , 5 )
2014-01-29 22:03:40 +01:00
2018-08-21 19:20:54 +02:00
events = [ ]
with tornado_redirected_to_list ( events ) :
bulk_remove_subscriptions (
users = [ mit_user ] ,
streams = streams ,
acting_client = get_client ( ' website ' ) ,
)
self . assert_length ( events , 0 )
2017-11-05 10:51:25 +01:00
def test_bulk_subscribe_many ( self ) - > None :
2016-06-04 19:50:38 +02:00
2014-01-29 22:03:40 +01:00
# Create a whole bunch of streams
2020-06-10 06:41:04 +02:00
streams = [ f " stream_ { i } " for i in range ( 20 ) ]
2016-10-21 23:22:25 +02:00
for stream_name in streams :
self . make_stream ( stream_name )
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
2019-01-31 14:32:37 +01:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2019-01-31 14:32:37 +01:00
streams ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ self . test_user . id ] ) . decode ( ) ) ,
2019-01-31 14:32:37 +01:00
)
2014-01-29 22:03:40 +01:00
# Make sure we don't make O(streams) queries
2020-03-10 11:48:26 +01:00
self . assert_length ( queries , 16 )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_for_principal ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
You can subscribe other people to streams .
"""
2020-04-09 19:07:57 +02:00
invitee = self . example_user ( " iago " )
current_streams = self . get_streams ( invitee )
invite_streams = self . make_random_stream_names ( current_streams )
self . assert_adding_subscriptions_for_principal ( invitee . id , invitee . realm , invite_streams )
def test_subscriptions_add_for_principal_legacy_emails ( self ) - > None :
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
current_streams = self . get_streams ( invitee )
2014-01-29 22:03:40 +01:00
invite_streams = self . make_random_stream_names ( current_streams )
2020-03-09 21:41:26 +01:00
self . assert_adding_subscriptions_for_principal ( invitee . email , invitee . realm , invite_streams )
2014-01-29 22:03:40 +01:00
2018-05-21 03:54:42 +02:00
def test_subscriptions_add_for_principal_deactivated ( self ) - > None :
"""
You can't subscribe deactivated people to streams.
"""
target_profile = self . example_user ( " cordelia " )
2020-03-12 14:17:25 +01:00
post_data = dict (
2020-08-07 01:09:47 +02:00
principals = orjson . dumps ( [ target_profile . id ] ) . decode ( ) ,
2020-03-12 14:17:25 +01:00
)
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( self . test_user , " Verona " , post_data )
2018-05-21 03:54:42 +02:00
do_deactivate_user ( target_profile )
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( self . test_user , " Denmark " , post_data , allow_fail = True )
2020-03-12 14:17:25 +01:00
self . assert_json_error (
result ,
2020-06-09 00:25:09 +02:00
f " User not authorized to execute queries on behalf of ' { target_profile . id } ' " ,
2020-03-12 14:17:25 +01:00
status_code = 403 )
2018-05-21 03:54:42 +02:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_for_principal_invite_only ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
You can subscribe other people to invite-only streams.
"""
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
current_streams = self . get_streams ( invitee )
2014-01-29 22:03:40 +01:00
invite_streams = self . make_random_stream_names ( current_streams )
2020-04-09 19:07:57 +02:00
self . assert_adding_subscriptions_for_principal ( invitee . id , invitee . realm , invite_streams ,
2014-01-29 22:03:40 +01:00
invite_only = True )
2017-11-05 10:51:25 +01:00
def test_non_ascii_subscription_for_principal ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
You can subscribe other people to streams even if the stream names
contain non-ASCII characters.
"""
2020-03-12 14:17:25 +01:00
iago = self . example_user ( ' iago ' )
2020-04-09 19:07:57 +02:00
self . assert_adding_subscriptions_for_principal ( iago . id , get_realm ( ' zulip ' ) , [ " hümbüǵ " ] )
2014-01-29 22:03:40 +01:00
2020-04-09 19:07:57 +02:00
def test_subscription_add_invalid_principal_legacy_emails ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling subscribe on behalf of a principal that does not exist
should return a JSON error .
"""
invalid_principal = " rosencrantz-and-guildenstern@zulip.com "
2017-05-23 20:57:59 +02:00
invalid_principal_realm = get_realm ( " zulip " )
2014-01-29 22:03:40 +01:00
# verify that invalid_principal actually doesn't exist
with self . assertRaises ( UserProfile . DoesNotExist ) :
2017-05-23 20:57:59 +02:00
get_user ( invalid_principal , invalid_principal_realm )
2020-03-09 21:41:26 +01:00
result = self . common_subscribe_to_streams ( self . test_user , self . streams ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invalid_principal ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True )
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " User not authorized to execute queries on behalf of ' { invalid_principal } ' " ,
status_code = 403 ,
)
2014-01-29 22:03:40 +01:00
2020-04-09 19:07:57 +02:00
def test_subscription_add_invalid_principal ( self ) - > None :
invalid_principal = 999
invalid_principal_realm = get_realm ( " zulip " )
with self . assertRaises ( UserProfile . DoesNotExist ) :
get_user_profile_by_id_in_realm ( invalid_principal , invalid_principal_realm )
result = self . common_subscribe_to_streams ( self . test_user , self . streams ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invalid_principal ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True )
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " User not authorized to execute queries on behalf of ' { invalid_principal } ' " ,
status_code = 403 ,
)
2020-04-09 19:07:57 +02:00
2017-11-05 10:51:25 +01:00
def test_subscription_add_principal_other_realm ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling subscribe on behalf of a principal in another realm
should return a JSON error .
"""
2017-05-23 02:33:53 +02:00
profile = self . mit_user ( ' starnine ' )
2020-04-09 19:07:57 +02:00
principal = profile . id
2014-01-29 22:03:40 +01:00
# verify that principal exists (thus, the reason for the error is the cross-realming)
self . assertIsInstance ( profile , UserProfile )
2020-03-09 21:41:26 +01:00
result = self . common_subscribe_to_streams ( self . test_user , self . streams ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ principal ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True )
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " User not authorized to execute queries on behalf of ' { principal } ' " ,
status_code = 403 ,
)
2014-01-29 22:03:40 +01:00
2018-05-11 01:39:38 +02:00
def helper_check_subs_before_and_after_remove ( self , subscriptions : List [ str ] ,
2017-11-20 03:22:57 +01:00
json_dict : Dict [ str , Any ] ,
2018-05-11 01:39:38 +02:00
email : str , new_subs : List [ str ] ,
2017-11-20 03:22:57 +01:00
realm : Realm ) - > None :
2014-01-29 22:03:40 +01:00
"""
Check the result of removing subscriptions.
Unlike adding subscriptions, you can only remove subscriptions
for yourself, so the result format is different.
{"msg": "",
 "removed": ["Denmark", "Scotland", "Verona"],
2019-10-13 05:30:34 +02:00
 "not_removed": ["Rome"], "result": "success"}
2014-01-29 22:03:40 +01:00
"""
2016-12-23 02:37:10 +01:00
result = self . client_delete ( " /json/users/me/subscriptions " ,
2020-08-07 01:09:47 +02:00
{ " subscriptions " : orjson . dumps ( subscriptions ) . decode ( ) } )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2017-09-27 10:11:59 +02:00
for key , val in json_dict . items ( ) :
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( val ) , sorted ( json [ key ] ) ) # we don't care about the order of the items
2020-03-09 21:41:26 +01:00
user = get_user ( email , realm )
new_streams = self . get_streams ( user )
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( new_streams ) , sorted ( new_subs ) )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_remove ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-12-23 02:37:10 +01:00
Calling DELETE /json/users/me/subscriptions should successfully remove streams,
2014-01-29 22:03:40 +01:00
and should determine which were removed vs which weren't subscribed to.
We cannot randomly generate stream names because the remove code
verifies whether streams exist.
"""
2017-03-05 08:57:51 +01:00
self . assertGreaterEqual ( len ( self . streams ) , 2 )
2014-01-29 22:03:40 +01:00
streams_to_remove = self . streams [ 1 : ]
not_subbed = [ ]
for stream in Stream . objects . all ( ) :
2016-05-10 01:55:43 +02:00
if stream . name not in self . streams :
2014-01-29 22:03:40 +01:00
not_subbed . append ( stream . name )
random . shuffle ( not_subbed )
self . assertNotEqual ( len ( not_subbed ) , 0 ) # necessary for full test coverage
try_to_remove = not_subbed [ : 3 ] # attempt to remove up to 3 streams not already subbed to
streams_to_remove . extend ( try_to_remove )
self . helper_check_subs_before_and_after_remove ( streams_to_remove ,
2019-10-13 05:30:34 +02:00
{ " removed " : self . streams [ 1 : ] , " not_removed " : try_to_remove } ,
2017-07-12 13:07:48 +02:00
self . test_email , [ self . streams [ 0 ] ] , self . test_realm )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_remove_fake_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-12-23 02:37:10 +01:00
Calling DELETE /json/users/me/subscriptions on a stream that doesn't exist
2014-01-29 22:03:40 +01:00
should return a JSON error.
"""
random_streams = self . make_random_stream_names ( self . streams )
self . assertNotEqual ( len ( random_streams ) , 0 ) # necessary for full test coverage
streams_to_remove = random_streams [ : 1 ] # pick only one fake stream, to make checking the error message easy
2016-12-23 02:37:10 +01:00
result = self . client_delete ( " /json/users/me/subscriptions " ,
2020-08-07 01:09:47 +02:00
{ " subscriptions " : orjson . dumps ( streams_to_remove ) . decode ( ) } )
2020-06-10 06:41:04 +02:00
self . assert_json_error ( result , f " Stream(s) ( { random_streams [ 0 ] } ) do not exist " )
2014-01-29 22:03:40 +01:00
2018-05-11 01:39:38 +02:00
def helper_subscriptions_exists ( self , stream : str , expect_success : bool , subscribed : bool ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-07-25 02:15:40 +02:00
Call /json/subscriptions/exists on a stream and expect a certain result.
2014-01-29 22:03:40 +01:00
"""
2016-07-28 00:30:22 +02:00
result = self . client_post ( " /json/subscriptions/exists " ,
2014-01-29 22:03:40 +01:00
{ " stream " : stream } )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2017-07-25 02:15:40 +02:00
if expect_success :
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
else :
2016-12-16 02:01:34 +01:00
self . assertEqual ( result . status_code , 404 )
2016-06-04 19:50:38 +02:00
if subscribed :
2014-01-29 22:03:40 +01:00
self . assertIn ( " subscribed " , json )
self . assertEqual ( json [ " subscribed " ] , subscribed )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_exists_subbed ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling /json/subscriptions/exists on a stream to which you are subbed
should return that it exists and that you are subbed.
"""
self . assertNotEqual ( len ( self . streams ) , 0 ) # necessary for full test coverage
self . helper_subscriptions_exists ( self . streams [ 0 ] , True , True )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_exists_not_subbed ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling /json/subscriptions/exists on a stream to which you are not
subbed should return that it exists and that you are not subbed.
"""
2017-07-12 12:32:14 +02:00
all_stream_names = [ stream . name for stream in Stream . objects . filter ( realm = self . test_realm ) ]
2014-01-29 22:03:40 +01:00
streams_not_subbed = list ( set ( all_stream_names ) - set ( self . streams ) )
self . assertNotEqual ( len ( streams_not_subbed ) , 0 ) # necessary for full test coverage
self . helper_subscriptions_exists ( streams_not_subbed [ 0 ] , True , False )
2017-11-05 10:51:25 +01:00
def test_subscriptions_does_not_exist ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling /json/subscriptions/exists on a stream that doesn't exist should
return that it doesn't exist.
"""
random_streams = self . make_random_stream_names ( self . streams )
self . assertNotEqual ( len ( random_streams ) , 0 ) # necessary for full test coverage
2016-06-04 19:50:38 +02:00
self . helper_subscriptions_exists ( random_streams [ 0 ] , False , False )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_exist_invalid_name ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling /json/subscriptions/exists on a stream whose name is invalid (as
defined by valid_stream_name in zerver/views.py) should return a JSON
error.
"""
# currently, the only invalid stream name is the empty string
invalid_stream_name = " "
2016-07-28 00:30:22 +02:00
result = self . client_post ( " /json/subscriptions/exists " ,
2014-01-29 22:03:40 +01:00
{ " stream " : invalid_stream_name } )
2017-01-30 07:01:19 +01:00
self . assert_json_error ( result , " Invalid stream name ' ' " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_existing_subscriptions_autosubscription ( self ) - > None :
2016-06-21 15:54:18 +02:00
"""
Call /json/subscriptions/exists on an existing stream and autosubscribe to it.
"""
2017-01-12 01:41:16 +01:00
stream_name = " new_public_stream "
2020-03-09 21:41:26 +01:00
cordelia = self . example_user ( ' cordelia ' )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( cordelia , [ stream_name ] , invite_only = False )
2016-07-28 00:30:22 +02:00
result = self . client_post ( " /json/subscriptions/exists " ,
2017-01-12 01:41:16 +01:00
{ " stream " : stream_name , " autosubscribe " : " false " } )
2016-06-21 15:54:18 +02:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscribed " , result . json ( ) )
self . assertFalse ( result . json ( ) [ " subscribed " ] )
2017-01-12 01:41:16 +01:00
result = self . client_post ( " /json/subscriptions/exists " ,
{ " stream " : stream_name , " autosubscribe " : " true " } )
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscribed " , result . json ( ) )
self . assertTrue ( result . json ( ) [ " subscribed " ] )
2016-06-21 15:54:18 +02:00
2017-11-05 10:51:25 +01:00
def test_existing_subscriptions_autosubscription_private_stream ( self ) - > None :
2017-01-23 05:22:40 +01:00
""" Call /json/subscriptions/exist on an existing private stream with
autosubscribe should fail .
"""
stream_name = " Saxony "
2020-03-09 21:41:26 +01:00
cordelia = self . example_user ( ' cordelia ' )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( cordelia , [ stream_name ] , invite_only = True )
2017-07-12 12:32:14 +02:00
stream = get_stream ( stream_name , self . test_realm )
2017-01-23 05:22:40 +01:00
result = self . client_post ( " /json/subscriptions/exists " ,
2017-01-12 01:41:16 +01:00
{ " stream " : stream_name , " autosubscribe " : " true " } )
# We can't see invite-only streams here
self . assert_json_error ( result , " Invalid stream name ' Saxony ' " , status_code = 404 )
# Importantly, we are not now subscribed
2017-10-29 15:40:07 +01:00
self . assertEqual ( num_subscribers_for_stream_id ( stream . id ) , 1 )
2017-01-12 01:41:16 +01:00
# A user who is subscribed still sees the stream exists
2020-03-06 18:40:46 +01:00
self . login ( ' cordelia ' )
2017-01-12 01:41:16 +01:00
result = self . client_post ( " /json/subscriptions/exists " ,
{ " stream " : stream_name , " autosubscribe " : " false " } )
2017-01-23 05:22:40 +01:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscribed " , result . json ( ) )
self . assertTrue ( result . json ( ) [ " subscribed " ] )
2017-01-23 05:22:40 +01:00
2018-05-11 01:39:38 +02:00
def get_subscription ( self , user_profile : UserProfile , stream_name : str ) - > Subscription :
2017-07-12 12:32:14 +02:00
stream = get_stream ( stream_name , self . test_realm )
2014-01-29 22:03:40 +01:00
return Subscription . objects . get (
user_profile = user_profile ,
recipient__type = Recipient . STREAM ,
recipient__type_id = stream . id ,
)
2019-02-13 10:22:16 +01:00
def test_subscriptions_add_notification_default_none ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-08-17 16:55:32 +02:00
When creating a subscription, the desktop, push, audible, and email notification
2019-02-13 10:22:16 +01:00
settings for that stream are None. A value of None means to use the values
inherited from the global notification settings.
2014-01-29 22:03:40 +01:00
"""
2017-05-07 21:25:59 +02:00
user_profile = self . example_user ( ' iago ' )
2020-04-09 19:07:57 +02:00
invitee_user_id = user_profile . id
2017-05-23 20:57:59 +02:00
invitee_realm = user_profile . realm
2014-02-05 22:56:30 +01:00
user_profile . enable_stream_desktop_notifications = True
2017-08-17 16:55:32 +02:00
user_profile . enable_stream_push_notifications = True
2019-06-11 08:47:49 +02:00
user_profile . enable_stream_audible_notifications = True
2017-11-21 04:35:26 +01:00
user_profile . enable_stream_email_notifications = True
2014-01-29 22:03:40 +01:00
user_profile . save ( )
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( user_profile ) [ 0 ]
2016-06-04 19:50:38 +02:00
invite_streams = self . make_random_stream_names ( [ current_stream ] )
2020-04-09 19:07:57 +02:00
self . assert_adding_subscriptions_for_principal ( invitee_user_id , invitee_realm , invite_streams )
2014-01-29 22:03:40 +01:00
subscription = self . get_subscription ( user_profile , invite_streams [ 0 ] )
2016-10-04 01:05:44 +02:00
2017-10-27 09:06:40 +02:00
with mock . patch ( ' zerver.models.Recipient.__str__ ' , return_value = ' recip ' ) :
2016-10-04 01:05:44 +02:00
self . assertEqual ( str ( subscription ) ,
2020-04-09 21:51:58 +02:00
' <Subscription: '
2020-06-14 02:57:50 +02:00
f ' <UserProfile: { user_profile . email } { user_profile . realm } > -> recip> ' )
2016-10-04 01:05:44 +02:00
2019-02-13 10:22:16 +01:00
self . assertIsNone ( subscription . desktop_notifications )
self . assertIsNone ( subscription . push_notifications )
self . assertIsNone ( subscription . audible_notifications )
self . assertIsNone ( subscription . email_notifications )
2014-01-29 22:03:40 +01:00
2017-11-13 21:24:51 +01:00
def test_mark_messages_as_unread_on_unsubscribe ( self ) - > None :
realm = get_realm ( " zulip " )
user = self . example_user ( " iago " )
random_user = self . example_user ( " hamlet " )
2018-03-21 22:05:21 +01:00
stream1 = ensure_stream ( realm , " stream1 " , invite_only = False )
stream2 = ensure_stream ( realm , " stream2 " , invite_only = False )
private = ensure_stream ( realm , " private_stream " , invite_only = True )
2017-11-13 21:24:51 +01:00
self . subscribe ( user , " stream1 " )
self . subscribe ( user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . subscribe ( user , " private_stream " )
2017-11-13 21:24:51 +01:00
self . subscribe ( random_user , " stream1 " )
self . subscribe ( random_user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . subscribe ( random_user , " private_stream " )
2017-11-13 21:24:51 +01:00
2020-03-07 11:43:05 +01:00
self . send_stream_message ( random_user , " stream1 " , " test " , " test " )
self . send_stream_message ( random_user , " stream2 " , " test " , " test " )
self . send_stream_message ( random_user , " private_stream " , " test " , " test " )
2017-11-13 21:24:51 +01:00
def get_unread_stream_data ( ) - > List [ Dict [ str , Any ] ] :
raw_unread_data = get_raw_unread_data ( user )
aggregated_data = aggregate_unread_data ( raw_unread_data )
return aggregated_data [ ' streams ' ]
result = get_unread_stream_data ( )
2017-11-29 23:35:33 +01:00
self . assert_length ( result , 3 )
2017-11-13 21:24:51 +01:00
self . assertEqual ( result [ 0 ] [ ' stream_id ' ] , stream1 . id )
self . assertEqual ( result [ 1 ] [ ' stream_id ' ] , stream2 . id )
2017-11-29 23:35:33 +01:00
self . assertEqual ( result [ 2 ] [ ' stream_id ' ] , private . id )
2017-11-13 21:24:51 +01:00
# Unsubscribing should mark all the messages in stream2 as read
self . unsubscribe ( user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . unsubscribe ( user , " private_stream " )
2017-11-13 21:24:51 +01:00
self . subscribe ( user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . subscribe ( user , " private_stream " )
2017-11-13 21:24:51 +01:00
result = get_unread_stream_data ( )
self . assert_length ( result , 1 )
self . assertEqual ( result [ 0 ] [ ' stream_id ' ] , stream1 . id )
2014-01-29 22:03:40 +01:00
2018-05-15 17:33:16 +02:00
def test_gather_subscriptions_excludes_deactivated_streams ( self ) - > None :
"""
Check that gather_subscriptions_helper does not include deactivated streams in its
results .
"""
realm = get_realm ( " zulip " )
admin_user = self . example_user ( " iago " )
non_admin_user = self . example_user ( " cordelia " )
2020-03-06 18:40:46 +01:00
self . login_user ( admin_user )
2018-05-15 17:33:16 +02:00
2020-04-10 05:23:40 +02:00
for stream_name in [ " stream1 " , " stream2 " , " stream3 " ] :
2018-05-15 17:33:16 +02:00
self . make_stream ( stream_name , realm = realm , invite_only = False )
self . subscribe ( admin_user , stream_name )
self . subscribe ( non_admin_user , stream_name )
self . subscribe ( self . example_user ( " othello " ) , stream_name )
def delete_stream ( stream_name : str ) - > None :
stream_id = get_stream ( stream_name , realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_delete ( f ' /json/streams/ { stream_id } ' )
2018-05-15 17:33:16 +02:00
self . assert_json_success ( result )
# Deleted/deactivated stream should not be returned in the helper results
admin_before_delete = gather_subscriptions_helper ( admin_user )
non_admin_before_delete = gather_subscriptions_helper ( non_admin_user )
# Delete our stream
delete_stream ( " stream1 " )
# Get subs after delete
admin_after_delete = gather_subscriptions_helper ( admin_user )
non_admin_after_delete = gather_subscriptions_helper ( non_admin_user )
# Compare results - should be one stream fewer
self . assertTrue (
len ( admin_before_delete [ 0 ] ) == len ( admin_after_delete [ 0 ] ) + 1 ,
' Expected exactly 1 less stream from gather_subscriptions_helper ' )
self . assertTrue (
len ( non_admin_before_delete [ 0 ] ) == len ( non_admin_after_delete [ 0 ] ) + 1 ,
' Expected exactly 1 less stream from gather_subscriptions_helper ' )
2018-05-16 03:36:18 +02:00
def test_validate_user_access_to_subscribers_helper ( self ) - > None :
"""
Ensure that validate_user_access_to_subscribers_helper properly raises
ValidationError for a missing user and for a user not in the realm.
"""
user_profile = self . example_user ( ' othello ' )
realm_name = ' no_othello_allowed '
realm = do_create_realm ( realm_name , ' Everyone but Othello is allowed ' )
stream_dict = {
' name ' : ' publicstream ' ,
' description ' : ' Public stream with public history ' ,
2020-04-10 05:23:40 +02:00
' realm_id ' : realm . id ,
2018-05-16 03:36:18 +02:00
}
# For this test to work, othello can't be in the no_othello_allowed realm
self . assertNotEqual ( user_profile . realm . id , realm . id , ' Expected othello user to not be in this realm. ' )
# This should result in missing user
with self . assertRaises ( ValidationError ) :
2020-06-23 00:33:46 +02:00
validate_user_access_to_subscribers_helper ( None , stream_dict , lambda user_profile : True )
2018-05-16 03:36:18 +02:00
# This should result in user not in realm
with self . assertRaises ( ValidationError ) :
2020-06-23 00:33:46 +02:00
validate_user_access_to_subscribers_helper ( user_profile , stream_dict , lambda user_profile : True )
2018-05-15 17:33:16 +02:00
2018-08-17 03:33:16 +02:00
def test_subscriptions_query_count ( self ) - > None :
"""
Test the database query count when creating streams via /api/v1/users/me/subscriptions.
"""
user1 = self . example_user ( " cordelia " )
user2 = self . example_user ( " iago " )
new_streams = [
' query_count_stream_1 ' ,
' query_count_stream_2 ' ,
2020-04-10 05:23:40 +02:00
' query_count_stream_3 ' ,
2018-08-17 03:33:16 +02:00
]
# Test creating a public stream when realm does not have a notification stream.
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2018-08-17 03:33:16 +02:00
[ new_streams [ 0 ] ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ) ,
2018-08-17 03:33:16 +02:00
)
2020-06-29 23:31:25 +02:00
self . assert_length ( queries , 40 )
2018-08-17 03:33:16 +02:00
# Test creating private stream.
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2018-08-17 03:33:16 +02:00
[ new_streams [ 1 ] ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ) ,
2018-08-17 03:33:16 +02:00
invite_only = True ,
)
2020-06-29 23:31:25 +02:00
self . assert_length ( queries , 40 )
2018-08-17 03:33:16 +02:00
# Test creating a public stream with announce when realm has a notification stream.
notifications_stream = get_stream ( self . streams [ 0 ] , self . test_realm )
self . test_realm . notifications_stream_id = notifications_stream . id
self . test_realm . save ( )
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2018-08-17 03:33:16 +02:00
[ new_streams [ 2 ] ] ,
dict (
announce = ' true ' ,
2020-08-07 01:09:47 +02:00
principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ,
2020-04-10 05:23:40 +02:00
) ,
2018-08-17 03:33:16 +02:00
)
2020-06-29 23:31:25 +02:00
self . assert_length ( queries , 52 )
2018-08-17 03:33:16 +02:00
2020-03-01 13:05:05 +01:00
class GetStreamsTest ( ZulipTestCase ) :
2019-02-28 22:20:24 +01:00
def test_streams_api_for_bot_owners ( self ) - > None :
hamlet = self . example_user ( ' hamlet ' )
2020-08-07 08:26:23 +02:00
test_bot = self . create_test_bot ( ' foo ' , hamlet )
2019-02-28 22:20:24 +01:00
assert test_bot is not None
realm = get_realm ( ' zulip ' )
2020-03-06 18:40:46 +01:00
self . login_user ( hamlet )
2019-02-28 22:20:24 +01:00
# Check it correctly lists the bot owner's subs with
# include_owner_subscribed=true
2020-07-30 00:43:58 +02:00
filters = dict (
include_owner_subscribed = " true " ,
include_public = " false " ,
include_subscribed = " false " ,
)
request_variables = urlencode ( filters )
2019-02-28 22:20:24 +01:00
result = self . api_get (
2020-03-10 11:48:26 +01:00
test_bot ,
2020-07-30 00:43:58 +02:00
f " /api/v1/streams? { request_variables } " )
2020-03-10 11:48:26 +01:00
owner_subs = self . api_get ( hamlet , " /api/v1/users/me/subscriptions " )
2019-02-28 22:20:24 +01:00
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
self . assert_json_success ( owner_subs )
2020-08-07 01:09:47 +02:00
owner_subs_json = orjson . loads ( owner_subs . content )
2019-02-28 22:20:24 +01:00
self . assertEqual ( sorted ( [ s [ " name " ] for s in json [ " streams " ] ] ) ,
sorted ( [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ] ) )
# Check it correctly lists the bot owner's subs and the
# bot's subs
self . subscribe ( test_bot , ' Scotland ' )
2020-07-30 00:43:58 +02:00
filters = dict (
include_owner_subscribed = " true " ,
include_public = " false " ,
include_subscribed = " true " ,
)
request_variables = urlencode ( filters )
2019-02-28 22:20:24 +01:00
result = self . api_get (
2020-03-10 11:48:26 +01:00
test_bot ,
2020-07-30 00:43:58 +02:00
f " /api/v1/streams? { request_variables } " ,
2019-02-28 22:20:24 +01:00
)
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
actual = sorted ( [ s [ " name " ] for s in json [ " streams " ] ] )
expected = [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ]
expected . append ( ' Scotland ' )
expected . sort ( )
self . assertEqual ( actual , expected )
# Check it correctly lists the bot owner's subs + all public streams
self . make_stream ( ' private_stream ' , realm = realm , invite_only = True )
self . subscribe ( test_bot , ' private_stream ' )
result = self . api_get (
2020-03-10 11:48:26 +01:00
test_bot ,
2020-04-10 05:23:40 +02:00
" /api/v1/streams?include_owner_subscribed=true&include_public=true&include_subscribed=false " ,
2019-02-28 22:20:24 +01:00
)
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
actual = sorted ( [ s [ " name " ] for s in json [ " streams " ] ] )
expected = [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ]
expected . extend ( [ ' Rome ' , ' Venice ' , ' Scotland ' ] )
expected . sort ( )
self . assertEqual ( actual , expected )
# Check it correctly lists the bot owner's subs + all public streams +
# the bot's subs
result = self . api_get (
2020-03-10 11:48:26 +01:00
test_bot ,
2020-04-10 05:23:40 +02:00
" /api/v1/streams?include_owner_subscribed=true&include_public=true&include_subscribed=true " ,
2019-02-28 22:20:24 +01:00
)
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
actual = sorted ( [ s [ " name " ] for s in json [ " streams " ] ] )
expected = [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ]
expected . extend ( [ ' Rome ' , ' Venice ' , ' Scotland ' , ' private_stream ' ] )
expected . sort ( )
self . assertEqual ( actual , expected )
2020-03-01 13:05:05 +01:00
def test_all_active_streams_api ( self ) - > None :
url = ' /api/v1/streams?include_all_active=true '
# Check non-superuser can't use include_all_active
normal_user = self . example_user ( ' cordelia ' )
2020-03-10 11:48:26 +01:00
result = self . api_get ( normal_user , url )
2020-03-01 13:05:05 +01:00
self . assertEqual ( result . status_code , 400 )
# Even realm admin users can't see all
# active streams (without additional privileges).
admin_user = self . example_user ( ' iago ' )
self . assertTrue ( admin_user . is_realm_admin )
2020-03-10 11:48:26 +01:00
result = self . api_get ( admin_user , url )
2020-03-01 13:05:05 +01:00
self . assertEqual ( result . status_code , 400 )
'''
HAPPY PATH:
We can get all active streams ONLY if we are
an API "super user".  We typically create
api-super-user accounts for things like
Zephyr/Jabber mirror API users, but here
we just "knight" Hamlet for testing expediency.
'''
super_user = self . example_user ( ' hamlet ' )
super_user . is_api_super_user = True
super_user . save ( )
2020-03-10 11:48:26 +01:00
result = self . api_get ( super_user , url )
2020-03-01 13:05:05 +01:00
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( ' streams ' , json )
self . assertIsInstance ( json [ ' streams ' ] , list )
stream_names = { s [ ' name ' ] for s in json [ ' streams ' ] }
self . assertEqual (
stream_names ,
2020-04-10 05:23:40 +02:00
{ ' Venice ' , ' Denmark ' , ' Scotland ' , ' Verona ' , ' Rome ' } ,
2020-03-01 13:05:05 +01:00
)
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_public_streams_api ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-04-17 14:47:43 +02:00
Ensure that the query we use to get public streams successfully returns
a list of streams
2014-01-29 22:03:40 +01:00
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( ' hamlet ' )
2017-05-23 20:57:59 +02:00
realm = get_realm ( ' zulip ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
# Check it correctly lists the user's subs with include_public=false
2020-03-10 11:48:26 +01:00
result = self . api_get ( user , " /api/v1/streams?include_public=false " )
result2 = self . api_get ( user , " /api/v1/users/me/subscriptions " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
self . assert_json_success ( result2 )
2020-08-07 01:09:47 +02:00
json2 = orjson . loads ( result2 . content )
2014-01-29 22:03:40 +01:00
self . assertEqual ( sorted ( [ s [ " name " ] for s in json [ " streams " ] ] ) ,
sorted ( [ s [ " name " ] for s in json2 [ " subscriptions " ] ] ) )
# Check it correctly lists all public streams with include_subscribed=false
2020-07-30 00:43:58 +02:00
filters = dict (
include_public = " true " ,
include_subscribed = " false "
)
request_variables = urlencode ( filters )
result = self . api_get ( user , f " /api/v1/streams? { request_variables } " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
all_streams = [ stream . name for stream in
2017-05-23 20:57:59 +02:00
Stream . objects . filter ( realm = realm ) ]
2014-01-29 22:03:40 +01:00
self . assertEqual ( sorted ( s [ " name " ] for s in json [ " streams " ] ) ,
sorted ( all_streams ) )
2017-01-03 18:31:43 +01:00
class StreamIdTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_get_stream_id ( self ) - > None :
2020-03-06 18:40:46 +01:00
user = self . example_user ( ' hamlet ' )
self . login_user ( user )
stream = gather_subscriptions ( user ) [ 0 ] [ 0 ]
2020-06-10 06:41:04 +02:00
result = self . client_get ( " /json/get_stream_id?stream= {} " . format ( stream [ ' name ' ] ) )
2017-01-03 18:31:43 +01:00
self . assert_json_success ( result )
self . assertEqual ( result . json ( ) [ ' stream_id ' ] , stream [ ' stream_id ' ] )
2017-11-05 10:51:25 +01:00
def test_get_stream_id_wrong_name ( self ) - > None :
2020-03-06 18:40:46 +01:00
user = self . example_user ( ' hamlet ' )
self . login_user ( user )
2017-01-03 18:31:43 +01:00
result = self . client_get ( " /json/get_stream_id?stream=wrongname " )
2020-04-09 21:51:58 +02:00
self . assert_json_error ( result , " Invalid stream name ' wrongname ' " )
2017-01-03 18:31:43 +01:00
2016-08-23 02:08:42 +02:00
class InviteOnlyStreamTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_must_be_subbed_to_send ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
If you try to send a message to an invite-only stream to which
you aren't subscribed, you'll get a 400.
"""
2020-03-07 11:43:05 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
# Create Saxony as an invite-only stream.
self . assert_json_success (
2020-03-09 21:41:26 +01:00
self . common_subscribe_to_streams ( user , [ " Saxony " ] ,
2014-01-29 22:03:40 +01:00
invite_only = True ) )
2020-03-07 11:43:05 +01:00
cordelia = self . example_user ( " cordelia " )
2014-01-29 22:03:40 +01:00
with self . assertRaises ( JsonableError ) :
2020-03-07 11:43:05 +01:00
self . send_stream_message ( cordelia , " Saxony " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_list_respects_invite_only_bit ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Make sure that /api/v1/users/me/subscriptions properly returns
the invite-only bit for streams that are invite-only
"""
2020-03-09 21:41:26 +01:00
user = self . example_user ( ' hamlet ' )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-09 21:41:26 +01:00
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( user , [ " Saxony " ] , invite_only = True )
self . common_subscribe_to_streams ( user , [ " Normandy " ] , invite_only = False )
2020-03-10 11:48:26 +01:00
result = self . api_get ( user , " /api/v1/users/me/subscriptions " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscriptions " , result . json ( ) )
for sub in result . json ( ) [ " subscriptions " ] :
2014-01-29 22:03:40 +01:00
if sub [ ' name ' ] == " Normandy " :
2018-06-27 00:04:03 +02:00
self . assertEqual ( sub [ ' invite_only ' ] , False , " Normandy was mistakenly marked private " )
2014-01-29 22:03:40 +01:00
if sub [ ' name ' ] == " Saxony " :
2018-06-27 00:04:03 +02:00
self . assertEqual ( sub [ ' invite_only ' ] , True , " Saxony was not properly marked private " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
    def test_inviteonly(self) -> None:
        # Creating an invite-only stream is allowed
        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        stream_name = "Saxony"

        result = self.common_subscribe_to_streams(hamlet, [stream_name], invite_only=True)

        json = result.json()
        self.assertEqual(json["subscribed"], {hamlet.email: [stream_name]})
        self.assertEqual(json["already_subscribed"], {})

        # Subscribing oneself to an invite-only stream is not allowed
        self.login_user(othello)
        result = self.common_subscribe_to_streams(othello, [stream_name], allow_fail=True)
        self.assert_json_error(result, 'Unable to access stream (Saxony).')

        # authorization_errors_fatal=False works
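        # (i.e. the request succeeds, and the streams the user could not join are
        # reported back under the "unauthorized" key instead of raising an error;
        # see the assertions below).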
        self.login_user(othello)
        result = self.common_subscribe_to_streams(othello, [stream_name],
                                                  extra_post_data={'authorization_errors_fatal': orjson.dumps(False).decode()})
        json = result.json()
        self.assertEqual(json["unauthorized"], [stream_name])
        self.assertEqual(json["subscribed"], {})
        self.assertEqual(json["already_subscribed"], {})

        # Inviting another user to an invite-only stream is allowed
        self.login_user(hamlet)
        result = self.common_subscribe_to_streams(
            hamlet, [stream_name],
            extra_post_data={'principals': orjson.dumps([othello.id]).decode()})
        json = result.json()
        self.assertEqual(json["subscribed"], {othello.email: [stream_name]})
        self.assertEqual(json["already_subscribed"], {})

        # Make sure both users are subscribed to this stream
        stream_id = get_stream(stream_name, hamlet.realm).id
        result = self.api_get(hamlet, f"/api/v1/streams/{stream_id}/members")
        self.assert_json_success(result)
        json = result.json()

        self.assertTrue(othello.email in json['subscribers'])
        self.assertTrue(hamlet.email in json['subscribers'])

class GetSubscribersTest(ZulipTestCase):

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.login_user(self.user_profile)

    def assert_user_got_subscription_notification(self, expected_msg: str) -> None:
        # verify that the user was sent a message informing them about the subscription
        msg = self.get_last_message()
        self.assertEqual(msg.recipient.type, msg.recipient.PERSONAL)
        self.assertEqual(msg.sender_id, self.notification_bot().id)
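
        # Compare the notification content with all whitespace stripped, so the
        # expected message can be written as an indented triple-quoted string below.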
        def non_ws(s: str) -> str:
            return s.replace('\n', '').replace(' ', '')
        self.assertEqual(non_ws(msg.content), non_ws(expected_msg))

    def check_well_formed_result(self, result: Dict[str, Any], stream_name: str, realm: Realm) -> None:
        """
        A successful call to get_subscribers returns the list of subscribers in
        the form:

        {"msg": "",
         "result": "success",
         "subscribers": [self.example_email("hamlet"), self.example_email("prospero")]}
        """
        self.assertIn("subscribers", result)
        self.assertIsInstance(result["subscribers"], list)
        true_subscribers = [user_profile.email for user_profile in self.users_subscribed_to_stream(
            stream_name, realm)]
        self.assertEqual(sorted(result["subscribers"]), sorted(true_subscribers))

    def make_subscriber_request(self, stream_id: int, user: Optional[UserProfile]=None) -> HttpResponse:
        if user is None:
            user = self.user_profile
        return self.api_get(user, f"/api/v1/streams/{stream_id}/members")

    def make_successful_subscriber_request(self, stream_name: str) -> None:
        stream_id = get_stream(stream_name, self.user_profile.realm).id
        result = self.make_subscriber_request(stream_id)
        self.assert_json_success(result)
        self.check_well_formed_result(result.json(),
                                      stream_name, self.user_profile.realm)

    def test_subscriber(self) -> None:
        """
        get_subscribers returns the list of subscribers.
        """
        stream_name = gather_subscriptions(self.user_profile)[0][0]['name']
        self.make_successful_subscriber_request(stream_name)

    def test_gather_subscriptions(self) -> None:
        """
        gather_subscriptions returns correct results with only 6 queries.

        (We also use this test to verify subscription notifications to
        folks who get subscribed to streams.)
        """
        streams = [f"stream_{i}" for i in range(10)]
        for stream_name in streams:
            self.make_stream(stream_name)

        users_to_subscribe = [
            self.user_profile.id,
            self.example_user("othello").id,
            self.example_user("cordelia").id,
        ]
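
        # The principals parameter is sent as a JSON-encoded list of user IDs.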
        self.common_subscribe_to_streams(
            self.user_profile,
            streams,
            dict(principals=orjson.dumps(users_to_subscribe).decode()))

        msg = '''
            @**King Hamlet** subscribed you to the following streams:

            * #**stream_0**
            * #**stream_1**
            * #**stream_2**
            * #**stream_3**
            * #**stream_4**
            * #**stream_5**
            * #**stream_6**
            * #**stream_7**
            * #**stream_8**
            * #**stream_9**
            '''
        self.assert_user_got_subscription_notification(msg)

        # Subscribe ourself first.
        self.common_subscribe_to_streams(
            self.user_profile,
            ["stream_invite_only_1"],
            dict(principals=orjson.dumps([self.user_profile.id]).decode()),
            invite_only=True)

        # Now add in other users, and this should trigger messages
        # to notify the user.
        self.common_subscribe_to_streams(
            self.user_profile,
            ["stream_invite_only_1"],
            dict(principals=orjson.dumps(users_to_subscribe).decode()),
            invite_only=True)

        msg = '''
            @**King Hamlet** subscribed you to the stream #**stream_invite_only_1**.
            '''
        self.assert_user_got_subscription_notification(msg)

        with queries_captured() as queries:
            subscribed_streams, _ = gather_subscriptions(
                self.user_profile, include_subscribers=True)

        self.assertTrue(len(subscribed_streams) >= 11)
        for sub in subscribed_streams:
            if not sub["name"].startswith("stream_"):
                continue
            self.assertTrue(len(sub["subscribers"]) == len(users_to_subscribe))
        self.assert_length(queries, 6)

    def test_never_subscribed_streams(self) -> None:
        """
        Check that never_subscribed streams are fetched correctly: they should not
        include invite_only streams, and for guest users they should include only
        web-public streams (not invite_only or regular public streams).
        """
        realm = get_realm("zulip")
        users_to_subscribe = [
            self.example_user("othello").id,
            self.example_user("cordelia").id,
        ]

        public_streams = [
            'test_stream_public_1',
            'test_stream_public_2',
            'test_stream_public_3',
            'test_stream_public_4',
            'test_stream_public_5',
        ]

        private_streams = [
            'test_stream_invite_only_1',
            'test_stream_invite_only_2',
        ]

        web_public_streams = [
            'test_stream_web_public_1',
            'test_stream_web_public_2',
        ]

        def create_public_streams() -> None:
            for stream_name in public_streams:
                self.make_stream(stream_name, realm=realm)

            self.common_subscribe_to_streams(
                self.user_profile,
                public_streams,
                dict(principals=orjson.dumps(users_to_subscribe).decode()),
            )

        create_public_streams()

        def create_web_public_streams() -> None:
            for stream_name in web_public_streams:
                self.make_stream(stream_name, realm=realm, is_web_public=True)

            ret = self.common_subscribe_to_streams(
                self.user_profile,
                web_public_streams,
                dict(principals=orjson.dumps(users_to_subscribe).decode()),
            )
            self.assert_json_success(ret)

        create_web_public_streams()

        def create_private_streams() -> None:
            self.common_subscribe_to_streams(
                self.user_profile,
                private_streams,
                dict(principals=orjson.dumps(users_to_subscribe).decode()),
                invite_only=True,
            )

        create_private_streams()

        def get_never_subscribed() -> List[Dict[str, Any]]:
            with queries_captured() as queries:
                sub_data = gather_subscriptions_helper(self.user_profile)
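            # gather_subscriptions_helper returns (subscribed, unsubscribed,
            # never_subscribed) data; only the never_subscribed part is needed here.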
            never_subscribed = sub_data[2]
            self.assert_length(queries, 5)

            # Ignore old streams.
            never_subscribed = [
                dct for dct in never_subscribed
                if dct['name'].startswith('test_')
            ]
            return never_subscribed

        never_subscribed = get_never_subscribed()

        # Invite-only streams should not appear in never_subscribed streams.
        self.assertEqual(len(never_subscribed), len(public_streams) + len(web_public_streams))
        for stream_dict in never_subscribed:
            name = stream_dict['name']
            self.assertFalse('invite_only' in name)
            self.assertTrue(len(stream_dict["subscribers"]) == len(users_to_subscribe))

        # Send private stream subscribers to all realm admins.
        def test_admin_case() -> None:
            self.user_profile.role = UserProfile.ROLE_REALM_ADMINISTRATOR
            # Test realm admins can get never subscribed private stream's subscribers.
            never_subscribed = get_never_subscribed()

            self.assertEqual(
                len(never_subscribed),
                len(public_streams) + len(private_streams) + len(web_public_streams),
            )
            for stream_dict in never_subscribed:
                self.assertTrue(len(stream_dict["subscribers"]) == len(users_to_subscribe))

        test_admin_case()

        def test_guest_user_case() -> None:
            self.user_profile.role = UserProfile.ROLE_GUEST
            sub, unsub, never_sub = gather_subscriptions_helper(self.user_profile)

            # It's +1 because of the stream Rome.
            self.assertEqual(len(never_sub), len(web_public_streams) + 1)
            sub_ids = list(map(lambda stream: stream["stream_id"], sub))
            unsub_ids = list(map(lambda stream: stream["stream_id"], unsub))

            for stream_dict in never_sub:
                self.assertTrue(stream_dict["is_web_public"])
                self.assertTrue(stream_dict["stream_id"] not in sub_ids)
                self.assertTrue(stream_dict["stream_id"] not in unsub_ids)

                # The Rome stream has is_web_public=True, with default
                # subscribers not set up by this test, so we do the
                # following check only for the streams we created.
                if stream_dict["name"] in web_public_streams:
                    self.assertEqual(
                        len(stream_dict["subscribers"]),
                        len(users_to_subscribe))

        test_guest_user_case()

    def test_gather_subscribed_streams_for_guest_user(self) -> None:
        guest_user = self.example_user("polonius")

        stream_name_sub = "public_stream_1"
        self.make_stream(stream_name_sub, realm=get_realm("zulip"))
        self.subscribe(guest_user, stream_name_sub)

        stream_name_unsub = "public_stream_2"
        self.make_stream(stream_name_unsub, realm=get_realm("zulip"))
        self.subscribe(guest_user, stream_name_unsub)
        self.unsubscribe(guest_user, stream_name_unsub)

        stream_name_never_sub = "public_stream_3"
        self.make_stream(stream_name_never_sub, realm=get_realm("zulip"))

        normal_user = self.example_user("aaron")
        self.subscribe(normal_user, stream_name_sub)
        self.subscribe(normal_user, stream_name_unsub)
        self.subscribe(normal_user, stream_name_unsub)

        subs, unsubs, neversubs = gather_subscriptions_helper(guest_user)

        # Guest users get info about subscribed public streams' subscribers
        expected_stream_exists = False
        for sub in subs:
            if sub["name"] == stream_name_sub:
                expected_stream_exists = True
                self.assertEqual(len(sub["subscribers"]), 2)
        self.assertTrue(expected_stream_exists)

        # Guest users only get data about never-subscribed streams if they're
        # web-public.
        for stream in neversubs:
            self.assertTrue(stream['is_web_public'])

        # Guest users only get data about never-subscribed web-public streams.
        self.assertEqual(len(neversubs), 1)

    def test_previously_subscribed_private_streams(self) -> None:
        admin_user = self.example_user("iago")
        non_admin_user = self.example_user("cordelia")
        guest_user = self.example_user("polonius")
        stream_name = "private_stream"

        self.make_stream(stream_name, realm=get_realm("zulip"), invite_only=True)
        self.subscribe(admin_user, stream_name)
        self.subscribe(non_admin_user, stream_name)
        self.subscribe(guest_user, stream_name)
        self.subscribe(self.example_user("othello"), stream_name)

        self.unsubscribe(admin_user, stream_name)
        self.unsubscribe(non_admin_user, stream_name)
        self.unsubscribe(guest_user, stream_name)

        # Test that an admin user gets previously subscribed private stream's subscribers.
        sub_data = gather_subscriptions_helper(admin_user)
        unsubscribed_streams = sub_data[1]
        self.assertEqual(len(unsubscribed_streams), 1)
        self.assertEqual(len(unsubscribed_streams[0]["subscribers"]), 1)

        # Test that non-admin users cannot get previously subscribed private stream's subscribers.
        sub_data = gather_subscriptions_helper(non_admin_user)
        unsubscribed_streams = sub_data[1]
        self.assertEqual(len(unsubscribed_streams), 1)
        self.assertFalse('subscribers' in unsubscribed_streams[0])

        sub_data = gather_subscriptions_helper(guest_user)
        unsubscribed_streams = sub_data[1]
        self.assertEqual(len(unsubscribed_streams), 1)
        self.assertFalse('subscribers' in unsubscribed_streams[0])

    def test_gather_subscriptions_mit(self) -> None:
        """
        gather_subscriptions returns correct results with only 6 queries.
        """
        # Subscribe only ourself because invites are disabled on mit.edu
        mit_user_profile = self.mit_user('starnine')
        user_id = mit_user_profile.id
        users_to_subscribe = [user_id, self.mit_user("espuser").id]
        for _ in users_to_subscribe:
            stream = self.subscribe(mit_user_profile, "mit_stream")
            self.assertTrue(stream.is_in_zephyr_realm)

        self.common_subscribe_to_streams(
            mit_user_profile,
            ["mit_invite_only"],
            dict(principals=orjson.dumps(users_to_subscribe).decode()),
            invite_only=True,
            subdomain="zephyr")

        with queries_captured() as queries:
            subscribed_streams, _ = gather_subscriptions(
                mit_user_profile, include_subscribers=True)

        self.assertTrue(len(subscribed_streams) >= 2)
        for sub in subscribed_streams:
            if not sub["name"].startswith("mit_"):
                raise AssertionError("Unexpected stream!")
            if sub["name"] == "mit_invite_only":
                self.assertTrue(len(sub["subscribers"]) == len(users_to_subscribe))
            else:
                self.assertTrue(len(sub["subscribers"]) == 0)
        self.assert_length(queries, 6)

    def test_nonsubscriber(self) -> None:
        """
        Even a non-subscriber to a public stream can query a stream's membership
        with get_subscribers.
        """
        # Create a stream for which Hamlet is the only subscriber.
        stream_name = "Saxony"
        self.common_subscribe_to_streams(self.user_profile, [stream_name])
        other_user = self.example_user("othello")

        # Fetch the subscriber list as a non-member.
        self.login_user(other_user)
        self.make_successful_subscriber_request(stream_name)

    def test_subscriber_private_stream(self) -> None:
        """
        A subscriber to a private stream can query that stream's membership.
        """
        stream_name = "Saxony"
        self.common_subscribe_to_streams(self.user_profile, [stream_name],
                                         invite_only=True)
        self.make_successful_subscriber_request(stream_name)

        stream_id = get_stream(stream_name, self.user_profile.realm).id
        # Verify another user can't get the data.
        self.login('cordelia')
        result = self.client_get(f"/json/streams/{stream_id}/members")
        self.assert_json_error(result, 'Invalid stream id')

        # But an organization administrator can
        self.login('iago')
        result = self.client_get(f"/json/streams/{stream_id}/members")
        self.assert_json_success(result)

    def test_json_get_subscribers_stream_not_exist(self) -> None:
        """
        json_get_subscribers returns an error for a stream that does not exist.
        """
        stream_id = 99999999
        result = self.client_get(f"/json/streams/{stream_id}/members")
        self.assert_json_error(result, 'Invalid stream id')

    def test_json_get_subscribers(self) -> None:
        """
        json_get_subscribers in zerver/views/streams.py
        also returns the list of subscribers for a stream, when requested.
        """
        stream_name = gather_subscriptions(self.user_profile)[0][0]['name']
        stream_id = get_stream(stream_name, self.user_profile.realm).id
        expected_subscribers = gather_subscriptions(
            self.user_profile, include_subscribers=True)[0][0]['subscribers']
        result = self.client_get(f"/json/streams/{stream_id}/members")
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertIn('subscribers', result_dict)
        self.assertIsInstance(result_dict['subscribers'], list)
        subscribers: List[str] = []
        for subscriber in result_dict['subscribers']:
            self.assertIsInstance(subscriber, str)
            subscribers.append(subscriber)
        self.assertEqual(set(subscribers), set(expected_subscribers))

    def test_json_get_subscribers_for_guest_user(self) -> None:
        """
        Guest users should have access to subscribers of web-public streams, even
        if they aren't subscribed or have never subscribed to that stream.
        """
        guest_user = self.example_user("polonius")
        _, _, never_subscribed = gather_subscriptions_helper(guest_user, True)

        # A guest user can only see never-subscribed streams that are web-public.
        # For Polonius, the only web-public stream that he is not subscribed to at
        # this point is Rome.
        self.assertTrue(len(never_subscribed) == 1)

        web_public_stream_id = never_subscribed[0]['stream_id']
        result = self.client_get(f"/json/streams/{web_public_stream_id}/members")
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertIn('subscribers', result_dict)
        self.assertIsInstance(result_dict['subscribers'], list)
        self.assertTrue(len(result_dict['subscribers']) > 0)

    def test_nonsubscriber_private_stream(self) -> None:
        """
        A non-subscriber who is not a realm admin can't query a private stream's
        membership. But an unsubscribed realm admin can.
        """
        # Create a private stream for which Hamlet is the only subscriber.
        stream_name = "NewStream"
        self.common_subscribe_to_streams(self.user_profile, [stream_name],
                                         invite_only=True)
        user_profile = self.example_user('othello')

        # Try to fetch the subscriber list as a non-member & non-realm-admin-user.
        stream_id = get_stream(stream_name, user_profile.realm).id
        result = self.make_subscriber_request(stream_id, user=user_profile)
        self.assert_json_error(result, "Invalid stream id")

        # Try to fetch the subscriber list as a non-member & realm-admin-user.
        self.login('iago')
        self.make_successful_subscriber_request(stream_name)

class AccessStreamTest(ZulipTestCase):
    def test_access_stream(self) -> None:
        """
        A comprehensive security test for the access_stream_by_* API functions.
        """
        # Create a private stream for which Hamlet is the only subscriber.
        hamlet = self.example_user('hamlet')
        stream_name = "new_private_stream"
        self.login_user(hamlet)
        self.common_subscribe_to_streams(hamlet, [stream_name],
                                         invite_only=True)
        stream = get_stream(stream_name, hamlet.realm)

        othello = self.example_user('othello')

        # Nobody can access a stream that doesn't exist
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(hamlet, 501232)
        with self.assertRaisesRegex(JsonableError, "Invalid stream name 'invalid stream'"):
            access_stream_by_name(hamlet, "invalid stream")

        # Hamlet can access the private stream
        (stream_ret, rec_ret, sub_ret) = access_stream_by_id(hamlet, stream.id)
        self.assertEqual(stream.id, stream_ret.id)
        assert sub_ret is not None
        self.assertEqual(sub_ret.recipient, rec_ret)
        self.assertEqual(sub_ret.recipient.type_id, stream.id)
        (stream_ret2, rec_ret2, sub_ret2) = access_stream_by_name(hamlet, stream.name)
        self.assertEqual(stream_ret.id, stream_ret2.id)
        self.assertEqual(sub_ret, sub_ret2)
        self.assertEqual(rec_ret, rec_ret2)

        # Othello cannot access the private stream
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(othello, stream.id)
        with self.assertRaisesRegex(JsonableError, "Invalid stream name 'new_private_stream'"):
            access_stream_by_name(othello, stream.name)

        # Both Othello and Hamlet can access a public stream that only
        # Hamlet is subscribed to in this realm
        public_stream_name = "public_stream"
        self.common_subscribe_to_streams(hamlet, [public_stream_name],
                                         invite_only=False)
        public_stream = get_stream(public_stream_name, hamlet.realm)
        access_stream_by_id(othello, public_stream.id)
        access_stream_by_name(othello, public_stream.name)
        access_stream_by_id(hamlet, public_stream.id)
        access_stream_by_name(hamlet, public_stream.name)

        # Nobody can access a public stream in another realm
        mit_realm = get_realm("zephyr")
        mit_stream = ensure_stream(mit_realm, "mit_stream", invite_only=False)
        sipbtest = self.mit_user("sipbtest")
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(hamlet, mit_stream.id)
        with self.assertRaisesRegex(JsonableError, "Invalid stream name 'mit_stream'"):
            access_stream_by_name(hamlet, mit_stream.name)
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(sipbtest, stream.id)
        with self.assertRaisesRegex(JsonableError, "Invalid stream name 'new_private_stream'"):
            access_stream_by_name(sipbtest, stream.name)

        # MIT realm users cannot access even public streams in their realm
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(sipbtest, mit_stream.id)
        with self.assertRaisesRegex(JsonableError, "Invalid stream name 'mit_stream'"):
            access_stream_by_name(sipbtest, mit_stream.name)

        # But they can access streams they are subscribed to
        self.common_subscribe_to_streams(sipbtest, [mit_stream.name], subdomain="zephyr")
        access_stream_by_id(sipbtest, mit_stream.id)
        access_stream_by_name(sipbtest, mit_stream.name)

    def test_stream_access_by_guest(self) -> None:
        guest_user_profile = self.example_user('polonius')
        self.login_user(guest_user_profile)
        stream_name = "public_stream_1"
        stream = self.make_stream(stream_name, guest_user_profile.realm, invite_only=False)

        # Guest users don't have access to unsubscribed public streams
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(guest_user_profile, stream.id)

        # Guest users have access to subscribed public streams
        self.subscribe(guest_user_profile, stream_name)
        (stream_ret, rec_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
        assert sub_ret is not None
        self.assertEqual(stream.id, stream_ret.id)
        self.assertEqual(sub_ret.recipient, rec_ret)
        self.assertEqual(sub_ret.recipient.type_id, stream.id)

        stream_name = "private_stream_1"
        stream = self.make_stream(stream_name, guest_user_profile.realm, invite_only=True)
        # Obviously, a guest user doesn't have access to unsubscribed private streams either
        with self.assertRaisesRegex(JsonableError, "Invalid stream id"):
            access_stream_by_id(guest_user_profile, stream.id)

        # Guest users have access to subscribed private streams
        self.subscribe(guest_user_profile, stream_name)
        (stream_ret, rec_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
        assert sub_ret is not None
        self.assertEqual(stream.id, stream_ret.id)
        self.assertEqual(sub_ret.recipient, rec_ret)
        self.assertEqual(sub_ret.recipient.type_id, stream.id)

        stream_name = "web_public_stream"
        stream = self.make_stream(stream_name, guest_user_profile.realm, is_web_public=True)
        # Guest users have access to web-public streams even if they aren't subscribed.
        (stream_ret, rec_ret, sub_ret) = access_stream_by_id(guest_user_profile, stream.id)
        self.assertTrue(can_access_stream_history(guest_user_profile, stream))
        assert sub_ret is None
        self.assertEqual(stream.id, stream_ret.id)

class StreamTrafficTest(ZulipTestCase):
    def test_average_weekly_stream_traffic_calculation(self) -> None:
        # No traffic data for the stream
        self.assertEqual(
            get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=300), {1: 4003}), 0)

        # We use high numbers here to make it more likely to catch small errors in the
        # denominators of the calculations. That said, we don't want to go over 100,
        # since then the 2-significant-digits rounding gets applied.

        # old stream
        self.assertEqual(
            get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=300), {42: 98 * 4 + 3}), 98)
        # stream between 7 and 27 days old
        self.assertEqual(
            get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=10), {42: (98 * 10 + 9) // 7}), 98)
        # stream less than 7 days old
        self.assertEqual(
            get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=5), {42: 100}), None)
        # average traffic between 0 and 1
        self.assertEqual(
            get_average_weekly_stream_traffic(42, timezone_now() - timedelta(days=300), {42: 1}), 1)
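
        # These fixtures are consistent with (though they do not directly assert)
        # an averaging rule where the recent-traffic counter covers roughly the last
        # four weeks: an old stream averages traffic // 4 (395 // 4 == 98 above), a
        # 7-27 day old stream scales its traffic by 7 / age_in_days
        # (141 * 7 // 10 == 98), and streams younger than a week return None.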

    def test_round_to_2_significant_digits(self) -> None:
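        # Two significant digits of 116 is 1.2 * 10**2, i.e. 120.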
        self.assertEqual(120, round_to_2_significant_digits(116))

class NoRecipientIDsTest(ZulipTestCase):
    def test_no_recipient_ids(self) -> None:
        user_profile = self.example_user('cordelia')

        Subscription.objects.filter(user_profile=user_profile, recipient__type=Recipient.STREAM).delete()
        subs = gather_subscriptions_helper(user_profile)

        # Check that gather_subscriptions_helper returns nothing (and does not
        # crash) when the user has no stream recipients at all.
        #
        # This covers a rare corner case.
        self.assertEqual(len(subs[0]), 0)