2021-03-23 07:34:01 +01:00
import hashlib
2020-06-11 00:54:34 +02:00
import random
from datetime import timedelta
2022-03-23 05:09:26 +01:00
from io import StringIO
2020-06-13 03:34:01 +02:00
from typing import Any , Dict , List , Mapping , Optional , Sequence , Set , Union
2020-06-11 00:54:34 +02:00
from unittest import mock
2016-06-03 07:57:07 +02:00
2020-08-07 01:09:47 +02:00
import orjson
2017-04-27 00:03:21 +02:00
from django . conf import settings
2018-05-16 03:36:18 +02:00
from django . core . exceptions import ValidationError
2021-02-16 01:19:02 +01:00
from django . http import HttpResponse
2017-05-29 19:54:06 +02:00
from django . utils . timezone import now as timezone_now
2016-09-12 17:21:49 +02:00
2022-04-14 23:58:15 +02:00
from zerver . actions . create_realm import do_create_realm
2022-04-14 23:34:23 +02:00
from zerver . actions . default_streams import (
2020-06-11 00:54:34 +02:00
do_add_default_stream ,
do_add_streams_to_default_stream_group ,
do_change_default_stream_group_description ,
do_change_default_stream_group_name ,
2022-04-14 23:34:23 +02:00
do_create_default_stream_group ,
do_remove_default_stream ,
do_remove_default_stream_group ,
do_remove_streams_from_default_stream_group ,
get_default_streams_for_realm ,
lookup_default_stream_groups ,
)
2022-04-14 23:57:15 +02:00
from zerver . actions . realm_settings import do_change_realm_plan_type , do_set_realm_property
2022-04-14 23:51:16 +02:00
from zerver . actions . streams import (
2022-04-14 23:34:23 +02:00
bulk_add_subscriptions ,
bulk_remove_subscriptions ,
2020-06-11 00:54:34 +02:00
do_change_stream_post_policy ,
2020-07-13 16:13:28 +02:00
do_change_subscription_property ,
2017-02-18 18:01:00 +01:00
do_deactivate_stream ,
2022-04-14 23:51:16 +02:00
)
from zerver . actions . users import do_change_user_role , do_deactivate_user
2021-07-16 22:11:10 +02:00
from zerver . lib . exceptions import JsonableError
2022-03-07 15:12:24 +01:00
from zerver . lib . message import UnreadStreamInfo , aggregate_unread_data , get_raw_unread_data
2021-07-04 10:00:55 +02:00
from zerver . lib . response import json_success
2022-03-12 13:46:20 +01:00
from zerver . lib . stream_color import STREAM_ASSIGNMENT_COLORS , pick_colors
2020-06-11 00:54:34 +02:00
from zerver . lib . stream_subscription import (
get_active_subscriptions_for_stream_id ,
num_subscribers_for_stream_id ,
2021-03-27 03:01:37 +01:00
subscriber_ids_with_stream_history_access ,
2016-09-12 17:21:49 +02:00
)
2022-03-12 13:33:15 +01:00
from zerver . lib . stream_traffic import (
get_average_weekly_stream_traffic ,
round_to_2_significant_digits ,
)
2020-06-11 00:54:34 +02:00
from zerver . lib . streams import (
2020-09-29 18:06:50 +02:00
StreamDict ,
2020-06-11 00:54:34 +02:00
access_stream_by_id ,
access_stream_by_name ,
2020-07-24 04:56:12 +02:00
can_access_stream_history ,
2021-07-16 22:11:10 +02:00
can_access_stream_user_ids ,
create_stream_if_needed ,
2020-06-11 00:54:34 +02:00
create_streams_if_needed ,
2022-04-14 23:42:50 +02:00
do_get_streams ,
ensure_stream ,
2020-06-11 00:54:34 +02:00
filter_stream_authorization ,
list_to_streams ,
2017-11-13 21:24:51 +01:00
)
2022-04-14 23:45:12 +02:00
from zerver . lib . subscription_info import (
bulk_get_subscriber_user_ids ,
gather_subscriptions ,
gather_subscriptions_helper ,
validate_user_access_to_subscribers_helper ,
)
2022-04-14 23:58:37 +02:00
from zerver . lib . test_classes import ZulipTestCase , get_topic_messages
2020-06-11 00:54:34 +02:00
from zerver . lib . test_helpers import (
2022-02-05 00:36:40 +01:00
HostRequestMock ,
2020-10-15 14:59:13 +02:00
cache_tries_captured ,
2020-06-11 00:54:34 +02:00
get_subscription ,
2021-12-29 20:10:36 +01:00
most_recent_message ,
2020-08-06 18:21:42 +02:00
most_recent_usermessage ,
2020-06-11 00:54:34 +02:00
queries_captured ,
reset_emails_in_zulip_realm ,
)
2022-03-11 19:32:25 +01:00
from zerver . lib . types import NeverSubscribedStreamDict , SubscriptionInfo
2020-06-11 00:54:34 +02:00
from zerver . models import (
2022-03-23 05:09:26 +01:00
Attachment ,
2020-06-11 00:54:34 +02:00
DefaultStream ,
DefaultStreamGroup ,
Message ,
Realm ,
2021-12-07 20:47:49 +01:00
RealmAuditLog ,
2020-06-11 00:54:34 +02:00
Recipient ,
Stream ,
Subscription ,
UserMessage ,
UserProfile ,
active_non_guest_user_ids ,
flush_per_request_caches ,
get_default_stream_groups ,
get_realm ,
2021-07-16 22:11:10 +02:00
get_stream ,
2020-06-11 00:54:34 +02:00
get_user ,
get_user_profile_by_id_in_realm ,
2022-03-23 05:09:26 +01:00
validate_attachment_request ,
validate_attachment_request_for_spectator_access ,
2020-06-11 00:54:34 +02:00
)
from zerver . views . streams import compose_views
2017-11-13 21:24:51 +01:00
2014-01-29 22:03:40 +01:00
2018-08-15 21:03:05 +02:00
class TestMiscStuff ( ZulipTestCase ) :
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
def test_test_helper ( self ) - > None :
cordelia = self . example_user ( " cordelia " )
s = self . subscribed_stream_name_list ( cordelia )
self . assertIn ( " * Verona " , s )
self . assertNotIn ( " * Denmark " , s )
stream colors: Fix stream color assignment.
The bug here probably didn't come up too much in
practice, but if we were adding a user to multiple
streams when they already had used all N available
colors, all the new streams would be assigned the same
color, since the size of used_colors would stay at N,
thwarting our little modulo-len hackery.
It's not a terrible bug, since users can obviously
customize their stream colors as they see fit.
Usually when we are adding a user to multiple streams,
the users are fairly new, and thus don't have many
existing streams, so I have never heard this bug
reported in the field.
Anyway, assigning the colors in bulk seems to make more
sense, and I added some tests.
For the situations where all the colors have already
been used, I didn't put a ton of thought into exactly
which repeated colors we want to choose; instead, I
just ensure they're different modulo 24. It's possible
that we should just have more than 24 canned colors, or
we should just assign the same default color every time
and let users change it themselves (once they've gone
beyond the 24, to be clear). Or maybe we can just do
something smarter here. I don't have enough time for a
deep dive on this issue.
2021-12-24 19:19:45 +01:00
def test_pick_colors ( self ) - > None :
used_colors : Set [ str ] = set ( )
color_map : Dict [ int , str ] = { }
recipient_ids = list ( range ( 30 ) )
user_color_map = pick_colors ( used_colors , color_map , recipient_ids )
self . assertEqual (
user_color_map ,
{
0 : " #76ce90 " ,
1 : " #fae589 " ,
2 : " #a6c7e5 " ,
3 : " #e79ab5 " ,
4 : " #bfd56f " ,
5 : " #f4ae55 " ,
6 : " #b0a5fd " ,
7 : " #addfe5 " ,
8 : " #f5ce6e " ,
9 : " #c2726a " ,
10 : " #94c849 " ,
11 : " #bd86e5 " ,
12 : " #ee7e4a " ,
13 : " #a6dcbf " ,
14 : " #95a5fd " ,
15 : " #53a063 " ,
16 : " #9987e1 " ,
17 : " #e4523d " ,
18 : " #c2c2c2 " ,
19 : " #4f8de4 " ,
20 : " #c6a8ad " ,
21 : " #e7cc4d " ,
22 : " #c8bebf " ,
23 : " #a47462 " ,
# start repeating
24 : " #76ce90 " ,
25 : " #fae589 " ,
26 : " #a6c7e5 " ,
27 : " #e79ab5 " ,
28 : " #bfd56f " ,
29 : " #f4ae55 " ,
} ,
)
color_map = { 98 : " color98 " , 99 : " color99 " }
used_colors = set ( STREAM_ASSIGNMENT_COLORS ) - { " #c6a8ad " , " #9987e1 " }
recipient_ids = [ 99 , 98 , 1 , 2 , 3 , 4 ]
user_color_map = pick_colors ( used_colors , color_map , recipient_ids )
self . assertEqual (
user_color_map ,
2022-01-16 15:15:53 +01:00
{ 98 : " color98 " , 99 : " color99 " , 1 : " #9987e1 " , 2 : " #c6a8ad " , 3 : " #e79ab5 " , 4 : " #bfd56f " } ,
)
"""
If we are assigning colors to a user with 24 + streams , we have to start
re - using old colors . Our algorithm basically uses recipient_id % 24 , so
the following code reflects the worse case scenario that our new
streams have recipient ids spaced out by exact multiples of 24. We
don ' t try to work around this edge case, since users who really depend
on the stream colors can always just assign themselves custom colors
for the streams that they really want to stand out .
Even if recipient_ids were completely random , the odds of collisions
are low , but it ' s often the case that bulk-adds are done for streams
that either were or are being created at roughly the same time , so the
recipient_ids tend to have even fewer collisions .
"""
used_colors = set ( STREAM_ASSIGNMENT_COLORS )
color_map = { }
recipient_ids = [ 2 , 26 , 50 , 74 ]
user_color_map = pick_colors ( used_colors , color_map , recipient_ids )
self . assertEqual (
user_color_map ,
{ 2 : " #a6c7e5 " , 26 : " #a6c7e5 " , 50 : " #a6c7e5 " , 74 : " #a6c7e5 " } ,
stream colors: Fix stream color assignment.
The bug here probably didn't come up too much in
practice, but if we were adding a user to multiple
streams when they already had used all N available
colors, all the new streams would be assigned the same
color, since the size of used_colors would stay at N,
thwarting our little modulo-len hackery.
It's not a terrible bug, since users can obviously
customize their stream colors as they see fit.
Usually when we are adding a user to multiple streams,
the users are fairly new, and thus don't have many
existing streams, so I have never heard this bug
reported in the field.
Anyway, assigning the colors in bulk seems to make more
sense, and I added some tests.
For the situations where all the colors have already
been used, I didn't put a ton of thought into exactly
which repeated colors we want to choose; instead, I
just ensure they're different modulo 24. It's possible
that we should just have more than 24 canned colors, or
we should just assign the same default color every time
and let users change it themselves (once they've gone
beyond the 24, to be clear). Or maybe we can just do
something smarter here. I don't have enough time for a
deep dive on this issue.
2021-12-24 19:19:45 +01:00
)
2018-08-15 21:03:05 +02:00
def test_empty_results ( self ) - > None :
# These are essentially just tests to ensure line
# coverage for codepaths that won't ever really be
# called in practice.
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " cordelia " )
2018-08-15 21:03:05 +02:00
result = bulk_get_subscriber_user_ids (
stream_dicts = [ ] ,
user_profile = user_profile ,
2020-10-18 17:08:51 +02:00
subscribed_stream_ids = set ( ) ,
2018-08-15 21:03:05 +02:00
)
self . assertEqual ( result , { } )
streams = do_get_streams (
user_profile = user_profile ,
include_public = False ,
include_subscribed = False ,
include_all_active = False ,
include_default = False ,
)
self . assertEqual ( streams , [ ] )
2021-02-12 08:19:30 +01:00
2016-09-15 16:22:09 +02:00
class TestCreateStreams ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_creating_streams ( self ) - > None :
2021-02-12 08:20:45 +01:00
stream_names = [ " new1 " , " new2 " , " new3 " ]
stream_descriptions = [ " des1 " , " des2 " , " des3 " ]
realm = get_realm ( " zulip " )
2016-09-15 16:22:09 +02:00
2018-03-16 10:57:17 +01:00
# Test stream creation events.
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-28 07:27:50 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 1 ) :
2021-04-02 18:11:45 +02:00
ensure_stream ( realm , " Public stream " , invite_only = False , acting_user = None )
2018-03-16 10:57:17 +01:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( events [ 0 ] [ " event " ] [ " type " ] , " stream " )
self . assertEqual ( events [ 0 ] [ " event " ] [ " op " ] , " create " )
2018-03-16 10:57:17 +01:00
# Send public stream creation event to all active users.
2021-02-12 08:20:45 +01:00
self . assertEqual ( events [ 0 ] [ " users " ] , active_non_guest_user_ids ( realm . id ) )
self . assertEqual ( events [ 0 ] [ " event " ] [ " streams " ] [ 0 ] [ " name " ] , " Public stream " )
2018-03-16 10:57:17 +01:00
2021-05-28 07:27:50 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 1 ) :
2021-04-02 18:11:45 +02:00
ensure_stream ( realm , " Private stream " , invite_only = True , acting_user = None )
2018-03-16 10:57:17 +01:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( events [ 0 ] [ " event " ] [ " type " ] , " stream " )
self . assertEqual ( events [ 0 ] [ " event " ] [ " op " ] , " create " )
2018-03-16 10:57:17 +01:00
# Send private stream creation event to only realm admins.
2021-05-17 05:41:32 +02:00
self . assert_length ( events [ 0 ] [ " users " ] , 2 )
2021-02-12 08:20:45 +01:00
self . assertTrue ( self . example_user ( " iago " ) . id in events [ 0 ] [ " users " ] )
self . assertTrue ( self . example_user ( " desdemona " ) . id in events [ 0 ] [ " users " ] )
self . assertEqual ( events [ 0 ] [ " event " ] [ " streams " ] [ 0 ] [ " name " ] , " Private stream " )
2018-03-16 10:57:17 +01:00
2016-09-15 16:22:09 +02:00
new_streams , existing_streams = create_streams_if_needed (
realm ,
2021-02-12 08:19:30 +01:00
[
{
" name " : stream_name ,
" description " : stream_description ,
" invite_only " : True ,
" stream_post_policy " : Stream . STREAM_POST_POLICY_ADMINS ,
" message_retention_days " : - 1 ,
}
for ( stream_name , stream_description ) in zip ( stream_names , stream_descriptions )
] ,
)
2016-11-21 00:16:52 +01:00
2021-05-17 05:41:32 +02:00
self . assert_length ( new_streams , 3 )
self . assert_length ( existing_streams , 0 )
2016-09-15 16:22:09 +02:00
actual_stream_names = { stream . name for stream in new_streams }
self . assertEqual ( actual_stream_names , set ( stream_names ) )
2016-11-27 15:31:53 +01:00
actual_stream_descriptions = { stream . description for stream in new_streams }
self . assertEqual ( actual_stream_descriptions , set ( stream_descriptions ) )
for stream in new_streams :
self . assertTrue ( stream . invite_only )
2020-02-04 21:50:55 +01:00
self . assertTrue ( stream . stream_post_policy == Stream . STREAM_POST_POLICY_ADMINS )
2020-06-14 18:57:02 +02:00
self . assertTrue ( stream . message_retention_days == - 1 )
2016-09-15 16:22:09 +02:00
new_streams , existing_streams = create_streams_if_needed (
realm ,
2021-02-12 08:19:30 +01:00
[
{ " name " : stream_name , " description " : stream_description , " invite_only " : True }
for ( stream_name , stream_description ) in zip ( stream_names , stream_descriptions )
] ,
)
2016-11-21 00:16:52 +01:00
2021-05-17 05:41:32 +02:00
self . assert_length ( new_streams , 0 )
self . assert_length ( existing_streams , 3 )
2016-09-15 16:22:09 +02:00
actual_stream_names = { stream . name for stream in existing_streams }
self . assertEqual ( actual_stream_names , set ( stream_names ) )
2016-11-27 15:31:53 +01:00
actual_stream_descriptions = { stream . description for stream in existing_streams }
self . assertEqual ( actual_stream_descriptions , set ( stream_descriptions ) )
for stream in existing_streams :
self . assertTrue ( stream . invite_only )
2014-01-29 22:03:40 +01:00
2019-02-20 21:09:21 +01:00
def test_create_api_multiline_description ( self ) - > None :
user = self . example_user ( " hamlet " )
realm = user . realm
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2021-02-12 08:19:30 +01:00
post_data = {
2021-02-12 08:20:45 +01:00
" subscriptions " : orjson . dumps (
[ { " name " : " new_stream " , " description " : " multi \n line \n description " } ]
2021-02-12 08:19:30 +01:00
) . decode ( ) ,
2021-02-12 08:20:45 +01:00
" invite_only " : orjson . dumps ( False ) . decode ( ) ,
2021-02-12 08:19:30 +01:00
}
result = self . api_post ( user , " /api/v1/users/me/subscriptions " , post_data , subdomain = " zulip " )
2019-02-20 21:09:21 +01:00
self . assert_json_success ( result )
stream = get_stream ( " new_stream " , realm )
2021-02-12 08:20:45 +01:00
self . assertEqual ( stream . description , " multi line description " )
2019-02-20 21:09:21 +01:00
2018-04-27 01:00:26 +02:00
def test_history_public_to_subscribers_on_stream_creation ( self ) - > None :
2021-02-12 08:20:45 +01:00
realm = get_realm ( " zulip " )
2020-09-29 18:06:50 +02:00
stream_dicts : List [ StreamDict ] = [
2018-04-27 01:00:26 +02:00
{
" name " : " publicstream " ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" description " : " Public stream with public history " ,
2018-04-27 01:00:26 +02:00
} ,
2022-01-29 00:54:13 +01:00
{ " name " : " webpublicstream " , " description " : " Web-public stream " , " is_web_public " : True } ,
2018-04-27 01:00:26 +02:00
{
" name " : " privatestream " ,
" description " : " Private stream with non-public history " ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" invite_only " : True ,
2018-04-27 01:00:26 +02:00
} ,
{
" name " : " privatewithhistory " ,
" description " : " Private stream with public history " ,
" invite_only " : True ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" history_public_to_subscribers " : True ,
2018-04-27 01:00:26 +02:00
} ,
{
" name " : " publictrywithouthistory " ,
" description " : " Public stream without public history (disallowed) " ,
" invite_only " : False ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" history_public_to_subscribers " : False ,
2018-04-27 01:00:26 +02:00
} ,
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
]
2018-04-27 01:00:26 +02:00
created , existing = create_streams_if_needed ( realm , stream_dicts )
2021-05-17 05:41:32 +02:00
self . assert_length ( created , 5 )
self . assert_length ( existing , 0 )
2018-04-27 01:00:26 +02:00
for stream in created :
2021-02-12 08:20:45 +01:00
if stream . name == " publicstream " :
2018-04-27 01:00:26 +02:00
self . assertTrue ( stream . history_public_to_subscribers )
2021-02-12 08:20:45 +01:00
if stream . name == " webpublicstream " :
2020-07-24 04:56:12 +02:00
self . assertTrue ( stream . history_public_to_subscribers )
2021-02-12 08:20:45 +01:00
if stream . name == " privatestream " :
2018-04-27 01:00:26 +02:00
self . assertFalse ( stream . history_public_to_subscribers )
2021-02-12 08:20:45 +01:00
if stream . name == " privatewithhistory " :
2018-04-27 01:00:26 +02:00
self . assertTrue ( stream . history_public_to_subscribers )
2021-02-12 08:20:45 +01:00
if stream . name == " publictrywithouthistory " :
2018-04-27 01:00:26 +02:00
self . assertTrue ( stream . history_public_to_subscribers )
def test_history_public_to_subscribers_zephyr_realm ( self ) - > None :
2021-02-12 08:20:45 +01:00
realm = get_realm ( " zephyr " )
2018-04-27 01:00:26 +02:00
stream , created = create_stream_if_needed ( realm , " private_stream " , invite_only = True )
self . assertTrue ( created )
self . assertTrue ( stream . invite_only )
self . assertFalse ( stream . history_public_to_subscribers )
stream , created = create_stream_if_needed ( realm , " public_stream " , invite_only = False )
self . assertTrue ( created )
self . assertFalse ( stream . invite_only )
self . assertFalse ( stream . history_public_to_subscribers )
2019-07-24 08:47:38 +02:00
def test_auto_mark_stream_created_message_as_read_for_stream_creator ( self ) - > None :
2020-03-12 14:17:25 +01:00
# This test relies on email == delivery_email for
# convenience.
reset_emails_in_zulip_realm ( )
2021-02-12 08:20:45 +01:00
realm = Realm . objects . get ( name = " Zulip Dev " )
iago = self . example_user ( " iago " )
hamlet = self . example_user ( " hamlet " )
cordelia = self . example_user ( " cordelia " )
aaron = self . example_user ( " aaron " )
2019-07-24 08:47:38 +02:00
# Establish a stream for notifications.
2021-04-02 18:11:45 +02:00
announce_stream = ensure_stream (
realm , " announce " , False , " announcements here. " , acting_user = None
)
2019-07-24 08:47:38 +02:00
realm . notifications_stream_id = announce_stream . id
2021-02-12 08:20:45 +01:00
realm . save ( update_fields = [ " notifications_stream_id " ] )
2019-07-24 08:47:38 +02:00
self . subscribe ( iago , announce_stream . name )
self . subscribe ( hamlet , announce_stream . name )
2020-03-06 18:40:46 +01:00
self . login_user ( iago )
2019-07-24 08:47:38 +02:00
initial_message_count = Message . objects . count ( )
initial_usermessage_count = UserMessage . objects . count ( )
data = {
" subscriptions " : ' [ { " name " : " brand new stream " , " description " : " " }] ' ,
2021-02-12 08:20:45 +01:00
" history_public_to_subscribers " : " true " ,
" invite_only " : " false " ,
" announce " : " true " ,
2020-08-07 01:09:47 +02:00
" principals " : orjson . dumps ( [ iago . id , aaron . id , cordelia . id , hamlet . id ] ) . decode ( ) ,
2021-02-12 08:20:45 +01:00
" stream_post_policy " : " 1 " ,
2019-07-24 08:47:38 +02:00
}
response = self . client_post ( " /json/users/me/subscriptions " , data )
final_message_count = Message . objects . count ( )
final_usermessage_count = UserMessage . objects . count ( )
expected_response = {
" result " : " success " ,
" msg " : " " ,
" subscribed " : {
" AARON@zulip.com " : [ " brand new stream " ] ,
" cordelia@zulip.com " : [ " brand new stream " ] ,
" hamlet@zulip.com " : [ " brand new stream " ] ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" iago@zulip.com " : [ " brand new stream " ] ,
2019-07-24 08:47:38 +02:00
} ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" already_subscribed " : { } ,
2019-07-24 08:47:38 +02:00
}
self . assertEqual ( response . status_code , 200 )
2020-10-30 01:18:43 +01:00
self . assertEqual ( orjson . loads ( response . content ) , expected_response )
2019-07-24 08:47:38 +02:00
# 2 messages should be created, one in announce and one in the new stream itself.
self . assertEqual ( final_message_count - initial_message_count , 2 )
# 4 UserMessages per subscriber: One for each of the subscribers, plus 1 for
# each user in the notifications stream.
2019-12-06 00:27:21 +01:00
announce_stream_subs = Subscription . objects . filter ( recipient = announce_stream . recipient )
2021-02-12 08:19:30 +01:00
self . assertEqual (
final_usermessage_count - initial_usermessage_count , 4 + announce_stream_subs . count ( )
)
2019-07-24 08:47:38 +02:00
2022-03-07 15:12:24 +01:00
def get_unread_stream_data ( user : UserProfile ) - > List [ UnreadStreamInfo ] :
2019-07-24 08:47:38 +02:00
raw_unread_data = get_raw_unread_data ( user )
aggregated_data = aggregate_unread_data ( raw_unread_data )
2021-02-12 08:20:45 +01:00
return aggregated_data [ " streams " ]
2019-07-24 08:47:38 +02:00
2021-02-12 08:20:45 +01:00
stream_id = Stream . objects . get ( name = " brand new stream " ) . id
2019-07-24 08:47:38 +02:00
iago_unread_messages = get_unread_stream_data ( iago )
hamlet_unread_messages = get_unread_stream_data ( hamlet )
# The stream creation messages should be unread for Hamlet
2021-05-17 05:41:32 +02:00
self . assert_length ( hamlet_unread_messages , 2 )
2019-07-24 08:47:38 +02:00
# According to the code in zerver/views/streams/add_subscriptions_backend
# the notification stream message is sent first, then the new stream's message.
2021-02-12 08:20:45 +01:00
self . assertEqual ( hamlet_unread_messages [ 1 ] [ " stream_id " ] , stream_id )
2019-07-24 08:47:38 +02:00
# But it should be marked as read for Iago, the stream creator.
2021-05-17 05:41:32 +02:00
self . assert_length ( iago_unread_messages , 0 )
2019-07-24 08:47:38 +02:00
2021-02-12 08:19:30 +01:00
2016-09-20 02:40:52 +02:00
class RecipientTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_recipient ( self ) - > None :
2021-02-12 08:20:45 +01:00
realm = get_realm ( " zulip " )
stream = get_stream ( " Verona " , realm )
2016-09-20 02:40:52 +02:00
recipient = Recipient . objects . get (
type_id = stream . id ,
type = Recipient . STREAM ,
)
2021-02-12 08:20:45 +01:00
self . assertEqual ( str ( recipient ) , f " <Recipient: Verona ( { stream . id } , { Recipient . STREAM } )> " )
2016-09-20 02:40:52 +02:00
2021-02-12 08:19:30 +01:00
2016-08-23 02:08:42 +02:00
class StreamAdminTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_make_stream_public ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2021-02-12 08:20:45 +01:00
self . make_stream ( " private_stream_1 " , invite_only = True )
self . make_stream ( " private_stream_2 " , invite_only = True )
2014-01-29 22:03:40 +01:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2014-01-29 22:03:40 +01:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( False ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " private_stream_1 " , user_profile . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Invalid stream id " )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " private_stream_1 " )
2017-10-08 21:16:51 +02:00
self . assertFalse ( stream . is_in_zephyr_realm )
2014-01-29 22:03:40 +01:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2014-01-29 22:03:40 +01:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( False ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2016-10-20 00:50:09 +02:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
stream = get_stream ( " private_stream_1 " , realm )
2020-07-13 16:13:28 +02:00
self . assertFalse ( stream . invite_only )
self . assertTrue ( stream . history_public_to_subscribers )
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Private, protected history** to **Public**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
history_public_to_subscribers_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert history_public_to_subscribers_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " history_public_to_subscribers " ,
}
) . decode ( )
self . assertEqual ( history_public_to_subscribers_log . extra_data , expected_extra_data )
invite_only_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . order_by ( " -id " ) [ 1 ]
assert invite_only_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : True ,
RealmAuditLog . NEW_VALUE : False ,
" property " : " invite_only " ,
}
) . decode ( )
self . assertEqual ( invite_only_log . extra_data , expected_extra_data )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2020-07-13 16:13:28 +02:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( False ) . decode ( ) ,
2020-07-13 16:13:28 +02:00
}
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " private_stream_2 " )
2020-07-13 16:13:28 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assertTrue ( stream . invite_only )
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2021-02-12 08:20:45 +01:00
sub = get_subscription ( " private_stream_2 " , user_profile )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " private_stream_2 " , realm )
2014-01-29 22:03:40 +01:00
self . assertFalse ( stream . invite_only )
2018-04-27 01:00:26 +02:00
self . assertTrue ( stream . history_public_to_subscribers )
2014-01-29 22:03:40 +01:00
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Private, protected history** to **Public**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
history_public_to_subscribers_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert history_public_to_subscribers_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " history_public_to_subscribers " ,
}
) . decode ( )
self . assertEqual ( history_public_to_subscribers_log . extra_data , expected_extra_data )
invite_only_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . order_by ( " -id " ) [ 1 ]
assert invite_only_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : True ,
RealmAuditLog . NEW_VALUE : False ,
" property " : " invite_only " ,
}
) . decode ( )
self . assertEqual ( invite_only_log . extra_data , expected_extra_data )
2017-11-05 10:51:25 +01:00
def test_make_stream_private ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
self . make_stream ( " public_stream_1 " , realm = realm )
self . make_stream ( " public_stream_2 " )
2014-01-29 22:03:40 +01:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2014-01-29 22:03:40 +01:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( True ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2021-12-11 00:41:25 +01:00
stream_id = self . subscribe ( user_profile , " public_stream_1 " ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " public_stream_1 " , realm )
2020-07-13 16:13:28 +02:00
self . assertTrue ( stream . invite_only )
self . assertFalse ( stream . history_public_to_subscribers )
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Public** to **Private, protected history**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
history_public_to_subscribers_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert history_public_to_subscribers_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : True ,
RealmAuditLog . NEW_VALUE : False ,
" property " : " history_public_to_subscribers " ,
}
) . decode ( )
self . assertEqual ( history_public_to_subscribers_log . extra_data , expected_extra_data )
invite_only_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . order_by ( " -id " ) [ 1 ]
assert invite_only_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " invite_only " ,
}
) . decode ( )
self . assertEqual ( invite_only_log . extra_data , expected_extra_data )
2021-02-12 08:20:45 +01:00
default_stream = self . make_stream ( " default_stream " , realm = realm )
2020-08-08 18:30:28 +02:00
do_add_default_stream ( default_stream )
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( True ) . decode ( ) ,
2020-08-08 18:30:28 +02:00
}
result = self . client_patch ( f " /json/streams/ { default_stream . id } " , params )
self . assert_json_error ( result , " Default streams cannot be made private. " )
self . assertFalse ( default_stream . invite_only )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2020-07-13 16:13:28 +02:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( True ) . decode ( ) ,
2020-07-13 16:13:28 +02:00
}
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " public_stream_2 " )
2020-07-13 16:13:28 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assertFalse ( stream . invite_only )
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2021-02-12 08:20:45 +01:00
sub = get_subscription ( " public_stream_2 " , user_profile )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " public_stream_2 " , realm )
2014-01-29 22:03:40 +01:00
self . assertTrue ( stream . invite_only )
2018-04-27 01:00:26 +02:00
self . assertFalse ( stream . history_public_to_subscribers )
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Public** to **Private, protected history**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
history_public_to_subscribers_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert history_public_to_subscribers_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : True ,
RealmAuditLog . NEW_VALUE : False ,
" property " : " history_public_to_subscribers " ,
}
) . decode ( )
self . assertEqual ( history_public_to_subscribers_log . extra_data , expected_extra_data )
invite_only_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . order_by ( " -id " ) [ 1 ]
assert invite_only_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " invite_only " ,
}
) . decode ( )
self . assertEqual ( invite_only_log . extra_data , expected_extra_data )
2021-05-20 10:50:17 +02:00
def test_create_web_public_stream ( self ) - > None :
user_profile = self . example_user ( " hamlet " )
owner = self . example_user ( " desdemona " )
stream_names = [ " new1 " , " new2 " , " new3 " ]
stream_descriptions = [ " des1 " , " des2 " , " des3 " ]
streams_raw : List [ StreamDict ] = [
{ " name " : stream_name , " description " : stream_description , " is_web_public " : True }
for ( stream_name , stream_description ) in zip ( stream_names , stream_descriptions )
]
2021-10-04 09:03:01 +02:00
self . assertFalse ( user_profile . can_create_web_public_streams ( ) )
self . assertTrue ( owner . can_create_web_public_streams ( ) )
2021-10-04 09:56:16 +02:00
# As per create_web_public_stream_policy, only owners can create web-public streams by default.
with self . assertRaisesRegex ( JsonableError , " Insufficient permission " ) :
2021-05-20 10:50:17 +02:00
list_to_streams (
streams_raw ,
user_profile ,
autocreate = True ,
)
with self . settings ( WEB_PUBLIC_STREAMS_ENABLED = False ) :
2021-10-04 09:03:01 +02:00
self . assertFalse ( user_profile . can_create_web_public_streams ( ) )
self . assertFalse ( owner . can_create_web_public_streams ( ) )
2022-01-29 00:54:13 +01:00
with self . assertRaisesRegex ( JsonableError , " Web-public streams are not enabled. " ) :
2021-05-20 10:50:17 +02:00
list_to_streams (
streams_raw ,
owner ,
autocreate = True ,
)
existing_streams , new_streams = list_to_streams (
streams_raw ,
owner ,
autocreate = True ,
)
self . assert_length ( new_streams , 3 )
self . assert_length ( existing_streams , 0 )
actual_stream_names = { stream . name for stream in new_streams }
self . assertEqual ( actual_stream_names , set ( stream_names ) )
actual_stream_descriptions = { stream . description for stream in new_streams }
self . assertEqual ( actual_stream_descriptions , set ( stream_descriptions ) )
for stream in new_streams :
self . assertTrue ( stream . is_web_public )
2018-04-27 01:00:26 +02:00
def test_make_stream_public_zephyr_mirror ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . mit_user ( " starnine " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-04-27 01:00:26 +02:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
self . make_stream ( " target_stream " , realm = realm , invite_only = True )
self . subscribe ( user_profile , " target_stream " )
2018-04-27 01:00:26 +02:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2018-04-27 01:00:26 +02:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( False ) . decode ( ) ,
2018-04-27 01:00:26 +02:00
}
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " target_stream " , realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , params , subdomain = " zephyr " )
2018-04-27 01:00:26 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " target_stream " , realm )
2018-04-27 01:00:26 +02:00
self . assertFalse ( stream . invite_only )
self . assertFalse ( stream . history_public_to_subscribers )
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Private, protected history** to **Public, protected history**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : True ,
RealmAuditLog . NEW_VALUE : False ,
" property " : " invite_only " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2018-04-27 01:00:26 +02:00
def test_make_stream_private_with_public_history ( self ) - > None :
2021-12-13 23:57:37 +01:00
# Convert a public stream to a private stream with shared history
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-04-27 01:00:26 +02:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
self . make_stream ( " public_history_stream " , realm = realm )
2018-04-27 01:00:26 +02:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2018-04-27 01:00:26 +02:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( True ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
2018-04-27 01:00:26 +02:00
}
2021-12-11 00:41:25 +01:00
stream_id = self . subscribe ( user_profile , " public_history_stream " ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2018-04-27 01:00:26 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " public_history_stream " , realm )
2018-04-27 01:00:26 +02:00
self . assertTrue ( stream . invite_only )
self . assertTrue ( stream . history_public_to_subscribers )
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Public** to **Private, shared history**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " invite_only " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
# Convert a private stream with protected history to a private stream
# with shared history.
self . make_stream (
" private_protected_stream " ,
realm = realm ,
invite_only = True ,
history_public_to_subscribers = False ,
)
params = {
" is_private " : orjson . dumps ( True ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
}
stream_id = self . subscribe ( user_profile , " private_protected_stream " ) . id
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
self . assert_json_success ( result )
stream = get_stream ( " private_protected_stream " , realm )
self . assertTrue ( stream . invite_only )
self . assertTrue ( stream . history_public_to_subscribers )
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Private, protected history** to **Private, shared history**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " history_public_to_subscribers " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2020-11-10 14:15:04 +01:00
def test_make_stream_web_public ( self ) - > None :
user_profile = self . example_user ( " hamlet " )
self . login_user ( user_profile )
realm = user_profile . realm
self . make_stream ( " test_stream " , realm = realm )
2021-12-11 00:41:25 +01:00
stream_id = self . subscribe ( user_profile , " test_stream " ) . id
2020-11-10 14:15:04 +01:00
params = {
" is_web_public " : orjson . dumps ( True ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
}
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2021-11-23 11:30:08 +01:00
do_set_realm_property (
realm , " create_web_public_stream_policy " , Realm . POLICY_OWNERS_ONLY , acting_user = None
)
2020-11-10 14:15:04 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2021-11-23 11:30:08 +01:00
self . assert_json_error ( result , " Insufficient permission " )
do_set_realm_property (
realm , " create_web_public_stream_policy " , Realm . POLICY_NOBODY , acting_user = None
)
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_OWNER , acting_user = None )
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
self . assert_json_error ( result , " Insufficient permission " )
2020-11-10 14:15:04 +01:00
2021-11-23 11:30:08 +01:00
do_set_realm_property (
realm , " create_web_public_stream_policy " , Realm . POLICY_OWNERS_ONLY , acting_user = None
)
2020-11-10 14:15:04 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_OWNER , acting_user = None )
with self . settings ( WEB_PUBLIC_STREAMS_ENABLED = False ) :
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2022-01-29 00:54:13 +01:00
self . assert_json_error ( result , " Web-public streams are not enabled. " )
2020-11-10 14:15:04 +01:00
bad_params = {
" is_web_public " : orjson . dumps ( True ) . decode ( ) ,
" is_private " : orjson . dumps ( True ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
}
result = self . client_patch ( f " /json/streams/ { stream_id } " , bad_params )
self . assert_json_error ( result , " Invalid parameters " )
bad_params = {
" is_web_public " : orjson . dumps ( True ) . decode ( ) ,
" is_private " : orjson . dumps ( False ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( False ) . decode ( ) ,
}
result = self . client_patch ( f " /json/streams/ { stream_id } " , bad_params )
self . assert_json_error ( result , " Invalid parameters " )
stream = get_stream ( " test_stream " , realm )
self . assertFalse ( stream . is_web_public )
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
self . assert_json_success ( result )
stream = get_stream ( " test_stream " , realm )
self . assertTrue ( stream . is_web_public )
self . assertFalse ( stream . invite_only )
self . assertTrue ( stream . history_public_to_subscribers )
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
2022-01-29 00:54:13 +01:00
" for this stream from **Public** to **Web-public**. "
2021-12-11 00:41:25 +01:00
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-13 23:57:37 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : False ,
RealmAuditLog . NEW_VALUE : True ,
" property " : " is_web_public " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2022-03-23 05:09:26 +01:00
def test_stream_permission_changes_updates_updates_attachments ( self ) - > None :
self . login ( " desdemona " )
fp = StringIO ( " zulip! " )
fp . name = " zulip.txt "
result = self . client_post ( " /json/user_uploads " , { " file " : fp } )
uri = result . json ( ) [ " uri " ]
self . assert_json_success ( result )
owner = self . example_user ( " desdemona " )
realm = owner . realm
stream = self . make_stream ( " test_stream " , realm = realm )
self . subscribe ( owner , " test_stream " )
body = f " First message ...[zulip.txt](http:// { realm . host } " + uri + " ) "
msg_id = self . send_stream_message ( owner , " test_stream " , body , " test " )
attachment = Attachment . objects . get ( messages__id = msg_id )
self . assertFalse ( stream . is_web_public )
self . assertFalse ( attachment . is_web_public )
self . assertFalse ( stream . invite_only )
self . assertTrue ( attachment . is_realm_public )
params = {
" is_private " : orjson . dumps ( True ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
}
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assert_json_success ( result )
attachment . refresh_from_db ( )
stream . refresh_from_db ( )
self . assertFalse ( stream . is_web_public )
self . assertFalse ( attachment . is_web_public )
self . assertTrue ( stream . invite_only )
self . assertIsNone ( attachment . is_realm_public )
cordelia = self . example_user ( " cordelia " )
self . assertFalse ( validate_attachment_request ( cordelia , attachment . path_id ) )
self . assertTrue ( validate_attachment_request ( owner , attachment . path_id ) )
attachment . refresh_from_db ( )
self . assertFalse ( attachment . is_realm_public )
self . assertFalse ( validate_attachment_request_for_spectator_access ( realm , attachment ) )
params = {
" is_private " : orjson . dumps ( False ) . decode ( ) ,
" is_web_public " : orjson . dumps ( True ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
}
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assert_json_success ( result )
attachment . refresh_from_db ( )
stream . refresh_from_db ( )
self . assertFalse ( stream . invite_only )
self . assertTrue ( stream . is_web_public )
self . assertIsNone ( attachment . is_realm_public )
self . assertIsNone ( attachment . is_web_public )
self . assertTrue ( validate_attachment_request_for_spectator_access ( realm , attachment ) )
attachment . refresh_from_db ( )
self . assertTrue ( attachment . is_web_public )
self . assertIsNone ( attachment . is_realm_public )
self . assertTrue ( validate_attachment_request ( cordelia , attachment . path_id ) )
attachment . refresh_from_db ( )
self . assertTrue ( attachment . is_realm_public )
params = {
" is_private " : orjson . dumps ( False ) . decode ( ) ,
" is_web_public " : orjson . dumps ( False ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( True ) . decode ( ) ,
}
result = self . client_patch ( f " /json/streams/ { stream . id } " , params )
self . assert_json_success ( result )
attachment . refresh_from_db ( )
stream . refresh_from_db ( )
self . assertIsNone ( attachment . is_web_public )
self . assertFalse ( stream . invite_only )
self . assertTrue ( attachment . is_realm_public )
self . assertFalse ( validate_attachment_request_for_spectator_access ( realm , attachment ) )
attachment . refresh_from_db ( )
stream . refresh_from_db ( )
self . assertFalse ( attachment . is_web_public )
# Verify moving a message to another public stream doesn't reset cache.
new_stream = self . make_stream ( " new_stream " , realm = realm )
self . subscribe ( owner , " new_stream " )
result = self . client_patch (
" /json/messages/ " + str ( msg_id ) ,
{
" stream_id " : new_stream . id ,
" propagate_mode " : " change_all " ,
} ,
)
self . assert_json_success ( result )
attachment . refresh_from_db ( )
self . assertFalse ( attachment . is_web_public )
self . assertTrue ( attachment . is_realm_public )
# Verify moving a message to a private stream
private_stream = self . make_stream ( " private_stream " , realm = realm , invite_only = True )
self . subscribe ( owner , " private_stream " )
result = self . client_patch (
" /json/messages/ " + str ( msg_id ) ,
{
" stream_id " : private_stream . id ,
" propagate_mode " : " change_all " ,
} ,
)
self . assert_json_success ( result )
attachment . refresh_from_db ( )
self . assertFalse ( attachment . is_web_public )
self . assertIsNone ( attachment . is_realm_public )
self . assertFalse ( validate_attachment_request ( cordelia , attachment . path_id ) )
self . assertTrue ( validate_attachment_request ( owner , attachment . path_id ) )
attachment . refresh_from_db ( )
self . assertFalse ( attachment . is_realm_public )
# Verify moving a message to a web-public stream
web_public_stream = self . make_stream ( " web_public_stream " , realm = realm , is_web_public = True )
result = self . client_patch (
" /json/messages/ " + str ( msg_id ) ,
{
" stream_id " : web_public_stream . id ,
" propagate_mode " : " change_all " ,
} ,
)
self . assert_json_success ( result )
attachment . refresh_from_db ( )
self . assertIsNone ( attachment . is_web_public )
self . assertIsNone ( attachment . is_realm_public )
self . assertTrue ( validate_attachment_request_for_spectator_access ( realm , attachment ) )
attachment . refresh_from_db ( )
self . assertTrue ( attachment . is_web_public )
2018-04-27 01:00:26 +02:00
def test_try_make_stream_public_with_private_history ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-04-27 01:00:26 +02:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
self . make_stream ( " public_stream " , realm = realm )
2018-04-27 01:00:26 +02:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2018-04-27 01:00:26 +02:00
params = {
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( False ) . decode ( ) ,
" history_public_to_subscribers " : orjson . dumps ( False ) . decode ( ) ,
2018-04-27 01:00:26 +02:00
}
2021-12-11 00:41:25 +01:00
stream_id = self . subscribe ( user_profile , " public_stream " ) . id
2020-06-13 08:59:37 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , params )
2018-04-27 01:00:26 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " public_stream " , realm )
2018-04-27 01:00:26 +02:00
self . assertFalse ( stream . invite_only )
self . assertTrue ( stream . history_public_to_subscribers )
2014-01-29 22:03:40 +01:00
2021-12-11 00:41:25 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
# This test verifies the (weird) outcome for a transition that
# is not currently possible. For background, we only support
# public streams with private history if
# is_zephyr_mirror_realm, and don't allow changing stream
# permissions in such realms. So changing the
# history_public_to_subscribers property of a public stream is
# not possible in Zulip today; this test covers that situation
# and will produce the odd/wrong output of "Public to Public".
#
# This test should be corrected if we add support for such a
# stream configuration transition.
expected_notification = (
f " @_**King Hamlet| { user_profile . id } ** changed the [access permissions](/help/stream-permissions) "
" for this stream from **Public** to **Public**. "
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-03-27 03:01:37 +01:00
def test_subscriber_ids_with_stream_history_access ( self ) - > None :
hamlet = self . example_user ( " hamlet " )
polonius = self . example_user ( " polonius " )
stream1 = self . make_stream (
" history_private_stream " , invite_only = True , history_public_to_subscribers = False
)
self . subscribe ( hamlet , stream1 . name )
self . subscribe ( polonius , stream1 . name )
self . assertEqual ( set ( ) , subscriber_ids_with_stream_history_access ( stream1 ) )
stream2 = self . make_stream (
" history_public_web_private_stream " ,
invite_only = True ,
is_web_public = False ,
history_public_to_subscribers = True ,
)
self . subscribe ( hamlet , stream2 . name )
self . subscribe ( polonius , stream2 . name )
2021-04-17 18:24:02 +02:00
self . assertEqual (
{ hamlet . id , polonius . id } , subscriber_ids_with_stream_history_access ( stream2 )
)
2021-03-27 03:01:37 +01:00
stream3 = self . make_stream (
" history_public_web_public_stream " ,
is_web_public = True ,
history_public_to_subscribers = True ,
)
self . subscribe ( hamlet , stream3 . name )
self . subscribe ( polonius , stream3 . name )
self . assertEqual (
{ hamlet . id , polonius . id } , subscriber_ids_with_stream_history_access ( stream3 )
)
2021-04-17 18:24:02 +02:00
stream4 = self . make_stream (
" regular_public_stream " ,
)
self . subscribe ( hamlet , stream4 . name )
self . subscribe ( polonius , stream4 . name )
self . assertEqual (
{ hamlet . id , polonius . id } , subscriber_ids_with_stream_history_access ( stream4 )
)
2017-11-05 10:51:25 +01:00
def test_deactivate_stream_backend ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " new_stream_1 " )
2017-08-25 06:01:29 +02:00
self . subscribe ( user_profile , stream . name )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/streams/ { stream . id } " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2021-02-12 08:19:30 +01:00
subscription_exists = (
2021-04-17 13:29:55 +02:00
get_active_subscriptions_for_stream_id ( stream . id , include_deactivated_users = True )
2021-02-12 08:19:30 +01:00
. filter (
user_profile = user_profile ,
)
. exists ( )
)
2014-01-29 22:03:40 +01:00
self . assertFalse ( subscription_exists )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " new_stream_2 " )
2020-07-13 16:13:28 +02:00
self . subscribe ( user_profile , stream . name )
sub = get_subscription ( stream . name , user_profile )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/streams/ { stream . id } " )
2020-07-13 16:13:28 +02:00
self . assert_json_success ( result )
2021-02-12 08:19:30 +01:00
subscription_exists = (
2021-04-17 13:29:55 +02:00
get_active_subscriptions_for_stream_id ( stream . id , include_deactivated_users = True )
2021-02-12 08:19:30 +01:00
. filter (
user_profile = user_profile ,
)
. exists ( )
)
2020-07-13 16:13:28 +02:00
self . assertFalse ( subscription_exists )
2017-11-05 10:51:25 +01:00
def test_deactivate_stream_removes_default_stream ( self ) - > None :
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " new_stream " )
2017-02-18 18:01:00 +01:00
do_add_default_stream ( stream )
2017-09-17 19:53:38 +02:00
self . assertEqual ( 1 , DefaultStream . objects . filter ( stream_id = stream . id ) . count ( ) )
2021-04-02 17:49:36 +02:00
do_deactivate_stream ( stream , acting_user = None )
2017-09-17 19:53:38 +02:00
self . assertEqual ( 0 , DefaultStream . objects . filter ( stream_id = stream . id ) . count ( ) )
2017-02-18 18:01:00 +01:00
2020-04-26 21:42:31 +02:00
def test_deactivate_stream_removes_stream_from_default_stream_groups ( self ) - > None :
2021-02-12 08:20:45 +01:00
realm = get_realm ( " zulip " )
2020-04-26 21:42:31 +02:00
streams_to_keep = [ ]
for stream_name in [ " stream1 " , " stream2 " ] :
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( realm , stream_name , acting_user = None )
2020-04-26 21:42:31 +02:00
streams_to_keep . append ( stream )
streams_to_remove = [ ]
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( realm , " stream3 " , acting_user = None )
2020-04-26 21:42:31 +02:00
streams_to_remove . append ( stream )
all_streams = streams_to_keep + streams_to_remove
def get_streams ( group : DefaultStreamGroup ) - > List [ Stream ] :
2021-02-12 08:20:45 +01:00
return list ( group . streams . all ( ) . order_by ( " name " ) )
2020-04-26 21:42:31 +02:00
group_name = " group1 "
description = " This is group1 "
do_create_default_stream_group ( realm , group_name , description , all_streams )
default_stream_groups = get_default_stream_groups ( realm )
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , all_streams )
2021-04-02 17:49:36 +02:00
do_deactivate_stream ( streams_to_remove [ 0 ] , acting_user = None )
2020-04-26 21:42:31 +02:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , streams_to_keep )
2020-08-06 18:21:42 +02:00
def test_deactivate_stream_marks_messages_as_read ( self ) - > None :
hamlet = self . example_user ( " hamlet " )
cordelia = self . example_user ( " cordelia " )
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " new_stream " )
2020-08-06 18:21:42 +02:00
self . subscribe ( hamlet , stream . name )
self . subscribe ( cordelia , stream . name )
self . subscribe ( hamlet , " Denmark " )
self . subscribe ( cordelia , " Denmark " )
self . send_stream_message ( hamlet , stream . name )
new_stream_usermessage = most_recent_usermessage ( cordelia )
# We send a message to a different stream too, to verify that the
# deactivation of new_stream won't corrupt read state of UserMessage elsewhere.
self . send_stream_message ( hamlet , " Denmark " )
denmark_usermessage = most_recent_usermessage ( cordelia )
self . assertFalse ( new_stream_usermessage . flags . read )
self . assertFalse ( denmark_usermessage . flags . read )
2021-12-12 20:21:02 +01:00
with self . captureOnCommitCallbacks ( execute = True ) :
do_deactivate_stream ( stream , acting_user = None )
2020-08-06 18:21:42 +02:00
new_stream_usermessage . refresh_from_db ( )
denmark_usermessage . refresh_from_db ( )
self . assertTrue ( new_stream_usermessage . flags . read )
self . assertFalse ( denmark_usermessage . flags . read )
2017-11-05 10:51:25 +01:00
def test_vacate_private_stream_removes_default_stream ( self ) - > None :
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " new_stream " , invite_only = True )
2017-08-25 06:01:29 +02:00
self . subscribe ( self . example_user ( " hamlet " ) , stream . name )
2017-06-04 19:32:41 +02:00
do_add_default_stream ( stream )
2017-09-17 19:53:38 +02:00
self . assertEqual ( 1 , DefaultStream . objects . filter ( stream_id = stream . id ) . count ( ) )
2017-08-25 06:23:11 +02:00
self . unsubscribe ( self . example_user ( " hamlet " ) , stream . name )
2017-09-17 19:53:38 +02:00
self . assertEqual ( 0 , DefaultStream . objects . filter ( stream_id = stream . id ) . count ( ) )
2017-06-04 19:32:41 +02:00
# Fetch stream again from database.
stream = Stream . objects . get ( id = stream . id )
self . assertTrue ( stream . deactivated )
2017-11-05 10:51:25 +01:00
def test_deactivate_stream_backend_requires_existing_stream ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2021-02-12 08:20:45 +01:00
self . make_stream ( " new_stream " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2016-07-16 18:50:41 +02:00
2021-02-12 08:20:45 +01:00
result = self . client_delete ( " /json/streams/999999999 " )
self . assert_json_error ( result , " Invalid stream id " )
2016-07-16 18:50:41 +02:00
2020-07-13 16:13:28 +02:00
def test_deactivate_stream_backend_requires_admin ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " new_stream " )
sub = get_subscription ( " new_stream " , user_profile )
2020-07-13 16:13:28 +02:00
self . assertFalse ( sub . is_stream_admin )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/streams/ { stream . id } " )
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_private_stream_live_updates ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2016-11-04 07:02:24 +01:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2016-11-04 07:02:24 +01:00
2021-02-12 08:20:45 +01:00
self . make_stream ( " private_stream " , invite_only = True )
self . subscribe ( user_profile , " private_stream " )
self . subscribe ( self . example_user ( " cordelia " ) , " private_stream " )
2016-11-04 07:02:24 +01:00
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-12-14 20:08:48 +01:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " private_stream " , user_profile . realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-12 19:50:08 +02:00
{ " description " : " Test description " } ,
2021-02-12 08:19:30 +01:00
)
2016-11-04 07:02:24 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
cordelia = self . example_user ( " cordelia " )
prospero = self . example_user ( " prospero " )
2016-11-04 07:02:24 +01:00
2021-12-14 20:08:48 +01:00
notified_user_ids = set ( events [ 0 ] [ " users " ] )
2016-11-04 07:02:24 +01:00
self . assertIn ( user_profile . id , notified_user_ids )
self . assertIn ( cordelia . id , notified_user_ids )
self . assertNotIn ( prospero . id , notified_user_ids )
2021-05-27 15:53:22 +02:00
# Three events should be sent: a name event, an email address event and a notification event
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " private_stream " , user_profile . realm ) . id
2021-05-10 19:42:11 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " whatever " } )
2016-11-04 07:02:24 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
notified_user_ids = set ( events [ 0 ] [ " users " ] )
2016-11-04 07:02:24 +01:00
self . assertIn ( user_profile . id , notified_user_ids )
self . assertIn ( cordelia . id , notified_user_ids )
self . assertNotIn ( prospero . id , notified_user_ids )
2021-02-12 08:20:45 +01:00
notified_with_bot_users = events [ - 1 ] [ " users " ]
2019-01-05 12:47:38 +01:00
notified_with_bot_user_ids = [ ]
2021-02-12 08:20:45 +01:00
notified_with_bot_user_ids . append ( notified_with_bot_users [ 0 ] [ " id " ] )
notified_with_bot_user_ids . append ( notified_with_bot_users [ 1 ] [ " id " ] )
2019-01-05 12:47:38 +01:00
self . assertIn ( user_profile . id , notified_with_bot_user_ids )
self . assertIn ( cordelia . id , notified_with_bot_user_ids )
self . assertNotIn ( prospero . id , notified_with_bot_user_ids )
2017-11-05 10:51:25 +01:00
def test_rename_stream ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " stream_name1 " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2014-01-29 22:03:40 +01:00
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " stream_name1 " } )
2017-01-30 04:44:40 +01:00
self . assert_json_error ( result , " Stream already has that name! " )
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " Denmark " } )
2018-01-08 19:54:19 +01:00
self . assert_json_error ( result , " Stream name ' Denmark ' is already taken. " )
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " denmark " } )
2018-01-08 19:54:19 +01:00
self . assert_json_error ( result , " Stream name ' denmark ' is already taken. " )
2017-01-30 04:44:40 +01:00
2017-02-01 23:20:46 +01:00
# Do a rename that is case-only--this should succeed.
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " sTREAm_name1 " } )
2017-02-01 23:20:46 +01:00
self . assert_json_success ( result )
2021-05-28 10:47:43 +02:00
# Three events should be sent: stream_email update, stream_name update and notification message.
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_name1 " , user_profile . realm ) . id
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " stream_name2 " } )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
event = events [ 1 ] [ " event " ]
2021-02-12 08:19:30 +01:00
self . assertEqual (
event ,
dict (
2021-02-12 08:20:45 +01:00
op = " update " ,
type = " stream " ,
property = " name " ,
value = " stream_name2 " ,
2021-02-12 08:19:30 +01:00
stream_id = stream_id ,
2021-02-12 08:20:45 +01:00
name = " sTREAm_name1 " ,
2021-02-12 08:19:30 +01:00
) ,
)
2021-02-12 08:20:45 +01:00
notified_user_ids = set ( events [ 1 ] [ " users " ] )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
self . assertRaises ( Stream . DoesNotExist , get_stream , " stream_name1 " , realm )
2017-03-23 07:22:28 +01:00
2021-02-12 08:20:45 +01:00
stream_name2_exists = get_stream ( " stream_name2 " , realm )
2014-01-29 22:03:40 +01:00
self . assertTrue ( stream_name2_exists )
2018-06-03 19:11:52 +02:00
self . assertEqual ( notified_user_ids , set ( active_non_guest_user_ids ( realm . id ) ) )
2021-02-12 08:19:30 +01:00
self . assertIn ( user_profile . id , notified_user_ids )
2021-02-12 08:20:45 +01:00
self . assertIn ( self . example_user ( " prospero " ) . id , notified_user_ids )
self . assertNotIn ( self . example_user ( " polonius " ) . id , notified_user_ids )
2016-11-04 07:02:24 +01:00
2020-10-23 02:43:28 +02:00
# Test case to handle Unicode stream name change
# *NOTE: Here encoding is needed when Unicode string is passed as an argument*
2021-05-28 07:59:38 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name2_exists . id
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " नया नाम " } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
2020-10-23 02:43:28 +02:00
# While querying, system can handle Unicode strings.
2021-02-12 08:20:45 +01:00
stream_name_uni_exists = get_stream ( " नया नाम " , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_uni_exists )
2020-10-23 02:43:28 +02:00
# Test case to handle changing of Unicode stream name to newer name
2016-09-28 09:07:09 +02:00
# NOTE: Unicode string being part of URL is handled cleanly
# by client_patch call, encoding of URL is not needed.
2021-05-28 07:59:38 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name_uni_exists . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-08 18:41:54 +02:00
{ " new_name " : " नाम में क्या रक्खा हे " } ,
2021-02-12 08:19:30 +01:00
)
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
2020-10-23 02:43:28 +02:00
# While querying, system can handle Unicode strings.
2021-02-12 08:20:45 +01:00
self . assertRaises ( Stream . DoesNotExist , get_stream , " नया नाम " , realm )
2017-03-23 07:22:28 +01:00
2021-02-12 08:20:45 +01:00
stream_name_new_uni_exists = get_stream ( " नाम में क्या रक्खा हे " , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_new_uni_exists )
# Test case to change name from one language to other.
2021-05-28 07:59:38 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name_new_uni_exists . id
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " français " } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream_name_fr_exists = get_stream ( " français " , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_fr_exists )
# Test case to change name to mixed language name.
2021-05-28 07:59:38 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2016-12-30 11:42:59 +01:00
stream_id = stream_name_fr_exists . id
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " français name " } )
2016-09-28 09:07:09 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream_name_mixed_exists = get_stream ( " français name " , realm )
2016-09-28 09:07:09 +02:00
self . assertTrue ( stream_name_mixed_exists )
2018-04-13 02:42:30 +02:00
# Test case for notified users in private streams.
2021-02-12 08:19:30 +01:00
stream_private = self . make_stream (
2021-02-12 08:20:45 +01:00
" stream_private_name1 " , realm = user_profile . realm , invite_only = True
2021-02-12 08:19:30 +01:00
)
2021-02-12 08:20:45 +01:00
self . subscribe ( self . example_user ( " cordelia " ) , " stream_private_name1 " )
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_private_name1 " , realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-08 18:41:54 +02:00
{ " new_name " : " stream_private_name2 " } ,
2021-02-12 08:19:30 +01:00
)
2018-04-13 02:42:30 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
notified_user_ids = set ( events [ 1 ] [ " users " ] )
2018-04-13 02:42:30 +02:00
self . assertEqual ( notified_user_ids , can_access_stream_user_ids ( stream_private ) )
2021-02-12 08:20:45 +01:00
self . assertIn ( self . example_user ( " cordelia " ) . id , notified_user_ids )
2018-04-13 02:42:30 +02:00
# An important corner case is that all organization admins are notified.
2021-02-12 08:20:45 +01:00
self . assertIn ( self . example_user ( " iago " ) . id , notified_user_ids )
2018-04-13 02:42:30 +02:00
# The current user, Hamlet was made an admin and thus should be notified too.
self . assertIn ( user_profile . id , notified_user_ids )
2021-02-12 08:20:45 +01:00
self . assertNotIn ( self . example_user ( " prospero " ) . id , notified_user_ids )
2018-04-13 02:42:30 +02:00
2020-07-13 16:13:28 +02:00
# Test renaming of stream by stream admin.
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2021-02-12 08:20:45 +01:00
new_stream = self . make_stream ( " new_stream " , realm = user_profile . realm )
self . subscribe ( user_profile , " new_stream " )
sub = get_subscription ( " new_stream " , user_profile )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
new_stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { new_stream . id } " ,
2021-05-08 18:41:54 +02:00
{ " new_name " : " stream_rename " } ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream_rename_exists = get_stream ( " stream_rename " , realm )
2020-07-13 16:13:28 +02:00
self . assertTrue ( stream_rename_exists )
def test_rename_stream_requires_admin ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2021-02-12 08:20:45 +01:00
self . make_stream ( " stream_name1 " )
self . subscribe ( user_profile , " stream_name1 " )
2020-07-13 16:13:28 +02:00
2021-02-12 08:20:45 +01:00
sub = get_subscription ( " stream_name1 " , user_profile )
2020-07-13 16:13:28 +02:00
self . assertFalse ( sub . is_stream_admin )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_name1 " , user_profile . realm ) . id
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " stream_name2 " } )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2014-01-29 22:03:40 +01:00
2019-01-05 12:47:38 +01:00
def test_notify_on_stream_rename ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2021-02-12 08:20:45 +01:00
self . make_stream ( " stream_name1 " )
2019-01-05 12:47:38 +01:00
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " stream_name1 " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " stream_name2 " } )
2019-01-05 12:47:38 +01:00
self . assert_json_success ( result )
# Inspect the notification message sent
message = self . get_last_message ( )
actual_stream = Stream . objects . get ( id = message . recipient . type_id )
2021-02-12 08:20:45 +01:00
message_content = f " @_**King Hamlet| { user_profile . id } ** renamed stream **stream_name1** to **stream_name2**. "
self . assertEqual ( actual_stream . name , " stream_name2 " )
2019-07-24 07:38:56 +02:00
self . assertEqual ( actual_stream . realm_id , user_profile . realm_id )
2019-01-05 12:47:38 +01:00
self . assertEqual ( message . recipient . type , Recipient . STREAM )
self . assertEqual ( message . content , message_content )
2021-02-12 08:20:45 +01:00
self . assertEqual ( message . sender . email , " notification-bot@zulip.com " )
2019-07-24 07:38:56 +02:00
self . assertEqual ( message . sender . realm , get_realm ( settings . SYSTEM_BOT_REALM ) )
2019-01-05 12:47:38 +01:00
2018-02-12 16:02:19 +01:00
def test_realm_admin_can_update_unsub_private_stream ( self ) - > None :
2021-02-12 08:20:45 +01:00
iago = self . example_user ( " iago " )
hamlet = self . example_user ( " hamlet " )
2020-03-12 14:17:25 +01:00
2020-03-06 18:40:46 +01:00
self . login_user ( iago )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
iago ,
[ " private_stream " ] ,
dict ( principals = orjson . dumps ( [ hamlet . id ] ) . decode ( ) ) ,
invite_only = True ,
)
2018-02-12 16:02:19 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " private_stream " , iago . realm ) . id
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " new_private_stream " } )
2018-02-12 16:02:19 +01:00
self . assert_json_success ( result )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-12 19:50:08 +02:00
{ " description " : " new description " } ,
2021-02-12 08:19:30 +01:00
)
2018-02-12 16:02:19 +01:00
self . assert_json_success ( result )
2018-04-03 00:36:31 +02:00
# But cannot change stream type.
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-02-12 08:19:30 +01:00
{
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( True ) . decode ( ) ,
2021-02-12 08:19:30 +01:00
} ,
)
2018-02-12 16:02:19 +01:00
self . assert_json_error ( result , " Invalid stream id " )
2020-07-13 16:13:28 +02:00
def test_non_admin_cannot_access_unsub_private_stream ( self ) - > None :
2021-02-12 08:20:45 +01:00
iago = self . example_user ( " iago " )
hamlet = self . example_user ( " hamlet " )
2020-07-13 16:13:28 +02:00
self . login_user ( hamlet )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
hamlet ,
[ " private_stream_1 " ] ,
dict ( principals = orjson . dumps ( [ iago . id ] ) . decode ( ) ) ,
invite_only = True ,
)
2020-07-13 16:13:28 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " private_stream_1 " , hamlet . realm ) . id
2020-07-13 16:13:28 +02:00
2021-05-08 18:41:54 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " new_name " : " private_stream_2 " } )
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-12 19:50:08 +02:00
{ " description " : " new description " } ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-02-12 08:19:30 +01:00
{
2021-02-12 08:20:45 +01:00
" is_private " : orjson . dumps ( True ) . decode ( ) ,
2021-02-12 08:19:30 +01:00
} ,
)
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/streams/ { stream_id } " )
2020-07-13 16:13:28 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2017-11-05 10:51:25 +01:00
def test_change_stream_description ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " iago " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
self . subscribe ( user_profile , " stream_name1 " )
2014-01-29 22:03:40 +01:00
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-12-14 20:08:48 +01:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_name1 " , realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-10 19:42:11 +02:00
{ " description " : " Test description " } ,
2021-02-12 08:19:30 +01:00
)
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
event = events [ 0 ] [ " event " ]
2021-02-12 08:19:30 +01:00
self . assertEqual (
event ,
dict (
2021-02-12 08:20:45 +01:00
op = " update " ,
type = " stream " ,
property = " description " ,
value = " Test description " ,
rendered_description = " <p>Test description</p> " ,
2021-02-12 08:19:30 +01:00
stream_id = stream_id ,
2021-02-12 08:20:45 +01:00
name = " stream_name1 " ,
2021-02-12 08:19:30 +01:00
) ,
)
2021-02-12 08:20:45 +01:00
notified_user_ids = set ( events [ 0 ] [ " users " ] )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
2018-06-03 19:11:52 +02:00
self . assertEqual ( notified_user_ids , set ( active_non_guest_user_ids ( realm . id ) ) )
2021-02-12 08:19:30 +01:00
self . assertIn ( user_profile . id , notified_user_ids )
2021-02-12 08:20:45 +01:00
self . assertIn ( self . example_user ( " prospero " ) . id , notified_user_ids )
self . assertNotIn ( self . example_user ( " polonius " ) . id , notified_user_ids )
self . assertEqual ( " Test description " , stream . description )
2014-01-29 22:03:40 +01:00
2021-05-10 19:42:11 +02:00
result = self . client_patch ( f " /json/streams/ { stream_id } " , { " description " : " a " * 1025 } )
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " description is too long (limit: { Stream . MAX_DESCRIPTION_LENGTH } characters) " ,
)
2018-04-30 08:59:51 +02:00
2022-03-29 08:25:03 +02:00
result = self . client_patch (
f " /json/streams/ { stream_id } " ,
{ " description " : " " } ,
)
self . assert_json_success ( result )
stream = get_stream ( " stream_name1 " , realm )
self . assertEqual ( stream . description , " " )
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the description for this stream. \n \n "
" * **Old description:** \n "
" ```` quote \n "
" Test description \n "
" ```` \n "
" * **New description:** \n "
" ```` quote \n "
" *No description.* \n "
" ```` "
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
result = self . client_patch (
f " /json/streams/ { stream_id } " ,
{ " description " : " Test description " } ,
)
self . assert_json_success ( result )
stream = get_stream ( " stream_name1 " , realm )
self . assertEqual ( stream . description , " Test description " )
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the description for this stream. \n \n "
" * **Old description:** \n "
" ```` quote \n "
" *No description.* \n "
" ```` \n "
" * **New description:** \n "
" ```` quote \n "
" Test description \n "
" ```` "
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-10 19:42:11 +02:00
{ " description " : " a \n multi \n line \n description " } ,
2021-02-12 08:19:30 +01:00
)
2019-02-20 21:09:21 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
self . assertEqual ( stream . description , " a multi line description " )
2019-02-20 21:09:21 +01:00
2021-12-14 20:08:48 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
2022-01-08 01:54:42 +01:00
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the description for this stream. \n \n "
2022-01-14 04:42:59 +01:00
" * **Old description:** \n "
2022-03-09 22:55:07 +01:00
" ```` quote \n "
2021-12-14 20:08:48 +01:00
" Test description \n "
2022-03-09 22:55:07 +01:00
" ```` \n "
2022-01-14 04:42:59 +01:00
" * **New description:** \n "
2022-03-09 22:55:07 +01:00
" ```` quote \n "
2021-12-14 20:08:48 +01:00
" a multi line description \n "
2022-03-09 22:55:07 +01:00
" ```` "
2021-12-14 20:08:48 +01:00
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
2021-12-14 21:00:45 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : " Test description " ,
RealmAuditLog . NEW_VALUE : " a multi line description " ,
" property " : " description " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2019-03-01 02:00:40 +01:00
# Verify that we don't render inline URL previews in this code path.
with self . settings ( INLINE_URL_EMBED_PREVIEW = True ) :
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " ,
2021-05-10 19:42:11 +02:00
{ " description " : " See https://zulip.com/team " } ,
2021-02-12 08:19:30 +01:00
)
2019-03-01 02:00:40 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
2020-05-09 03:44:56 +02:00
self . assertEqual (
stream . rendered_description ,
2020-06-08 23:04:39 +02:00
' <p>See <a href= " https://zulip.com/team " >https://zulip.com/team</a></p> ' ,
2020-05-09 03:44:56 +02:00
)
2019-03-01 02:00:40 +01:00
2020-07-13 16:13:28 +02:00
# Test changing stream description by stream admin.
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2021-02-12 08:20:45 +01:00
sub = get_subscription ( " stream_name1 " , user_profile )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
2021-12-14 20:08:48 +01:00
stream_id = get_stream ( " stream_name1 " , realm ) . id
result = self . client_patch (
f " /json/streams/ { stream_id } " ,
{ " description " : " Test description " } ,
)
2020-07-13 16:13:28 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
self . assertEqual ( stream . description , " Test description " )
2020-07-13 16:13:28 +02:00
2021-12-14 20:08:48 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
2022-01-08 01:54:42 +01:00
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the description for this stream. \n \n "
2022-01-14 04:42:59 +01:00
" * **Old description:** \n "
2022-03-09 22:55:07 +01:00
" ```` quote \n "
2021-12-14 20:08:48 +01:00
" See https://zulip.com/team \n "
2022-03-09 22:55:07 +01:00
" ```` \n "
2022-01-14 04:42:59 +01:00
" * **New description:** \n "
2022-03-09 22:55:07 +01:00
" ```` quote \n "
2021-12-14 20:08:48 +01:00
" Test description \n "
2022-03-09 22:55:07 +01:00
" ```` "
2021-12-14 20:08:48 +01:00
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
2021-12-14 21:00:45 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : " See https://zulip.com/team " ,
RealmAuditLog . NEW_VALUE : " Test description " ,
" property " : " description " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2020-07-13 16:13:28 +02:00
def test_change_stream_description_requires_admin ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " stream_name1 " )
sub = get_subscription ( " stream_name1 " , user_profile )
2020-07-13 16:13:28 +02:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2021-04-08 02:41:57 +02:00
do_change_subscription_property (
user_profile , sub , stream , " role " , Subscription . ROLE_MEMBER , acting_user = None
)
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_name1 " , user_profile . realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-05-10 19:42:11 +02:00
f " /json/streams/ { stream_id } " , { " description " : " Test description " }
2021-02-12 08:19:30 +01:00
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2014-01-29 22:03:40 +01:00
2020-02-04 21:50:55 +01:00
def test_change_to_stream_post_policy_admins ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-05-12 07:25:42 +02:00
2021-02-12 08:20:45 +01:00
self . subscribe ( user_profile , " stream_name1 " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2018-05-12 07:25:42 +02:00
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_name1 " , user_profile . realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " , { " is_announcement_only " : orjson . dumps ( True ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2018-05-12 07:25:42 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , user_profile . realm )
2021-12-15 01:04:35 +01:00
self . assertEqual ( stream . stream_post_policy , Stream . STREAM_POST_POLICY_ADMINS )
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the "
" [posting permissions](/help/stream-sending-policy) for this stream: \n \n "
" * **Old permissions**: All stream members can post. \n "
" * **New permissions**: Only organization administrators can post. "
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
2018-05-12 07:25:42 +02:00
2021-12-15 01:27:14 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : Stream . STREAM_POST_POLICY_EVERYONE ,
RealmAuditLog . NEW_VALUE : Stream . STREAM_POST_POLICY_ADMINS ,
" property " : " stream_post_policy " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2020-07-13 16:13:28 +02:00
def test_change_stream_post_policy_requires_admin ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-05-12 07:25:42 +02:00
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " stream_name1 " )
sub = get_subscription ( " stream_name1 " , user_profile )
2020-07-13 16:13:28 +02:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2021-04-08 02:41:57 +02:00
do_change_subscription_property (
user_profile , sub , stream , " role " , Subscription . ROLE_MEMBER , acting_user = None
)
2018-05-12 07:25:42 +02:00
2021-03-01 11:33:24 +01:00
do_set_realm_property ( user_profile . realm , " waiting_period_threshold " , 10 , acting_user = None )
2020-02-04 21:50:55 +01:00
2020-04-07 20:43:04 +02:00
def test_non_admin ( how_old : int , is_new : bool , policy : int ) - > None :
user_profile . date_joined = timezone_now ( ) - timedelta ( days = how_old )
user_profile . save ( )
2021-02-24 20:39:28 +01:00
self . assertEqual ( user_profile . is_provisional_member , is_new )
2021-02-12 08:20:45 +01:00
stream_id = get_stream ( " stream_name1 " , user_profile . realm ) . id
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream_id } " , { " stream_post_policy " : orjson . dumps ( policy ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2021-02-12 08:19:30 +01:00
policies = [
Stream . STREAM_POST_POLICY_ADMINS ,
2021-03-29 16:01:39 +02:00
Stream . STREAM_POST_POLICY_MODERATORS ,
2021-02-12 08:19:30 +01:00
Stream . STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS ,
]
2020-04-07 20:43:04 +02:00
for policy in policies :
test_non_admin ( how_old = 15 , is_new = False , policy = policy )
test_non_admin ( how_old = 5 , is_new = True , policy = policy )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
for policy in policies :
2021-12-15 01:27:14 +01:00
stream = get_stream ( " stream_name1 " , user_profile . realm )
old_post_policy = stream . stream_post_policy
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-12-15 01:27:14 +01:00
f " /json/streams/ { stream . id } " , { " stream_post_policy " : orjson . dumps ( policy ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-07-13 16:13:28 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , user_profile . realm )
2020-07-13 16:13:28 +02:00
self . assertEqual ( stream . stream_post_policy , policy )
2021-12-15 01:04:35 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the "
" [posting permissions](/help/stream-sending-policy) for this stream: \n \n "
f " * **Old permissions**: { Stream . POST_POLICIES [ old_post_policy ] } . \n "
f " * **New permissions**: { Stream . POST_POLICIES [ policy ] } . "
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
2021-12-15 01:27:14 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : old_post_policy ,
RealmAuditLog . NEW_VALUE : policy ,
" property " : " stream_post_policy " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2020-04-07 20:43:04 +02:00
for policy in policies :
2021-12-15 01:27:14 +01:00
stream = get_stream ( " stream_name1 " , user_profile . realm )
old_post_policy = stream . stream_post_policy
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-12-15 01:27:14 +01:00
f " /json/streams/ { stream . id } " , { " stream_post_policy " : orjson . dumps ( policy ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-04-07 20:43:04 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , user_profile . realm )
2020-04-07 20:43:04 +02:00
self . assertEqual ( stream . stream_post_policy , policy )
2018-05-12 07:25:42 +02:00
2021-12-15 01:04:35 +01:00
messages = get_topic_messages ( user_profile , stream , " stream events " )
expected_notification = (
f " @_** { user_profile . full_name } | { user_profile . id } ** changed the "
" [posting permissions](/help/stream-sending-policy) for this stream: \n \n "
f " * **Old permissions**: { Stream . POST_POLICIES [ old_post_policy ] } . \n "
f " * **New permissions**: { Stream . POST_POLICIES [ policy ] } . "
)
self . assertEqual ( messages [ - 1 ] . content , expected_notification )
2021-12-15 01:27:14 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_PROPERTY_CHANGED ,
modified_stream = stream ,
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : old_post_policy ,
RealmAuditLog . NEW_VALUE : policy ,
" property " : " stream_post_policy " ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2021-12-06 18:19:12 +01:00
def test_change_stream_message_retention_days_notifications ( self ) - > None :
user_profile = self . example_user ( " desdemona " )
self . login_user ( user_profile )
realm = user_profile . realm
do_change_realm_plan_type ( realm , Realm . PLAN_TYPE_SELF_HOSTED , acting_user = None )
stream = self . subscribe ( user_profile , " stream_name1 " )
# Go from realm default (forever) to 2 days
result = self . client_patch (
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 2 ) . decode ( ) }
)
self . assert_json_success ( result )
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 1 )
expected_notification = (
2022-02-01 20:00:40 +01:00
f " @_**Desdemona| { user_profile . id } ** has changed the [message retention period](/help/message-retention-policy) for this stream: \n "
" * **Old retention period**: Forever \n "
" * **New retention period**: 2 days \n \n "
" Messages in this stream will now be automatically deleted 2 days after they are sent. "
2021-12-06 18:19:12 +01:00
)
self . assertEqual ( messages [ 0 ] . content , expected_notification )
2021-12-07 20:47:49 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_MESSAGE_RETENTION_DAYS_CHANGED
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{ RealmAuditLog . OLD_VALUE : None , RealmAuditLog . NEW_VALUE : 2 }
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2021-12-06 18:19:12 +01:00
# Go from 2 days to 8 days
result = self . client_patch (
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 8 ) . decode ( ) }
)
self . assert_json_success ( result )
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 2 )
expected_notification = (
2022-02-01 20:00:40 +01:00
f " @_**Desdemona| { user_profile . id } ** has changed the [message retention period](/help/message-retention-policy) for this stream: \n "
" * **Old retention period**: 2 days \n "
" * **New retention period**: 8 days \n \n "
" Messages in this stream will now be automatically deleted 8 days after they are sent. "
2021-12-06 18:19:12 +01:00
)
self . assertEqual ( messages [ 1 ] . content , expected_notification )
2021-12-07 20:47:49 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_MESSAGE_RETENTION_DAYS_CHANGED
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{ RealmAuditLog . OLD_VALUE : 2 , RealmAuditLog . NEW_VALUE : 8 }
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
# Go from 8 days to realm default (None on stream, forever/-1 on realm)
2021-12-06 18:19:12 +01:00
result = self . client_patch (
f " /json/streams/ { stream . id } " ,
{ " message_retention_days " : orjson . dumps ( " realm_default " ) . decode ( ) } ,
)
self . assert_json_success ( result )
messages = get_topic_messages ( user_profile , stream , " stream events " )
self . assert_length ( messages , 3 )
expected_notification = (
2022-02-01 20:00:40 +01:00
f " @_**Desdemona| { user_profile . id } ** has changed the [message retention period](/help/message-retention-policy) for this stream: \n "
" * **Old retention period**: 8 days \n "
" * **New retention period**: Forever \n \n "
" Messages in this stream will now be retained forever. "
2021-12-06 18:19:12 +01:00
)
self . assertEqual ( messages [ 2 ] . content , expected_notification )
2021-12-07 20:47:49 +01:00
realm_audit_log = RealmAuditLog . objects . filter (
event_type = RealmAuditLog . STREAM_MESSAGE_RETENTION_DAYS_CHANGED
) . last ( )
assert realm_audit_log is not None
expected_extra_data = orjson . dumps (
{
RealmAuditLog . OLD_VALUE : 8 ,
RealmAuditLog . NEW_VALUE : None ,
}
) . decode ( )
self . assertEqual ( realm_audit_log . extra_data , expected_extra_data )
2021-12-06 18:19:12 +01:00
2020-06-14 18:57:02 +02:00
def test_change_stream_message_retention_days ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " desdemona " )
2020-06-14 18:57:02 +02:00
self . login_user ( user_profile )
realm = user_profile . realm
2021-12-01 02:10:40 +01:00
do_change_realm_plan_type ( realm , Realm . PLAN_TYPE_LIMITED , acting_user = None )
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " stream_name1 " )
2020-06-14 18:57:02 +02:00
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 2 ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2022-02-05 08:29:54 +01:00
self . assert_json_error ( result , " Available on Zulip Cloud Standard. Upgrade to access. " )
2020-06-14 18:57:02 +02:00
2021-12-01 02:10:40 +01:00
do_change_realm_plan_type ( realm , Realm . PLAN_TYPE_SELF_HOSTED , acting_user = None )
2020-06-14 18:57:02 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-12-06 18:19:12 +01:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 2 ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
event = events [ 0 ] [ " event " ]
2021-02-12 08:19:30 +01:00
self . assertEqual (
event ,
dict (
2021-02-12 08:20:45 +01:00
op = " update " ,
type = " stream " ,
property = " message_retention_days " ,
2021-02-12 08:19:30 +01:00
value = 2 ,
stream_id = stream . id ,
2021-02-12 08:20:45 +01:00
name = " stream_name1 " ,
2021-02-12 08:19:30 +01:00
) ,
)
2021-02-12 08:20:45 +01:00
notified_user_ids = set ( events [ 0 ] [ " users " ] )
stream = get_stream ( " stream_name1 " , realm )
2020-06-14 18:57:02 +02:00
self . assertEqual ( notified_user_ids , set ( active_non_guest_user_ids ( realm . id ) ) )
self . assertIn ( user_profile . id , notified_user_ids )
2021-02-12 08:20:45 +01:00
self . assertIn ( self . example_user ( " prospero " ) . id , notified_user_ids )
self . assertNotIn ( self . example_user ( " polonius " ) . id , notified_user_ids )
2020-06-14 18:57:02 +02:00
self . assertEqual ( stream . message_retention_days , 2 )
2021-12-06 18:19:12 +01:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " ,
2021-08-02 18:43:08 +02:00
{ " message_retention_days " : orjson . dumps ( " unlimited " ) . decode ( ) } ,
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
event = events [ 0 ] [ " event " ]
2021-02-12 08:19:30 +01:00
self . assertEqual (
event ,
dict (
2021-02-12 08:20:45 +01:00
op = " update " ,
type = " stream " ,
property = " message_retention_days " ,
2021-02-12 08:19:30 +01:00
value = - 1 ,
stream_id = stream . id ,
2021-02-12 08:20:45 +01:00
name = " stream_name1 " ,
2021-02-12 08:19:30 +01:00
) ,
)
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
2020-06-14 18:57:02 +02:00
self . assertEqual ( stream . message_retention_days , - 1 )
2021-12-06 18:19:12 +01:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " ,
{ " message_retention_days " : orjson . dumps ( " realm_default " ) . decode ( ) } ,
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
event = events [ 0 ] [ " event " ]
2021-02-12 08:19:30 +01:00
self . assertEqual (
event ,
dict (
2021-02-12 08:20:45 +01:00
op = " update " ,
type = " stream " ,
property = " message_retention_days " ,
2021-02-12 08:19:30 +01:00
value = None ,
stream_id = stream . id ,
2021-02-12 08:20:45 +01:00
name = " stream_name1 " ,
2021-02-12 08:19:30 +01:00
) ,
)
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
2020-06-14 18:57:02 +02:00
self . assertEqual ( stream . message_retention_days , None )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " ,
{ " message_retention_days " : orjson . dumps ( " invalid " ) . decode ( ) } ,
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Bad value for ' message_retention_days ' : invalid " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( - 1 ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Bad value for ' message_retention_days ' : -1 " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 0 ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-06-21 10:50:01 +02:00
self . assert_json_error ( result , " Bad value for ' message_retention_days ' : 0 " )
2020-06-14 18:57:02 +02:00
def test_change_stream_message_retention_days_requires_realm_owner ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " iago " )
2020-06-14 18:57:02 +02:00
self . login_user ( user_profile )
realm = user_profile . realm
2021-02-12 08:20:45 +01:00
stream = self . subscribe ( user_profile , " stream_name1 " )
2020-06-14 18:57:02 +02:00
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 2 ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_error ( result , " Must be an organization owner " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_OWNER , acting_user = None )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
2021-02-12 08:20:45 +01:00
f " /json/streams/ { stream . id } " , { " message_retention_days " : orjson . dumps ( 2 ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2020-06-14 18:57:02 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
stream = get_stream ( " stream_name1 " , realm )
2020-06-14 18:57:02 +02:00
self . assertEqual ( stream . message_retention_days , 2 )
def test_stream_message_retention_days_on_stream_creation ( self ) - > None :
"""
Only admins can create streams with message_retention_days
with value other than None .
"""
2021-02-12 08:20:45 +01:00
admin = self . example_user ( " iago " )
2020-06-14 18:57:02 +02:00
2021-02-12 08:19:30 +01:00
streams_raw : List [ StreamDict ] = [
{
2021-02-12 08:20:45 +01:00
" name " : " new_stream " ,
" message_retention_days " : 10 ,
2021-05-20 10:50:17 +02:00
" is_web_public " : False ,
2021-02-12 08:19:30 +01:00
}
]
2021-09-21 20:04:51 +02:00
with self . assertRaisesRegex ( JsonableError , " Must be an organization owner " ) :
2020-06-14 18:57:02 +02:00
list_to_streams ( streams_raw , admin , autocreate = True )
2021-02-12 08:19:30 +01:00
streams_raw = [
{
2021-02-12 08:20:45 +01:00
" name " : " new_stream " ,
" message_retention_days " : - 1 ,
2021-05-20 10:50:17 +02:00
" is_web_public " : False ,
2021-02-12 08:19:30 +01:00
}
]
2021-09-21 20:04:51 +02:00
with self . assertRaisesRegex ( JsonableError , " Must be an organization owner " ) :
2020-06-14 18:57:02 +02:00
list_to_streams ( streams_raw , admin , autocreate = True )
2021-02-12 08:19:30 +01:00
streams_raw = [
{
2021-02-12 08:20:45 +01:00
" name " : " new_stream " ,
" message_retention_days " : None ,
2021-05-20 10:50:17 +02:00
" is_web_public " : False ,
2021-02-12 08:19:30 +01:00
}
]
2020-06-14 18:57:02 +02:00
result = list_to_streams ( streams_raw , admin , autocreate = True )
self . assert_length ( result [ 0 ] , 0 )
self . assert_length ( result [ 1 ] , 1 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result [ 1 ] [ 0 ] . name , " new_stream " )
2020-06-14 18:57:02 +02:00
self . assertEqual ( result [ 1 ] [ 0 ] . message_retention_days , None )
2021-02-12 08:20:45 +01:00
owner = self . example_user ( " desdemona " )
2020-06-14 18:57:02 +02:00
realm = owner . realm
streams_raw = [
2021-05-20 10:50:17 +02:00
{
" name " : " new_stream1 " ,
" message_retention_days " : 10 ,
" is_web_public " : False ,
} ,
{
" name " : " new_stream2 " ,
" message_retention_days " : - 1 ,
" is_web_public " : False ,
} ,
{
" name " : " new_stream3 " ,
" is_web_public " : False ,
} ,
2020-06-14 18:57:02 +02:00
]
2021-12-01 02:10:40 +01:00
do_change_realm_plan_type ( realm , Realm . PLAN_TYPE_LIMITED , acting_user = admin )
2021-02-12 08:19:30 +01:00
with self . assertRaisesRegex (
2022-02-05 08:29:54 +01:00
JsonableError , " Available on Zulip Cloud Standard. Upgrade to access. "
2021-02-12 08:19:30 +01:00
) :
2020-06-14 18:57:02 +02:00
list_to_streams ( streams_raw , owner , autocreate = True )
2021-12-01 02:10:40 +01:00
do_change_realm_plan_type ( realm , Realm . PLAN_TYPE_SELF_HOSTED , acting_user = admin )
2020-06-14 18:57:02 +02:00
result = list_to_streams ( streams_raw , owner , autocreate = True )
self . assert_length ( result [ 0 ] , 0 )
self . assert_length ( result [ 1 ] , 3 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result [ 1 ] [ 0 ] . name , " new_stream1 " )
2020-06-14 18:57:02 +02:00
self . assertEqual ( result [ 1 ] [ 0 ] . message_retention_days , 10 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result [ 1 ] [ 1 ] . name , " new_stream2 " )
2020-06-14 18:57:02 +02:00
self . assertEqual ( result [ 1 ] [ 1 ] . message_retention_days , - 1 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result [ 1 ] [ 2 ] . name , " new_stream3 " )
2020-06-14 18:57:02 +02:00
self . assertEqual ( result [ 1 ] [ 2 ] . message_retention_days , None )
2021-03-31 16:15:24 +02:00
def set_up_stream_for_archiving (
2021-02-12 08:19:30 +01:00
self , stream_name : str , invite_only : bool = False , subscribed : bool = True
) - > Stream :
2014-01-29 22:03:40 +01:00
"""
2021-03-31 16:15:24 +02:00
Create a stream for archiving by an administrator .
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2016-10-21 23:22:25 +02:00
stream = self . make_stream ( stream_name , invite_only = invite_only )
2014-01-29 22:03:40 +01:00
2021-03-31 16:15:24 +02:00
# For testing archiving streams you aren't on.
2014-01-29 22:03:40 +01:00
if subscribed :
2017-08-25 06:01:29 +02:00
self . subscribe ( user_profile , stream_name )
2014-01-29 22:03:40 +01:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2014-01-29 22:03:40 +01:00
return stream
2021-03-31 16:15:24 +02:00
def archive_stream ( self , stream : Stream ) - > None :
2014-01-29 22:03:40 +01:00
"""
2021-03-31 16:15:24 +02:00
Archive the stream and assess the result .
2014-01-29 22:03:40 +01:00
"""
active_name = stream . name
2016-12-04 01:04:55 +01:00
realm = stream . realm
2016-12-30 11:42:59 +01:00
stream_id = stream . id
2014-01-29 22:03:40 +01:00
2018-08-11 17:28:52 +02:00
# Simulate that a stream by the same name has already been
# deactivated, just to exercise our renaming logic:
2021-03-23 07:34:01 +01:00
# Since we do not know the id of these simulated stream we prepend the name with a random hashed_stream_id
2021-04-02 18:11:45 +02:00
ensure_stream ( realm , " DB32B77 " + " !DEACTIVATED: " + active_name , acting_user = None )
2018-08-11 17:28:52 +02:00
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-28 07:27:50 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 1 ) :
2021-02-12 08:20:45 +01:00
result = self . client_delete ( " /json/streams/ " + str ( stream_id ) )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-02-15 17:38:44 +01:00
# We no longer send subscription events for stream deactivations.
2021-02-12 08:20:45 +01:00
sub_events = [ e for e in events if e [ " event " ] [ " type " ] == " subscription " ]
2017-02-15 17:38:44 +01:00
self . assertEqual ( sub_events , [ ] )
2021-02-12 08:20:45 +01:00
stream_events = [ e for e in events if e [ " event " ] [ " type " ] == " stream " ]
2021-05-17 05:41:32 +02:00
self . assert_length ( stream_events , 1 )
2021-02-12 08:20:45 +01:00
event = stream_events [ 0 ] [ " event " ]
self . assertEqual ( event [ " op " ] , " delete " )
self . assertEqual ( event [ " streams " ] [ 0 ] [ " stream_id " ] , stream . id )
2014-01-29 22:03:40 +01:00
with self . assertRaises ( Stream . DoesNotExist ) :
2017-01-04 05:30:48 +01:00
Stream . objects . get ( realm = get_realm ( " zulip " ) , name = active_name )
2014-01-29 22:03:40 +01:00
# A deleted stream's name is changed, is deactivated, is invite-only,
# and has no subscribers.
2021-08-02 23:20:39 +02:00
hashed_stream_id = hashlib . sha512 ( str ( stream_id ) . encode ( ) ) . hexdigest ( ) [ 0 : 7 ]
2021-03-23 07:34:01 +01:00
deactivated_stream_name = hashed_stream_id + " !DEACTIVATED: " + active_name
2017-01-13 15:50:17 +01:00
deactivated_stream = get_stream ( deactivated_stream_name , realm )
2014-01-29 22:03:40 +01:00
self . assertTrue ( deactivated_stream . deactivated )
self . assertTrue ( deactivated_stream . invite_only )
self . assertEqual ( deactivated_stream . name , deactivated_stream_name )
2021-02-12 08:19:30 +01:00
subscribers = self . users_subscribed_to_stream ( deactivated_stream_name , realm )
2014-01-29 22:03:40 +01:00
self . assertEqual ( subscribers , [ ] )
# It doesn't show up in the list of public streams anymore.
2020-09-13 00:11:30 +02:00
result = self . client_get ( " /json/streams " , { " include_subscribed " : " false " } )
2017-08-17 08:45:20 +02:00
public_streams = [ s [ " name " ] for s in result . json ( ) [ " streams " ] ]
2014-01-29 22:03:40 +01:00
self . assertNotIn ( active_name , public_streams )
self . assertNotIn ( deactivated_stream_name , public_streams )
# Even if you could guess the new name, you can't subscribe to it.
2016-07-28 00:30:22 +02:00
result = self . client_post (
2015-11-30 21:39:40 +01:00
" /json/users/me/subscriptions " ,
2021-02-12 08:19:30 +01:00
{ " subscriptions " : orjson . dumps ( [ { " name " : deactivated_stream_name } ] ) . decode ( ) } ,
)
self . assert_json_error ( result , f " Unable to access stream ( { deactivated_stream_name } ). " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_you_must_be_realm_admin ( self ) - > None :
2017-08-22 21:41:08 +02:00
"""
You must be on the realm to create a stream .
"""
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2017-08-22 21:41:08 +02:00
2021-03-08 13:22:43 +01:00
other_realm = do_create_realm ( string_id = " other " , name = " other " )
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " other_realm_stream " , realm = other_realm )
2017-08-22 21:41:08 +02:00
2021-02-12 08:20:45 +01:00
result = self . client_delete ( " /json/streams/ " + str ( stream . id ) )
self . assert_json_error ( result , " Invalid stream id " )
2017-08-22 21:41:08 +02:00
# Even becoming a realm admin doesn't help us for an out-of-realm
# stream.
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2021-02-12 08:20:45 +01:00
result = self . client_delete ( " /json/streams/ " + str ( stream . id ) )
self . assert_json_error ( result , " Invalid stream id " )
2017-08-22 21:41:08 +02:00
2017-11-05 10:51:25 +01:00
def test_delete_public_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
When an administrator deletes a public stream , that stream is not
visible to users at all anymore .
"""
2021-03-31 16:15:24 +02:00
stream = self . set_up_stream_for_archiving ( " newstream " )
self . archive_stream ( stream )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_delete_private_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Administrators can delete private streams they are on .
"""
2021-03-31 16:15:24 +02:00
stream = self . set_up_stream_for_archiving ( " newstream " , invite_only = True )
self . archive_stream ( stream )
2014-01-29 22:03:40 +01:00
2021-03-31 16:15:24 +02:00
def test_archive_streams_youre_not_on ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-08-22 21:41:08 +02:00
Administrators can delete public streams they aren ' t on, including
private streams in their realm .
2014-01-29 22:03:40 +01:00
"""
2021-03-31 16:15:24 +02:00
pub_stream = self . set_up_stream_for_archiving ( " pubstream " , subscribed = False )
self . archive_stream ( pub_stream )
2014-01-29 22:03:40 +01:00
2021-03-31 16:15:24 +02:00
priv_stream = self . set_up_stream_for_archiving (
2021-02-12 08:19:30 +01:00
" privstream " , subscribed = False , invite_only = True
)
2021-03-31 16:15:24 +02:00
self . archive_stream ( priv_stream )
2014-01-29 22:03:40 +01:00
2020-10-15 15:31:20 +02:00
def attempt_unsubscribe_of_principal (
self ,
target_users : List [ UserProfile ] ,
query_count : int ,
2021-02-12 08:19:30 +01:00
cache_count : Optional [ int ] = None ,
is_realm_admin : bool = False ,
is_stream_admin : bool = False ,
is_subbed : bool = True ,
invite_only : bool = False ,
target_users_subbed : bool = True ,
using_legacy_emails : bool = False ,
other_sub_users : Sequence [ UserProfile ] = [ ] ,
2020-10-15 15:31:20 +02:00
) - > HttpResponse :
2016-06-04 19:50:38 +02:00
2014-01-30 22:50:51 +01:00
# Set up the main user, who is in most cases an admin.
2020-07-13 16:13:28 +02:00
if is_realm_admin :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " iago " )
2018-03-14 02:20:31 +01:00
else :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2018-03-14 02:20:31 +01:00
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2014-01-30 22:50:51 +01:00
# Set up the stream.
2020-04-09 21:51:58 +02:00
stream_name = " hümbüǵ "
2016-10-21 23:22:25 +02:00
self . make_stream ( stream_name , invite_only = invite_only )
2014-01-30 22:50:51 +01:00
# Set up the principal to be unsubscribed.
2020-07-02 03:13:26 +02:00
principals : List [ Union [ str , int ] ] = [ ]
2020-06-06 18:37:47 +02:00
for user in target_users :
2020-06-02 15:45:45 +02:00
if using_legacy_emails :
2020-06-06 18:37:47 +02:00
principals . append ( user . email )
2020-06-02 15:45:45 +02:00
else :
2020-06-06 18:37:47 +02:00
principals . append ( user . id )
2014-01-30 22:50:51 +01:00
# Subscribe the admin and/or principal as specified in the flags.
if is_subbed :
2020-07-13 16:13:28 +02:00
stream = self . subscribe ( user_profile , stream_name )
if is_stream_admin :
sub = get_subscription ( stream_name , user_profile )
2021-02-12 08:19:30 +01:00
do_change_subscription_property (
2021-04-08 02:41:57 +02:00
user_profile ,
sub ,
stream ,
" role " ,
Subscription . ROLE_STREAM_ADMINISTRATOR ,
acting_user = None ,
2021-02-12 08:19:30 +01:00
)
2020-06-06 18:37:47 +02:00
if target_users_subbed :
for user in target_users :
self . subscribe ( user , stream_name )
2020-06-13 03:34:01 +02:00
for user in other_sub_users :
self . subscribe ( user , stream_name )
2014-01-30 22:50:51 +01:00
2017-10-29 21:03:11 +01:00
with queries_captured ( ) as queries :
2020-10-15 15:31:20 +02:00
with cache_tries_captured ( ) as cache_tries :
result = self . client_delete (
" /json/users/me/subscriptions " ,
2021-02-12 08:19:30 +01:00
{
" subscriptions " : orjson . dumps ( [ stream_name ] ) . decode ( ) ,
" principals " : orjson . dumps ( principals ) . decode ( ) ,
} ,
)
2017-10-29 21:03:11 +01:00
self . assert_length ( queries , query_count )
2020-10-15 15:31:20 +02:00
if cache_count is not None :
self . assert_length ( cache_tries , cache_count )
2014-01-30 22:50:51 +01:00
2014-01-31 18:08:23 +01:00
# If the removal succeeded, then assert that Cordelia is no longer subscribed.
if result . status_code not in [ 400 ] :
2020-06-06 18:37:47 +02:00
subbed_users = self . users_subscribed_to_stream ( stream_name , user_profile . realm )
for user in target_users :
self . assertNotIn ( user , subbed_users )
2014-01-30 22:50:51 +01:00
return result
2017-11-05 10:51:25 +01:00
def test_cant_remove_others_from_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
If you ' re not an admin, you can ' t remove other people from streams .
"""
result = self . attempt_unsubscribe_of_principal (
2021-02-12 08:19:30 +01:00
query_count = 5 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2021-02-12 08:19:30 +01:00
is_realm_admin = False ,
is_stream_admin = False ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
)
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2014-01-30 22:50:51 +01:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_others_from_public_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
2020-07-13 16:13:28 +02:00
If you ' re a realm admin, you can remove people from public streams, even
2014-01-30 22:50:51 +01:00
those you aren ' t on.
"""
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 16 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = True ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
)
2014-01-30 22:50:51 +01:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 1 )
self . assert_length ( json [ " not_removed " ] , 0 )
2014-01-30 22:50:51 +01:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_multiple_users_from_stream ( self ) - > None :
2020-06-02 15:45:45 +02:00
"""
2020-10-15 15:31:20 +02:00
If you ' re a realm admin, you can remove multiple users from a stream.
TODO : We have too many queries for this situation - - each additional
2020-10-16 16:40:25 +02:00
user leads to 4 more queries .
2020-10-16 16:33:41 +02:00
Fortunately , some of the extra work here is in
do_mark_stream_messages_as_read , which gets deferred
using a queue .
2020-06-02 15:45:45 +02:00
"""
2020-10-15 15:31:20 +02:00
target_users = [
2021-02-12 08:20:45 +01:00
self . example_user ( name ) for name in [ " cordelia " , " prospero " , " iago " , " hamlet " , " ZOE " ]
2020-10-15 15:31:20 +02:00
]
2020-06-02 15:45:45 +02:00
result = self . attempt_unsubscribe_of_principal (
tests: Fix queries_captured to clear cache up front.
Before this change we were clearing the cache on
every SQL usage.
The code to do this was added in February 2017
in 6db4879f9c9fd6941d3aa2af6138ea75aa6675a6.
Now we clear the cache just one time, but before
the action/request under test.
Tests that want to count queries with a warm
cache now specify keep_cache_warm=True. Those
tests were particularly flawed before this change.
In general, the old code both over-counted and
under-counted queries.
It under-counted SQL usage for requests that were
able to pull some data out of a warm cache before
they did any SQL. Typically this would have bypassed
the initial query to get UserProfile, so you
will see several off-by-one fixes.
The old code over-counted SQL usage to the extent
that it's a rather extreme assumption that during
an action itself, the entries that you put into
the cache will get thrown away. And that's essentially
what the prior code simulated.
Now, it's still bad if an action keeps hitting the
cache for no reason, but it's not as bad as hitting
the database. There doesn't appear to be any evidence
of us doing something silly like fetching the same
data from the cache in a loop, but there are
opportunities to prevent second or third round
trips to the cache for the same object, if we
can re-structure the code so that the same caller
doesn't have two callees get the same data.
Note that for invites, we have some cache hits
that are due to the nature of how we serialize
data to our queue processor--we generally just
serialize ids, and then re-fetch objects when
we pop them off the queue.
2020-11-04 12:02:00 +01:00
query_count = 31 ,
2020-10-15 15:31:20 +02:00
cache_count = 9 ,
target_users = target_users ,
is_realm_admin = True ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
)
2020-06-02 15:45:45 +02:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 5 )
self . assert_length ( json [ " not_removed " ] , 0 )
2020-06-02 15:45:45 +02:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_others_from_subbed_private_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
2020-07-13 16:13:28 +02:00
If you ' re a realm admin, you can remove other people from private streams you
2014-01-30 22:50:51 +01:00
are on .
"""
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 17 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = True ,
is_subbed = True ,
invite_only = True ,
target_users_subbed = True ,
)
2014-01-30 22:50:51 +01:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 1 )
self . assert_length ( json [ " not_removed " ] , 0 )
2014-01-30 22:50:51 +01:00
2020-07-13 16:13:28 +02:00
def test_realm_admin_remove_others_from_unsubbed_private_stream ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
2020-07-13 16:13:28 +02:00
If you ' re a realm admin, you can remove people from private
2014-01-30 22:50:51 +01:00
streams you aren ' t on.
"""
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 17 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = True ,
is_subbed = False ,
invite_only = True ,
target_users_subbed = True ,
other_sub_users = [ self . example_user ( " othello " ) ] ,
)
2020-07-13 16:13:28 +02:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 1 )
self . assert_length ( json [ " not_removed " ] , 0 )
2020-07-13 16:13:28 +02:00
def test_stream_admin_remove_others_from_public_stream ( self ) - > None :
"""
You can remove others from public streams you ' re a stream administrator of.
"""
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 16 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = False ,
is_stream_admin = True ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
)
2020-07-13 16:13:28 +02:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 1 )
self . assert_length ( json [ " not_removed " ] , 0 )
2020-07-13 16:13:28 +02:00
def test_stream_admin_remove_multiple_users_from_stream ( self ) - > None :
"""
You can remove multiple users from public streams you ' re a stream administrator of.
"""
2020-10-15 15:31:20 +02:00
target_users = [
2021-02-12 08:20:45 +01:00
self . example_user ( name ) for name in [ " cordelia " , " prospero " , " othello " , " hamlet " , " ZOE " ]
2020-10-15 15:31:20 +02:00
]
2020-07-13 16:13:28 +02:00
result = self . attempt_unsubscribe_of_principal (
tests: Fix queries_captured to clear cache up front.
Before this change we were clearing the cache on
every SQL usage.
The code to do this was added in February 2017
in 6db4879f9c9fd6941d3aa2af6138ea75aa6675a6.
Now we clear the cache just one time, but before
the action/request under test.
Tests that want to count queries with a warm
cache now specify keep_cache_warm=True. Those
tests were particularly flawed before this change.
In general, the old code both over-counted and
under-counted queries.
It under-counted SQL usage for requests that were
able to pull some data out of a warm cache before
they did any SQL. Typically this would have bypassed
the initial query to get UserProfile, so you
will see several off-by-one fixes.
The old code over-counted SQL usage to the extent
that it's a rather extreme assumption that during
an action itself, the entries that you put into
the cache will get thrown away. And that's essentially
what the prior code simulated.
Now, it's still bad if an action keeps hitting the
cache for no reason, but it's not as bad as hitting
the database. There doesn't appear to be any evidence
of us doing something silly like fetching the same
data from the cache in a loop, but there are
opportunities to prevent second or third round
trips to the cache for the same object, if we
can re-structure the code so that the same caller
doesn't have two callees get the same data.
Note that for invites, we have some cache hits
that are due to the nature of how we serialize
data to our queue processor--we generally just
serialize ids, and then re-fetch objects when
we pop them off the queue.
2020-11-04 12:02:00 +01:00
query_count = 31 ,
2020-10-15 15:31:20 +02:00
cache_count = 9 ,
target_users = target_users ,
is_realm_admin = False ,
is_stream_admin = True ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
)
2020-07-13 16:13:28 +02:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 5 )
self . assert_length ( json [ " not_removed " ] , 0 )
2020-07-13 16:13:28 +02:00
def test_stream_admin_remove_others_from_private_stream ( self ) - > None :
"""
You can remove others from private streams you ' re a stream administrator of.
"""
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 17 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = False ,
is_stream_admin = True ,
is_subbed = True ,
invite_only = True ,
target_users_subbed = True ,
)
2018-02-20 18:56:01 +01:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 1 )
self . assert_length ( json [ " not_removed " ] , 0 )
2014-01-30 22:50:51 +01:00
2020-04-09 19:07:57 +02:00
def test_cant_remove_others_from_stream_legacy_emails ( self ) - > None :
result = self . attempt_unsubscribe_of_principal (
2021-02-12 08:19:30 +01:00
query_count = 5 ,
is_realm_admin = False ,
is_stream_admin = False ,
is_subbed = True ,
invite_only = False ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2021-02-12 08:19:30 +01:00
target_users_subbed = True ,
using_legacy_emails = True ,
)
self . assert_json_error ( result , " Must be an organization or stream administrator " )
2020-04-09 19:07:57 +02:00
def test_admin_remove_others_from_stream_legacy_emails ( self ) - > None :
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 16 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = True ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
using_legacy_emails = True ,
)
2020-04-09 19:07:57 +02:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 1 )
self . assert_length ( json [ " not_removed " ] , 0 )
2020-04-09 19:07:57 +02:00
2020-06-02 15:45:45 +02:00
def test_admin_remove_multiple_users_from_stream_legacy_emails ( self ) - > None :
result = self . attempt_unsubscribe_of_principal (
2020-10-16 16:40:25 +02:00
query_count = 20 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) , self . example_user ( " prospero " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = True ,
is_subbed = True ,
invite_only = False ,
target_users_subbed = True ,
using_legacy_emails = True ,
)
2020-06-02 15:45:45 +02:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 2 )
self . assert_length ( json [ " not_removed " ] , 0 )
2020-06-02 15:45:45 +02:00
2017-11-05 10:51:25 +01:00
def test_remove_already_not_subbed ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
Trying to unsubscribe someone who already isn ' t subscribed to a stream
fails gracefully .
"""
result = self . attempt_unsubscribe_of_principal (
2020-10-20 17:46:31 +02:00
query_count = 10 ,
2021-02-12 08:20:45 +01:00
target_users = [ self . example_user ( " cordelia " ) ] ,
2020-10-13 12:53:23 +02:00
is_realm_admin = True ,
is_subbed = False ,
invite_only = False ,
target_users_subbed = False ,
)
2014-01-30 22:50:51 +01:00
json = self . assert_json_success ( result )
2021-05-17 05:41:32 +02:00
self . assert_length ( json [ " removed " ] , 0 )
self . assert_length ( json [ " not_removed " ] , 1 )
2014-01-30 22:50:51 +01:00
2017-11-05 10:51:25 +01:00
def test_remove_invalid_user ( self ) - > None :
2014-01-30 22:50:51 +01:00
"""
Trying to unsubscribe an invalid user from a stream fails gracefully .
"""
2021-02-12 08:20:45 +01:00
admin = self . example_user ( " iago " )
2020-03-06 18:40:46 +01:00
self . login_user ( admin )
self . assertTrue ( admin . is_realm_admin )
2014-01-30 22:50:51 +01:00
2020-04-09 21:51:58 +02:00
stream_name = " hümbüǵ "
2016-10-21 23:22:25 +02:00
self . make_stream ( stream_name )
2014-01-30 22:50:51 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_delete (
" /json/users/me/subscriptions " ,
{
" subscriptions " : orjson . dumps ( [ stream_name ] ) . decode ( ) ,
" principals " : orjson . dumps ( [ 99 ] ) . decode ( ) ,
} ,
)
2014-01-30 22:50:51 +01:00
self . assert_json_error (
2021-02-12 08:19:30 +01:00
result , " User not authorized to execute queries on behalf of ' 99 ' " , status_code = 403
)
2014-01-30 22:50:51 +01:00
2016-08-23 02:08:42 +02:00
class DefaultStreamTest ( ZulipTestCase ) :
2018-05-11 01:39:38 +02:00
def get_default_stream_names ( self , realm : Realm ) - > Set [ str ] :
2017-09-17 00:34:13 +02:00
streams = get_default_streams_for_realm ( realm . id )
2014-01-29 22:03:40 +01:00
stream_names = [ s . name for s in streams ]
return set ( stream_names )
2017-11-05 10:51:25 +01:00
def test_add_and_remove_default_stream ( self ) - > None :
2017-01-04 05:30:48 +01:00
realm = get_realm ( " zulip " )
2021-05-10 07:02:14 +02:00
stream = ensure_stream ( realm , " Added stream " , acting_user = None )
2014-01-29 22:03:40 +01:00
orig_stream_names = self . get_default_stream_names ( realm )
2017-01-30 04:23:08 +01:00
do_add_default_stream ( stream )
2014-01-29 22:03:40 +01:00
new_stream_names = self . get_default_stream_names ( realm )
added_stream_names = new_stream_names - orig_stream_names
2021-05-10 07:02:14 +02:00
self . assertEqual ( added_stream_names , { " Added stream " } )
2022-02-08 00:13:33 +01:00
# idempotency--2nd call to add_default_stream should be a noop
2017-01-30 04:23:08 +01:00
do_add_default_stream ( stream )
2014-01-29 22:03:40 +01:00
self . assertEqual ( self . get_default_stream_names ( realm ) , new_stream_names )
# start removing
2017-01-30 04:25:40 +01:00
do_remove_default_stream ( stream )
2014-01-29 22:03:40 +01:00
self . assertEqual ( self . get_default_stream_names ( realm ) , orig_stream_names )
2022-02-08 00:13:33 +01:00
# idempotency--2nd call to remove_default_stream should be a noop
2017-01-30 04:25:40 +01:00
do_remove_default_stream ( stream )
2014-01-29 22:03:40 +01:00
self . assertEqual ( self . get_default_stream_names ( realm ) , orig_stream_names )
2017-11-05 10:51:25 +01:00
def test_api_calls ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2020-03-06 18:40:46 +01:00
self . login_user ( user_profile )
2018-08-15 20:45:55 +02:00
2021-04-29 17:22:48 +02:00
DefaultStream . objects . filter ( realm = user_profile . realm ) . delete ( )
2021-02-12 08:20:45 +01:00
stream_name = " stream ADDED via api "
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( user_profile . realm , stream_name , acting_user = None )
2021-02-12 08:20:45 +01:00
result = self . client_post ( " /json/default_streams " , dict ( stream_id = stream . id ) )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
self . assertTrue ( stream_name in self . get_default_stream_names ( user_profile . realm ) )
2018-08-15 20:45:55 +02:00
# look for it
self . subscribe ( user_profile , stream_name )
payload = dict (
2021-02-12 08:20:45 +01:00
include_public = " true " ,
include_default = " true " ,
2018-08-15 20:45:55 +02:00
)
2021-02-12 08:20:45 +01:00
result = self . client_get ( " /json/streams " , payload )
2018-08-15 20:45:55 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
streams = result . json ( ) [ " streams " ]
default_streams = { stream [ " name " ] for stream in streams if stream [ " is_default " ] }
2018-08-15 20:45:55 +02:00
self . assertEqual ( default_streams , { stream_name } )
2021-02-12 08:20:45 +01:00
other_streams = { stream [ " name " ] for stream in streams if not stream [ " is_default " ] }
2021-07-13 19:42:37 +02:00
self . assertGreater ( len ( other_streams ) , 0 )
2018-08-15 20:45:55 +02:00
2014-01-29 22:03:40 +01:00
# and remove it
2021-02-12 08:20:45 +01:00
result = self . client_delete ( " /json/default_streams " , dict ( stream_id = stream . id ) )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
self . assertFalse ( stream_name in self . get_default_stream_names ( user_profile . realm ) )
2018-08-07 14:13:58 +02:00
2020-08-08 00:49:24 +02:00
# Test admin can't access unsubscribed private stream for adding.
2018-08-07 14:13:58 +02:00
stream_name = " private_stream "
2020-03-22 20:29:49 +01:00
stream = self . make_stream ( stream_name , invite_only = True )
2021-02-12 08:20:45 +01:00
self . subscribe ( self . example_user ( " iago " ) , stream_name )
result = self . client_post ( " /json/default_streams " , dict ( stream_id = stream . id ) )
2020-04-13 13:57:07 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2018-08-07 14:13:58 +02:00
2020-08-08 00:49:24 +02:00
# Test admin can't add subscribed private stream also.
2018-08-07 14:13:58 +02:00
self . subscribe ( user_profile , stream_name )
2021-02-12 08:20:45 +01:00
result = self . client_post ( " /json/default_streams " , dict ( stream_id = stream . id ) )
2020-08-08 00:49:24 +02:00
self . assert_json_error ( result , " Private streams cannot be made default. " )
2014-01-29 22:03:40 +01:00
2020-07-24 17:50:36 +02:00
def test_guest_user_access_to_streams ( self ) - > None :
user_profile = self . example_user ( " polonius " )
self . login_user ( user_profile )
self . assertEqual ( user_profile . role , UserProfile . ROLE_GUEST )
2022-01-29 00:54:13 +01:00
# Get all the streams that Polonius has access to (subscribed + web-public streams)
2020-09-13 00:11:30 +02:00
result = self . client_get ( " /json/streams " , { " include_web_public " : " true " } )
2021-02-12 08:20:45 +01:00
streams = result . json ( ) [ " streams " ]
2021-01-14 21:44:56 +01:00
sub_info = gather_subscriptions_helper ( user_profile )
subscribed = sub_info . subscriptions
unsubscribed = sub_info . unsubscribed
never_subscribed = sub_info . never_subscribed
2021-05-17 05:41:32 +02:00
self . assert_length ( streams , len ( subscribed ) + len ( unsubscribed ) + len ( never_subscribed ) )
2021-02-12 08:20:45 +01:00
stream_names = [ stream [ " name " ] for stream in streams ]
2022-03-11 19:32:25 +01:00
expected_stream_names = [ stream [ " name " ] for stream in subscribed + unsubscribed ]
expected_stream_names + = [ stream [ " name " ] for stream in never_subscribed ]
2020-07-24 17:50:36 +02:00
self . assertEqual ( set ( stream_names ) , set ( expected_stream_names ) )
2021-02-12 08:19:30 +01:00
2017-11-01 18:20:34 +01:00
class DefaultStreamGroupTest ( ZulipTestCase ) :
def test_create_update_and_remove_default_stream_group ( self ) - > None :
realm = get_realm ( " zulip " )
# Test creating new default stream group
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
streams = [ ]
for stream_name in [ " stream1 " , " stream2 " , " stream3 " ] :
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( realm , stream_name , acting_user = None )
2017-11-01 18:20:34 +01:00
streams . append ( stream )
2017-12-01 07:57:54 +01:00
def get_streams ( group : DefaultStreamGroup ) - > List [ Stream ] :
2021-02-12 08:20:45 +01:00
return list ( group . streams . all ( ) . order_by ( " name " ) )
2017-12-01 07:57:54 +01:00
2017-11-01 18:20:34 +01:00
group_name = " group1 "
2017-11-14 20:51:34 +01:00
description = " This is group1 "
do_create_default_stream_group ( realm , group_name , description , streams )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-11-14 20:51:34 +01:00
self . assertEqual ( default_stream_groups [ 0 ] . description , description )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , streams )
2017-11-01 18:20:34 +01:00
# Test adding streams to existing default stream group
2017-11-14 20:33:09 +01:00
group = lookup_default_stream_groups ( [ " group1 " ] , realm ) [ 0 ]
2017-11-01 18:20:34 +01:00
new_stream_names = [ " stream4 " , " stream5 " ]
new_streams = [ ]
for new_stream_name in new_stream_names :
2021-04-02 18:11:45 +02:00
new_stream = ensure_stream ( realm , new_stream_name , acting_user = None )
2017-11-01 18:20:34 +01:00
new_streams . append ( new_stream )
streams . append ( new_stream )
2017-11-14 20:33:09 +01:00
do_add_streams_to_default_stream_group ( realm , group , new_streams )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , streams )
2017-11-01 18:20:34 +01:00
# Test removing streams from existing default stream group
2017-11-14 20:33:09 +01:00
do_remove_streams_from_default_stream_group ( realm , group , new_streams )
2017-11-01 18:20:34 +01:00
remaining_streams = streams [ 0 : 3 ]
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , remaining_streams )
2017-11-01 18:20:34 +01:00
2017-11-14 20:51:34 +01:00
# Test changing default stream group description
new_description = " group1 new description "
do_change_default_stream_group_description ( realm , group , new_description )
default_stream_groups = get_default_stream_groups ( realm )
self . assertEqual ( default_stream_groups [ 0 ] . description , new_description )
self . assert_length ( default_stream_groups , 1 )
2017-11-14 21:06:02 +01:00
# Test changing default stream group name
new_group_name = " new group1 "
do_change_default_stream_group_name ( realm , group , new_group_name )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , new_group_name )
2017-12-01 07:57:54 +01:00
self . assertEqual ( get_streams ( default_stream_groups [ 0 ] ) , remaining_streams )
2017-11-14 21:06:02 +01:00
2017-11-01 18:20:34 +01:00
# Test removing default stream group
2017-11-14 20:33:09 +01:00
do_remove_default_stream_group ( realm , group )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
# Test creating a default stream group which contains a default stream
do_add_default_stream ( remaining_streams [ 0 ] )
2017-11-17 07:00:53 +01:00
with self . assertRaisesRegex (
2021-02-12 08:19:30 +01:00
JsonableError , " ' stream1 ' is a default stream and cannot be added to ' new group1 ' "
) :
do_create_default_stream_group (
realm , new_group_name , " This is group1 " , remaining_streams
)
2017-11-01 18:20:34 +01:00
def test_api_calls ( self ) - > None :
2021-02-12 08:20:45 +01:00
self . login ( " hamlet " )
user_profile = self . example_user ( " hamlet " )
2017-11-01 18:20:34 +01:00
realm = user_profile . realm
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2017-11-01 18:20:34 +01:00
# Test creating new default stream group
stream_names = [ " stream1 " , " stream2 " , " stream3 " ]
group_name = " group1 "
2017-11-14 20:51:34 +01:00
description = " This is group1 "
2017-11-01 18:20:34 +01:00
streams = [ ]
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
for stream_name in stream_names :
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( realm , stream_name , acting_user = None )
2017-11-01 18:20:34 +01:00
streams . append ( stream )
2021-02-12 08:19:30 +01:00
result = self . client_post (
2021-02-12 08:20:45 +01:00
" /json/default_stream_groups/create " ,
2021-02-12 08:19:30 +01:00
{
" group_name " : group_name ,
" description " : description ,
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) ,
} ,
)
2017-11-01 18:20:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2017-11-14 20:51:34 +01:00
self . assertEqual ( default_stream_groups [ 0 ] . description , description )
2017-11-18 00:31:54 +01:00
self . assertEqual ( list ( default_stream_groups [ 0 ] . streams . all ( ) . order_by ( " id " ) ) , streams )
2017-11-01 18:20:34 +01:00
2018-08-22 16:54:28 +02:00
# Try adding the same streams to the group.
2021-02-12 08:19:30 +01:00
result = self . client_post (
2021-02-12 08:20:45 +01:00
" /json/default_stream_groups/create " ,
2021-02-12 08:19:30 +01:00
{
" group_name " : group_name ,
" description " : description ,
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) ,
} ,
)
2018-08-22 16:54:28 +02:00
self . assert_json_error ( result , " Default stream group ' group1 ' already exists " )
2017-11-01 18:20:34 +01:00
# Test adding streams to existing default stream group
2017-11-14 20:33:09 +01:00
group_id = default_stream_groups [ 0 ] . id
2017-11-01 18:20:34 +01:00
new_stream_names = [ " stream4 " , " stream5 " ]
new_streams = [ ]
for new_stream_name in new_stream_names :
2021-04-02 18:11:45 +02:00
new_stream = ensure_stream ( realm , new_stream_name , acting_user = None )
2017-11-01 18:20:34 +01:00
new_streams . append ( new_stream )
streams . append ( new_stream )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
2017-11-01 18:20:34 +01:00
self . assert_json_error ( result , " Missing ' op ' argument " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " invalid " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , ' Invalid value for " op " . Specify one of " add " or " remove " . ' )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
" /json/default_stream_groups/12345/streams " ,
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , " Default stream group with id ' 12345 ' does not exist. " )
2017-11-01 18:20:34 +01:00
2020-06-09 00:25:09 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } /streams " , { " op " : " add " } )
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , " Missing ' stream_names ' argument " )
2017-11-01 18:20:34 +01:00
do_add_default_stream ( new_streams [ 0 ] )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
self . assert_json_error (
result , " ' stream4 ' is a default stream and cannot be added to ' group1 ' "
)
2017-11-01 18:20:34 +01:00
do_remove_default_stream ( new_streams [ 0 ] )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
2017-11-14 20:33:09 +01:00
self . assert_json_success ( result )
2017-11-01 18:20:34 +01:00
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2021-02-12 08:20:45 +01:00
self . assertEqual ( list ( default_stream_groups [ 0 ] . streams . all ( ) . order_by ( " name " ) ) , streams )
2017-11-01 18:20:34 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " add " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
self . assert_json_error (
result , " Stream ' stream4 ' is already present in default stream group ' group1 ' "
)
2017-11-01 18:20:34 +01:00
# Test removing streams from default stream group
2021-02-12 08:19:30 +01:00
result = self . client_patch (
" /json/default_stream_groups/12345/streams " ,
{ " op " : " remove " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
2017-11-14 20:33:09 +01:00
self . assert_json_error ( result , " Default stream group with id ' 12345 ' does not exist. " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " remove " , " stream_names " : orjson . dumps ( [ " random stream name " ] ) . decode ( ) } ,
)
2017-11-01 18:20:34 +01:00
self . assert_json_error ( result , " Invalid stream name ' random stream name ' " )
streams . remove ( new_streams [ 0 ] )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " remove " , " stream_names " : orjson . dumps ( [ new_stream_names [ 0 ] ] ) . decode ( ) } ,
)
2017-11-01 18:20:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
2021-02-12 08:20:45 +01:00
self . assertEqual ( list ( default_stream_groups [ 0 ] . streams . all ( ) . order_by ( " name " ) ) , streams )
2017-11-01 18:20:34 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } /streams " ,
{ " op " : " remove " , " stream_names " : orjson . dumps ( new_stream_names ) . decode ( ) } ,
)
self . assert_json_error (
result , " Stream ' stream4 ' is not present in default stream group ' group1 ' "
)
2017-11-01 18:20:34 +01:00
2017-11-14 20:51:34 +01:00
# Test changing description of default stream group
new_description = " new group1 description "
2022-04-07 17:32:33 +02:00
result = self . client_patch ( f " /json/default_stream_groups/ { group_id } " )
2017-11-14 21:06:02 +01:00
self . assert_json_error ( result , ' You must pass " new_description " or " new_group_name " . ' )
2017-11-14 20:51:34 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_patch (
" /json/default_stream_groups/12345 " ,
2022-04-07 17:32:33 +02:00
{ " new_description " : new_description } ,
2021-02-12 08:19:30 +01:00
)
2017-11-14 20:51:34 +01:00
self . assert_json_error ( result , " Default stream group with id ' 12345 ' does not exist. " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } " ,
2022-04-07 17:32:33 +02:00
{ " new_description " : new_description } ,
2021-02-12 08:19:30 +01:00
)
2017-11-14 20:51:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , group_name )
self . assertEqual ( default_stream_groups [ 0 ] . description , new_description )
2017-11-14 21:06:02 +01:00
# Test changing name of default stream group
new_group_name = " new group1 "
do_create_default_stream_group ( realm , " group2 " , " " , [ ] )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } " ,
2022-04-07 17:32:33 +02:00
{ " new_group_name " : " group2 " } ,
2021-02-12 08:19:30 +01:00
)
2017-11-14 21:06:02 +01:00
self . assert_json_error ( result , " Default stream group ' group2 ' already exists " )
new_group = lookup_default_stream_groups ( [ " group2 " ] , realm ) [ 0 ]
do_remove_default_stream_group ( realm , new_group )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } " ,
2022-04-07 17:32:33 +02:00
{ " new_group_name " : group_name } ,
2021-02-12 08:19:30 +01:00
)
2017-11-14 21:06:02 +01:00
self . assert_json_error ( result , " This default stream group is already named ' group1 ' " )
2021-02-12 08:19:30 +01:00
result = self . client_patch (
f " /json/default_stream_groups/ { group_id } " ,
2022-04-07 17:32:33 +02:00
{ " new_group_name " : new_group_name } ,
2021-02-12 08:19:30 +01:00
)
2017-11-14 21:06:02 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 1 )
self . assertEqual ( default_stream_groups [ 0 ] . name , new_group_name )
self . assertEqual ( default_stream_groups [ 0 ] . description , new_description )
2017-11-01 18:20:34 +01:00
# Test deleting a default stream group
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/default_stream_groups/ { group_id } " )
2017-11-01 18:20:34 +01:00
self . assert_json_success ( result )
default_stream_groups = get_default_stream_groups ( realm )
self . assert_length ( default_stream_groups , 0 )
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/default_stream_groups/ { group_id } " )
2020-06-09 00:25:09 +02:00
self . assert_json_error ( result , f " Default stream group with id ' { group_id } ' does not exist. " )
2017-11-01 18:20:34 +01:00
2018-02-04 19:50:47 +01:00
def test_invalid_default_stream_group_name ( self ) - > None :
2021-02-12 08:20:45 +01:00
self . login ( " iago " )
user_profile = self . example_user ( " iago " )
2018-02-04 19:50:47 +01:00
realm = user_profile . realm
stream_names = [ " stream1 " , " stream2 " , " stream3 " ]
description = " This is group1 "
streams = [ ]
for stream_name in stream_names :
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( realm , stream_name , acting_user = None )
2018-02-04 19:50:47 +01:00
streams . append ( stream )
2021-02-12 08:19:30 +01:00
result = self . client_post (
2021-02-12 08:20:45 +01:00
" /json/default_stream_groups/create " ,
2021-02-12 08:19:30 +01:00
{
" group_name " : " " ,
" description " : description ,
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) ,
} ,
)
2018-02-04 19:50:47 +01:00
self . assert_json_error ( result , " Invalid default stream group name ' ' " )
2021-02-12 08:19:30 +01:00
result = self . client_post (
2021-02-12 08:20:45 +01:00
" /json/default_stream_groups/create " ,
2021-02-12 08:19:30 +01:00
{
2021-02-12 08:20:45 +01:00
" group_name " : " x " * 100 ,
2021-02-12 08:19:30 +01:00
" description " : description ,
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) ,
} ,
)
self . assert_json_error (
result ,
" Default stream group name too long (limit: {} characters) " . format (
DefaultStreamGroup . MAX_NAME_LENGTH
) ,
)
2018-02-04 19:50:47 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_post (
2021-02-12 08:20:45 +01:00
" /json/default_stream_groups/create " ,
2021-02-12 08:19:30 +01:00
{
" group_name " : " abc \000 " ,
" description " : description ,
" stream_names " : orjson . dumps ( stream_names ) . decode ( ) ,
} ,
)
self . assert_json_error (
result , " Default stream group name ' abc \000 ' contains NULL (0x00) characters. "
)
2018-02-04 19:50:47 +01:00
2018-08-16 16:17:20 +02:00
# Also test that lookup_default_stream_groups raises an
# error if we pass it a bad name. This function is used
# during registration, but it's a bit heavy to do a full
# test of that.
2021-02-12 08:20:45 +01:00
with self . assertRaisesRegex ( JsonableError , " Invalid default stream group invalid-name " ) :
lookup_default_stream_groups ( [ " invalid-name " ] , realm )
2018-08-16 16:17:20 +02:00
2021-02-12 08:19:30 +01:00
2016-08-23 02:08:42 +02:00
class SubscriptionPropertiesTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_set_stream_color ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-05-09 07:01:42 +02:00
A POST request to / api / v1 / users / me / subscriptions / properties with stream_id and
2019-01-14 07:50:23 +01:00
color data sets the stream color , and for that stream only . Also , make sure that
any invalid hex color codes are bounced .
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2014-01-29 22:03:40 +01:00
2017-05-08 16:23:43 +02:00
old_subs , _ = gather_subscriptions ( test_user )
2014-01-29 22:03:40 +01:00
sub = old_subs [ 0 ]
2021-02-12 08:20:45 +01:00
stream_id = sub [ " stream_id " ]
2017-07-11 21:51:31 +02:00
new_color = " #ffffff " # TODO: ensure that this is different from old_color
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " color " , " stream_id " : stream_id , " value " : " #ffffff " } ]
) . decode ( )
} ,
)
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
new_subs = gather_subscriptions ( test_user ) [ 0 ]
2014-01-29 22:03:40 +01:00
found_sub = None
for sub in new_subs :
2021-02-12 08:20:45 +01:00
if sub [ " stream_id " ] == stream_id :
2014-01-29 22:03:40 +01:00
found_sub = sub
break
2021-02-12 08:19:30 +01:00
assert found_sub is not None
2021-02-12 08:20:45 +01:00
self . assertEqual ( found_sub [ " color " ] , new_color )
2014-01-29 22:03:40 +01:00
new_subs . remove ( found_sub )
for sub in old_subs :
2021-02-12 08:20:45 +01:00
if sub [ " stream_id " ] == stream_id :
2014-01-29 22:03:40 +01:00
found_sub = sub
break
old_subs . remove ( found_sub )
self . assertEqual ( old_subs , new_subs )
2019-01-14 07:50:23 +01:00
invalid_color = " 3ffrff "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " color " , " stream_id " : stream_id , " value " : invalid_color } ]
) . decode ( )
} ,
)
2019-01-14 07:50:23 +01:00
self . assert_json_error ( result , " color is not a valid hex color code " )
2017-11-05 10:51:25 +01:00
def test_set_color_missing_stream_id ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-05-09 07:01:42 +02:00
Updating the color property requires a ` stream_id ` key .
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " color " , " value " : " #ffffff " } ]
) . decode ( )
} ,
)
self . assert_json_error ( result , " stream_id key is missing from subscription_data[0] " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_set_color_unsubscribed_stream_id ( self ) - > None :
2016-08-18 12:33:06 +02:00
"""
Updating the color property requires a subscribed stream .
"""
2020-03-10 11:48:26 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2016-08-18 12:33:06 +02:00
2021-01-14 21:44:56 +01:00
sub_info = gather_subscriptions_helper ( test_user )
not_subbed = sub_info . never_subscribed
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[
{
" property " : " color " ,
" stream_id " : not_subbed [ 0 ] [ " stream_id " ] ,
" value " : " #ffffff " ,
}
]
) . decode ( )
} ,
)
2016-08-18 12:33:06 +02:00
self . assert_json_error (
2021-02-12 08:19:30 +01:00
result , " Not subscribed to stream id {} " . format ( not_subbed [ 0 ] [ " stream_id " ] )
)
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_set_color_missing_color ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Updating the color property requires a color .
"""
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2017-05-08 16:23:43 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " color " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
self . assert_json_error ( result , " value key is missing from subscription_data[0] " )
2014-01-29 22:03:40 +01:00
2019-11-26 02:37:12 +01:00
def test_set_stream_wildcard_mentions_notify ( self ) - > None :
"""
A POST request to / api / v1 / users / me / subscriptions / properties with wildcard_mentions_notify
sets the property .
"""
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2019-11-26 02:37:12 +01:00
subs = gather_subscriptions ( test_user ) [ 0 ]
sub = subs [ 0 ]
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[
{
" property " : " wildcard_mentions_notify " ,
" stream_id " : sub [ " stream_id " ] ,
" value " : True ,
}
]
) . decode ( )
} ,
)
2019-11-26 02:37:12 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
updated_sub = get_subscription ( sub [ " name " ] , test_user )
2019-11-26 02:37:12 +01:00
self . assertIsNotNone ( updated_sub )
self . assertEqual ( updated_sub . wildcard_mentions_notify , True )
2017-11-05 10:51:25 +01:00
def test_set_pin_to_top ( self ) - > None :
2016-07-01 07:26:09 +02:00
"""
2017-05-09 07:01:42 +02:00
A POST request to / api / v1 / users / me / subscriptions / properties with stream_id and
2016-07-01 07:26:09 +02:00
pin_to_top data pins the stream .
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-01 07:26:09 +02:00
2020-03-10 11:48:26 +01:00
old_subs , _ = gather_subscriptions ( user )
2016-07-01 07:26:09 +02:00
sub = old_subs [ 0 ]
2021-02-12 08:20:45 +01:00
stream_id = sub [ " stream_id " ]
new_pin_to_top = not sub [ " pin_to_top " ]
2021-02-12 08:19:30 +01:00
result = self . api_post (
user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " pin_to_top " , " stream_id " : stream_id , " value " : new_pin_to_top } ]
) . decode ( )
} ,
)
2016-07-01 07:26:09 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
updated_sub = get_subscription ( sub [ " name " ] , user )
2016-07-01 07:26:09 +02:00
self . assertIsNotNone ( updated_sub )
self . assertEqual ( updated_sub . pin_to_top , new_pin_to_top )
2018-08-02 23:46:05 +02:00
def test_change_is_muted ( self ) - > None :
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2018-08-02 23:46:05 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2021-02-12 08:19:30 +01:00
sub = Subscription . objects . get (
recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user ,
)
2018-08-02 23:46:05 +02:00
self . assertEqual ( sub . is_muted , False )
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2018-08-02 23:46:05 +02:00
property_name = " is_muted "
2021-05-28 07:27:50 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 1 ) :
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[
{
" property " : property_name ,
" value " : True ,
" stream_id " : subs [ 0 ] [ " stream_id " ] ,
}
]
) . decode ( )
} ,
)
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
self . assertEqual ( events [ 0 ] [ " event " ] [ " property " ] , " in_home_view " )
self . assertEqual ( events [ 0 ] [ " event " ] [ " value " ] , False )
2021-02-12 08:19:30 +01:00
sub = Subscription . objects . get (
recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user ,
)
2018-08-02 23:46:05 +02:00
self . assertEqual ( sub . is_muted , True )
2021-02-12 08:20:45 +01:00
legacy_property_name = " in_home_view "
2021-05-28 07:27:50 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 1 ) :
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[
{
" property " : legacy_property_name ,
" value " : True ,
" stream_id " : subs [ 0 ] [ " stream_id " ] ,
}
]
) . decode ( )
} ,
)
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
self . assertEqual ( events [ 0 ] [ " event " ] [ " property " ] , " in_home_view " )
self . assertEqual ( events [ 0 ] [ " event " ] [ " value " ] , True )
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
2021-02-12 08:19:30 +01:00
sub = Subscription . objects . get (
recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user ,
)
2018-08-02 23:46:05 +02:00
self . assertEqual ( sub . is_muted , False )
2021-05-28 07:27:50 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 1 ) :
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[
{
" property " : legacy_property_name ,
" value " : False ,
" stream_id " : subs [ 0 ] [ " stream_id " ] ,
}
]
) . decode ( )
} ,
)
2018-08-02 23:46:05 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
self . assertEqual ( events [ 0 ] [ " event " ] [ " property " ] , " in_home_view " )
self . assertEqual ( events [ 0 ] [ " event " ] [ " value " ] , False )
2018-08-02 23:46:05 +02:00
2021-02-12 08:19:30 +01:00
sub = Subscription . objects . get (
recipient__type = Recipient . STREAM ,
recipient__type_id = subs [ 0 ] [ " stream_id " ] ,
user_profile = test_user ,
)
2018-08-02 23:46:05 +02:00
self . assertEqual ( sub . is_muted , True )
2017-11-05 10:51:25 +01:00
def test_set_subscription_property_incorrect ( self ) - > None :
2016-07-16 18:50:41 +02:00
"""
Trying to set a property incorrectly returns a JSON error .
"""
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2017-05-08 16:23:43 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2016-07-16 18:50:41 +02:00
2018-08-02 23:46:05 +02:00
property_name = " is_muted "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2018-08-02 23:46:05 +02:00
2016-07-16 18:50:41 +02:00
property_name = " in_home_view "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2016-07-16 18:50:41 +02:00
property_name = " desktop_notifications "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2016-07-16 18:50:41 +02:00
property_name = " audible_notifications "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2016-07-16 18:50:41 +02:00
2017-08-17 16:55:32 +02:00
property_name = " push_notifications "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2017-08-17 16:55:32 +02:00
2017-11-21 04:35:26 +01:00
property_name = " email_notifications "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2017-11-21 04:35:26 +01:00
2019-11-26 02:37:12 +01:00
property_name = " wildcard_mentions_notify "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2019-11-26 02:37:12 +01:00
2021-02-12 08:19:30 +01:00
self . assert_json_error ( result , f " { property_name } is not a boolean " )
2019-11-26 02:37:12 +01:00
2016-07-16 18:50:41 +02:00
property_name = " color "
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : property_name , " value " : False , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , f " { property_name } is not a string " )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_json_subscription_property_invalid_stream ( self ) - > None :
2020-03-10 11:48:26 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2016-07-24 16:45:20 +02:00
2017-05-09 07:01:42 +02:00
stream_id = 1000
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " is_muted " , " stream_id " : stream_id , " value " : False } ]
) . decode ( )
} ,
)
2017-05-09 07:01:42 +02:00
self . assert_json_error ( result , " Invalid stream id " )
2016-07-24 16:45:20 +02:00
2017-11-05 10:51:25 +01:00
def test_set_invalid_property ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Trying to set an invalid property returns a JSON error .
"""
2021-02-12 08:20:45 +01:00
test_user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( test_user )
2017-05-08 16:23:43 +02:00
subs = gather_subscriptions ( test_user ) [ 0 ]
2021-02-12 08:19:30 +01:00
result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[ { " property " : " bad " , " value " : " bad " , " stream_id " : subs [ 0 ] [ " stream_id " ] } ]
) . decode ( )
} ,
)
self . assert_json_error ( result , " Unknown subscription property: bad " )
2021-11-24 18:00:12 +01:00
def test_ignored_parameters_in_subscriptions_properties_endpoint ( self ) - > None :
"""
Sending an invalid parameter with a valid parameter returns
an ` ignored_parameters_unsupported ` array .
"""
test_user = self . example_user ( " hamlet " )
self . login_user ( test_user )
subs = gather_subscriptions ( test_user ) [ 0 ]
sub = subs [ 0 ]
json_result = self . api_post (
test_user ,
" /api/v1/users/me/subscriptions/properties " ,
{
" subscription_data " : orjson . dumps (
[
{
" property " : " wildcard_mentions_notify " ,
" stream_id " : sub [ " stream_id " ] ,
" value " : True ,
}
]
) . decode ( ) ,
" invalid_parameter " : orjson . dumps (
[ { " property " : " pin_to_top " , " stream_id " : sub [ " stream_id " ] , " value " : False } ]
) . decode ( ) ,
} ,
)
self . assert_json_success ( json_result )
result = orjson . loads ( json_result . content )
self . assertIn ( " ignored_parameters_unsupported " , result )
self . assertEqual ( result [ " ignored_parameters_unsupported " ] , [ " invalid_parameter " ] )
2014-01-29 22:03:40 +01:00
2016-08-23 02:08:42 +02:00
class SubscriptionRestApiTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_basic_add_delete ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
# add
request = {
2021-02-12 08:20:45 +01:00
" add " : orjson . dumps ( [ { " name " : " my_test_stream_1 " } ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
streams = self . get_streams ( user )
2021-02-12 08:20:45 +01:00
self . assertTrue ( " my_test_stream_1 " in streams )
2014-01-29 22:03:40 +01:00
# now delete the same stream
request = {
2021-02-12 08:20:45 +01:00
" delete " : orjson . dumps ( [ " my_test_stream_1 " ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2020-03-10 11:48:26 +01:00
streams = self . get_streams ( user )
2021-02-12 08:20:45 +01:00
self . assertTrue ( " my_test_stream_1 " not in streams )
2014-01-29 22:03:40 +01:00
2019-01-10 15:03:15 +01:00
def test_add_with_color ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2019-01-10 15:03:15 +01:00
# add with color proposition
request = {
2021-02-12 08:20:45 +01:00
" add " : orjson . dumps ( [ { " name " : " my_test_stream_2 " , " color " : " #afafaf " } ] ) . decode ( ) ,
2019-01-10 15:03:15 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2019-01-10 15:03:15 +01:00
self . assert_json_success ( result )
# incorrect color format
request = {
2021-02-12 08:20:45 +01:00
" subscriptions " : orjson . dumps (
[ { " name " : " my_test_stream_3 " , " color " : " #0g0g0g " } ]
2021-02-12 08:19:30 +01:00
) . decode ( ) ,
2019-01-10 15:03:15 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_post ( user , " /api/v1/users/me/subscriptions " , request )
2019-01-10 15:03:15 +01:00
self . assert_json_error ( result , ' subscriptions[0][ " color " ] is not a valid hex color code ' )
2017-11-05 10:51:25 +01:00
def test_api_valid_property ( self ) - > None :
2017-05-09 22:29:59 +02:00
"""
Trying to set valid json returns success message .
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2017-05-09 22:29:59 +02:00
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-10 11:48:26 +01:00
subs = gather_subscriptions ( user ) [ 0 ]
2021-02-12 08:19:30 +01:00
result = self . api_patch (
user ,
" /api/v1/users/me/subscriptions/ {} " . format ( subs [ 0 ] [ " stream_id " ] ) ,
2021-02-12 08:20:45 +01:00
{ " property " : " color " , " value " : " #c2c2c2 " } ,
2021-02-12 08:19:30 +01:00
)
2017-05-09 22:29:59 +02:00
self . assert_json_success ( result )
2017-11-05 10:51:25 +01:00
def test_api_invalid_property ( self ) - > None :
2017-05-09 22:29:59 +02:00
"""
Trying to set an invalid property returns a JSON error .
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2017-05-09 22:29:59 +02:00
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-10 11:48:26 +01:00
subs = gather_subscriptions ( user ) [ 0 ]
2017-05-09 22:29:59 +02:00
2021-02-12 08:19:30 +01:00
result = self . api_patch (
user ,
" /api/v1/users/me/subscriptions/ {} " . format ( subs [ 0 ] [ " stream_id " ] ) ,
2021-02-12 08:20:45 +01:00
{ " property " : " invalid " , " value " : " somevalue " } ,
2021-02-12 08:19:30 +01:00
)
self . assert_json_error ( result , " Unknown subscription property: invalid " )
2017-05-09 22:29:59 +02:00
2017-11-05 10:51:25 +01:00
def test_api_invalid_stream_id ( self ) - > None :
2017-05-09 22:29:59 +02:00
"""
Trying to set an invalid stream id returns a JSON error .
"""
2020-03-10 11:48:26 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2021-02-12 08:19:30 +01:00
result = self . api_patch (
user ,
" /api/v1/users/me/subscriptions/121 " ,
2021-02-12 08:20:45 +01:00
{ " property " : " is_muted " , " value " : " somevalue " } ,
2021-02-12 08:19:30 +01:00
)
self . assert_json_error ( result , " Invalid stream id " )
2017-05-09 22:29:59 +02:00
2017-11-05 10:51:25 +01:00
def test_bad_add_parameters ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def check_for_error ( val : Any , expected_message : str ) - > None :
2014-01-29 22:03:40 +01:00
request = {
2021-02-12 08:20:45 +01:00
" add " : orjson . dumps ( val ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result , expected_message )
2021-02-12 08:20:45 +01:00
check_for_error ( [ " foo " ] , " add[0] is not a dict " )
check_for_error ( [ { " bogus " : " foo " } ] , " name key is missing from add[0] " )
check_for_error ( [ { " name " : { } } ] , ' add[0][ " name " ] is not a string ' )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_bad_principals ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
request = {
2021-02-12 08:20:45 +01:00
" add " : orjson . dumps ( [ { " name " : " my_new_stream " } ] ) . decode ( ) ,
" principals " : orjson . dumps ( [ { } ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " principals is not an allowed_type " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_bad_delete_parameters ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
request = {
2021-02-12 08:20:45 +01:00
" delete " : orjson . dumps ( [ { " name " : " my_test_stream_1 " } ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2014-01-29 22:03:40 +01:00
self . assert_json_error ( result , " delete[0] is not a string " )
2017-11-05 10:51:25 +01:00
def test_add_or_delete_not_specified ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-16 18:50:41 +02:00
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , { } )
2021-02-12 08:19:30 +01:00
self . assert_json_error ( result , ' Nothing to do. Specify at least one of " add " or " delete " . ' )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_patch_enforces_valid_stream_name_check ( self ) - > None :
2016-07-16 18:50:41 +02:00
"""
Only way to force an error is with a empty string .
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-16 18:50:41 +02:00
invalid_stream_name = " "
request = {
2021-02-12 08:20:45 +01:00
" delete " : orjson . dumps ( [ invalid_stream_name ] ) . decode ( ) ,
2016-07-16 18:50:41 +02:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2022-01-11 21:57:53 +01:00
self . assert_json_error ( result , " Stream name can ' t be empty! " )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_stream_name_too_long ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2016-07-16 18:50:41 +02:00
long_stream_name = " a " * 61
request = {
2021-02-12 08:20:45 +01:00
" delete " : orjson . dumps ( [ long_stream_name ] ) . decode ( ) ,
2016-07-16 18:50:41 +02:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2021-02-12 08:19:30 +01:00
self . assert_json_error ( result , " Stream name too long (limit: 60 characters). " )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_stream_name_contains_null ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2017-04-26 01:28:22 +02:00
stream_name = " abc \000 "
request = {
2021-02-12 08:20:45 +01:00
" delete " : orjson . dumps ( [ stream_name ] ) . decode ( ) ,
2017-04-26 01:28:22 +02:00
}
2020-03-10 11:48:26 +01:00
result = self . api_patch ( user , " /api/v1/users/me/subscriptions " , request )
2022-01-11 22:05:20 +01:00
self . assert_json_error ( result , " Invalid character in stream name, at position 4! " )
2017-04-26 01:28:22 +02:00
2017-11-05 10:51:25 +01:00
def test_compose_views_rollback ( self ) - > None :
2021-02-12 08:19:30 +01:00
"""
2016-09-12 17:21:49 +02:00
The compose_views function ( ) is used under the hood by
update_subscriptions_backend . It ' s a pretty simple method in terms of
control flow , but it uses a Django rollback , which may make it brittle
code when we upgrade Django . We test the functions ' s rollback logic
here with a simple scenario to avoid false positives related to
subscription complications .
2021-02-12 08:19:30 +01:00
"""
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
user_profile . full_name = " Hamlet "
2016-09-12 17:21:49 +02:00
user_profile . save ( )
2022-02-05 00:36:40 +01:00
request = HostRequestMock ( user_profile = user_profile )
2016-09-12 17:21:49 +02:00
2021-02-16 01:19:02 +01:00
def thunk1 ( ) - > HttpResponse :
2021-02-12 08:20:45 +01:00
user_profile . full_name = " Should not be committed "
2016-09-12 17:21:49 +02:00
user_profile . save ( )
2022-01-31 13:44:02 +01:00
return json_success ( request )
2016-09-12 17:21:49 +02:00
2021-02-16 01:19:02 +01:00
def thunk2 ( ) - > HttpResponse :
2021-07-04 10:00:55 +02:00
raise JsonableError ( " random failure " )
2016-09-12 17:21:49 +02:00
with self . assertRaises ( JsonableError ) :
2021-02-16 01:19:02 +01:00
compose_views ( [ thunk1 , thunk2 ] )
2016-09-12 17:21:49 +02:00
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
self . assertEqual ( user_profile . full_name , " Hamlet " )
2016-09-12 17:21:49 +02:00
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
class SubscriptionAPITest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def setUp ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
All tests will be logged in as hamlet . Also save various useful values
as attributes that tests can access .
"""
2019-10-19 20:47:00 +02:00
super ( ) . setUp ( )
2021-02-12 08:20:45 +01:00
self . user_profile = self . example_user ( " hamlet " )
2017-05-07 21:25:59 +02:00
self . test_email = self . user_profile . email
2017-10-07 16:00:39 +02:00
self . test_user = self . user_profile
2020-03-06 18:40:46 +01:00
self . login_user ( self . user_profile )
2017-07-12 12:32:14 +02:00
self . test_realm = self . user_profile . realm
2020-03-09 21:41:26 +01:00
self . streams = self . get_streams ( self . user_profile )
2014-01-29 22:03:40 +01:00
2018-05-11 01:39:38 +02:00
def make_random_stream_names ( self , existing_stream_names : List [ str ] ) - > List [ str ] :
2014-01-29 22:03:40 +01:00
"""
Helper function to make up random stream names . It takes
existing_stream_names and randomly appends a digit to the end of each ,
but avoids names that appear in the list names_to_avoid .
"""
random_streams = [ ]
2017-07-12 12:32:14 +02:00
all_stream_names = [ stream . name for stream in Stream . objects . filter ( realm = self . test_realm ) ]
2014-01-29 22:03:40 +01:00
for stream in existing_stream_names :
random_stream = stream + str ( random . randint ( 0 , 9 ) )
2016-05-10 01:55:43 +02:00
if random_stream not in all_stream_names :
2014-01-29 22:03:40 +01:00
random_streams . append ( random_stream )
return random_streams
2022-01-11 22:05:20 +01:00
def test_invalid_stream_name ( self ) - > None :
"""
Creating a stream with invalid ' Cc ' and ' Cn ' category of unicode characters in stream name
"""
user = self . example_user ( " hamlet " )
self . login_user ( user )
# For Cc category
post_data_cc = {
" subscriptions " : orjson . dumps (
[ { " name " : " new \n \r stream " , " description " : " this is description " } ]
) . decode ( ) ,
" invite_only " : orjson . dumps ( False ) . decode ( ) ,
}
result = self . api_post (
user , " /api/v1/users/me/subscriptions " , post_data_cc , subdomain = " zulip "
)
self . assert_json_error ( result , " Invalid character in stream name, at position 4! " )
# For Cn category
post_data_cn = {
" subscriptions " : orjson . dumps (
[ { " name " : " new \uFFFE stream " , " description " : " this is description " } ]
) . decode ( ) ,
" invite_only " : orjson . dumps ( False ) . decode ( ) ,
}
result = self . api_post (
user , " /api/v1/users/me/subscriptions " , post_data_cn , subdomain = " zulip "
)
self . assert_json_error ( result , " Invalid character in stream name, at position 4! " )
def test_invalid_stream_rename ( self ) - > None :
"""
Renaming a stream with invalid characters .
"""
user_profile = self . example_user ( " hamlet " )
self . login_user ( user_profile )
stream = self . subscribe ( user_profile , " stream_name1 " )
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
# Check for empty name
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " " } )
self . assert_json_error ( result , " Stream name can ' t be empty! " )
# Check for long name
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " a " * 61 } )
self . assert_json_error ( result , " Stream name too long (limit: 60 characters). " )
# Check for Cc characters
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " test \n \r name " } )
self . assert_json_error ( result , " Invalid character in stream name, at position 5! " )
# Check for Cn characters
result = self . client_patch ( f " /json/streams/ { stream . id } " , { " new_name " : " test \uFFFE ame " } )
self . assert_json_error ( result , " Invalid character in stream name, at position 5! " )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_list ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling / api / v1 / users / me / subscriptions should successfully return your subscriptions .
"""
2020-03-10 11:48:26 +01:00
result = self . api_get ( self . test_user , " /api/v1/users/me/subscriptions " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
self . assertIn ( " subscriptions " , json )
2021-02-12 08:20:45 +01:00
for stream in json [ " subscriptions " ] :
self . assertIsInstance ( stream [ " name " ] , str )
self . assertIsInstance ( stream [ " color " ] , str )
self . assertIsInstance ( stream [ " invite_only " ] , bool )
2017-03-23 07:22:28 +01:00
# check that the stream name corresponds to an actual
# stream; will throw Stream.DoesNotExist if it doesn't
2021-02-12 08:20:45 +01:00
get_stream ( stream [ " name " ] , self . test_realm )
list_streams = [ stream [ " name " ] for stream in json [ " subscriptions " ] ]
2014-01-29 22:03:40 +01:00
# also check that this matches the list of your subscriptions
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( list_streams ) , sorted ( self . streams ) )
2014-01-29 22:03:40 +01:00
2021-07-17 00:29:45 +02:00
def test_successful_subscriptions_list_subscribers ( self ) - > None :
"""
Calling / api / v1 / users / me / subscriptions should successfully return your subscriptions .
"""
result = self . api_get (
self . test_user ,
" /api/v1/users/me/subscriptions " ,
{ " include_subscribers " : " true " } ,
)
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " subscriptions " , json )
for stream in json [ " subscriptions " ] :
self . assertIsInstance ( stream [ " name " ] , str )
self . assertIsInstance ( stream [ " color " ] , str )
self . assertIsInstance ( stream [ " invite_only " ] , bool )
# check that the stream name corresponds to an actual
# stream; will throw Stream.DoesNotExist if it doesn't
get_stream ( stream [ " name " ] , self . test_realm )
list_streams = [ stream [ " name " ] for stream in json [ " subscriptions " ] ]
# also check that this matches the list of your subscriptions
self . assertEqual ( sorted ( list_streams ) , sorted ( self . streams ) )
2021-02-12 08:19:30 +01:00
def helper_check_subs_before_and_after_add (
self ,
subscriptions : List [ str ] ,
other_params : Dict [ str , Any ] ,
subscribed : List [ str ] ,
already_subscribed : List [ str ] ,
email : str ,
new_subs : List [ str ] ,
realm : Realm ,
invite_only : bool = False ,
) - > None :
2014-01-29 22:03:40 +01:00
"""
Check result of adding subscriptions .
You can add subscriptions for yourself or possibly many
principals , which is why e - mails map to subscriptions in the
result .
The result json is of the form
{ " msg " : " " ,
" result " : " success " ,
2017-05-25 01:44:04 +02:00
" already_subscribed " : { self . example_email ( " iago " ) : [ " Venice " , " Verona " ] } ,
" subscribed " : { self . example_email ( " iago " ) : [ " Venice8 " ] } }
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user , subscriptions , other_params , invite_only = invite_only
)
2017-08-17 08:45:20 +02:00
json = result . json ( )
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( subscribed ) , sorted ( json [ " subscribed " ] [ email ] ) )
self . assertEqual ( sorted ( already_subscribed ) , sorted ( json [ " already_subscribed " ] [ email ] ) )
2020-03-09 21:41:26 +01:00
user = get_user ( email , realm )
new_streams = self . get_streams ( user )
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( new_streams ) , sorted ( new_subs ) )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_add ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-04-13 02:05:48 +02:00
Calling POST / json / users / me / subscriptions should successfully add
streams , and should determine which are new subscriptions vs
which were already subscribed . We add 2 new streams to the
list of subscriptions and confirm the right number of events
are generated .
2014-01-29 22:03:40 +01:00
"""
self . assertNotEqual ( len ( self . streams ) , 0 ) # necessary for full test coverage
2020-04-09 21:51:58 +02:00
add_streams = [ " Verona2 " , " Denmark5 " ]
2014-01-29 22:03:40 +01:00
self . assertNotEqual ( len ( add_streams ) , 0 ) # necessary for full test coverage
2021-05-28 10:47:43 +02:00
# Three events should be sent for each stream for stream creation, subscription add and message notifications.
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 6 ) :
2021-02-12 08:19:30 +01:00
self . helper_check_subs_before_and_after_add (
self . streams + add_streams ,
{ } ,
add_streams ,
self . streams ,
self . test_email ,
self . streams + add_streams ,
self . test_realm ,
)
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_add_with_announce ( self ) - > None :
2016-06-24 20:10:27 +02:00
"""
Calling POST / json / users / me / subscriptions should successfully add
streams , and should determine which are new subscriptions vs
which were already subscribed . We add 2 new streams to the
list of subscriptions and confirm the right number of events
are generated .
"""
self . assertNotEqual ( len ( self . streams ) , 0 )
2020-04-09 21:51:58 +02:00
add_streams = [ " Verona2 " , " Denmark5 " ]
2016-06-24 20:10:27 +02:00
self . assertNotEqual ( len ( add_streams ) , 0 )
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2016-06-24 20:10:27 +02:00
other_params = {
2021-02-12 08:20:45 +01:00
" announce " : " true " ,
2016-06-24 20:10:27 +02:00
}
2017-07-12 22:17:24 +02:00
notifications_stream = get_stream ( self . streams [ 0 ] , self . test_realm )
2017-09-17 19:53:38 +02:00
self . test_realm . notifications_stream_id = notifications_stream . id
2017-07-12 22:17:24 +02:00
self . test_realm . save ( )
2016-06-24 20:10:27 +02:00
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 7 ) :
2021-02-12 08:19:30 +01:00
self . helper_check_subs_before_and_after_add (
self . streams + add_streams ,
other_params ,
add_streams ,
self . streams ,
self . test_email ,
self . streams + add_streams ,
self . test_realm ,
)
2020-10-26 13:16:10 +01:00
2021-02-12 08:19:30 +01:00
expected_stream_ids = { get_stream ( stream , self . test_realm ) . id for stream in add_streams }
2020-10-26 13:16:10 +01:00
2021-08-02 23:16:44 +02:00
( peer_add_event , ) = ( event for event in events if event [ " event " ] . get ( " op " ) == " peer_add " )
2020-10-26 13:16:10 +01:00
self . assertEqual ( set ( peer_add_event [ " event " ] [ " stream_ids " ] ) , expected_stream_ids )
self . assertEqual ( set ( peer_add_event [ " event " ] [ " user_ids " ] ) , { self . test_user . id } )
2016-06-24 20:10:27 +02:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_notifies_pm ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions should notify when a new stream is created .
2014-01-29 22:03:40 +01:00
"""
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
2014-01-29 22:03:40 +01:00
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( invitee ) [ 0 ]
2016-06-04 19:50:38 +02:00
invite_streams = self . make_random_stream_names ( [ current_stream ] ) [ : 1 ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2014-01-29 22:03:40 +01:00
invitee ,
invite_streams ,
extra_post_data = {
2021-02-12 08:20:45 +01:00
" announce " : " true " ,
" principals " : orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
} ,
)
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_notifies_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions should notify when a new stream is created .
2014-01-29 22:03:40 +01:00
"""
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
2021-02-12 08:20:45 +01:00
invitee_full_name = " Iago "
2014-01-29 22:03:40 +01:00
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( invitee ) [ 0 ]
2016-06-04 19:50:38 +02:00
invite_streams = self . make_random_stream_names ( [ current_stream ] ) [ : 1 ]
2014-01-29 22:03:40 +01:00
2017-07-12 12:32:14 +02:00
notifications_stream = get_stream ( current_stream , self . test_realm )
2017-09-17 19:53:38 +02:00
self . test_realm . notifications_stream_id = notifications_stream . id
2017-07-12 12:32:14 +02:00
self . test_realm . save ( )
2014-01-29 22:03:40 +01:00
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2014-01-29 22:03:40 +01:00
invitee ,
invite_streams ,
extra_post_data = dict (
2021-02-12 08:20:45 +01:00
announce = " true " ,
2021-02-12 08:19:30 +01:00
principals = orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
) ,
)
2022-03-29 09:17:28 +02:00
target_stream = get_stream ( invite_streams [ 0 ] , self . test_realm )
2014-01-29 22:03:40 +01:00
2017-04-27 00:03:21 +02:00
msg = self . get_second_to_last_message ( )
2014-01-29 22:03:40 +01:00
self . assertEqual ( msg . recipient . type , Recipient . STREAM )
2021-03-08 11:39:48 +01:00
self . assertEqual ( msg . recipient . type_id , notifications_stream . id )
self . assertEqual ( msg . sender_id , self . notification_bot ( self . test_realm ) . id )
2021-02-12 08:19:30 +01:00
expected_msg = (
f " @_** { invitee_full_name } | { invitee . id } ** created a new stream #** { invite_streams [ 0 ] } **. "
)
2014-01-29 22:03:40 +01:00
self . assertEqual ( msg . content , expected_msg )
2022-03-29 09:17:28 +02:00
msg = self . get_last_message ( )
self . assertEqual ( msg . recipient . type , Recipient . STREAM )
self . assertEqual ( msg . recipient . type_id , target_stream . id )
self . assertEqual ( msg . sender_id , self . notification_bot ( self . test_realm ) . id )
expected_msg = (
f " **Public** stream created by @_** { invitee_full_name } | { invitee . id } **. **Description:** \n "
" ```` quote \n *No description.* \n ```` "
)
self . assertEqual ( msg . content , expected_msg )
2017-11-05 10:51:25 +01:00
def test_successful_cross_realm_notification ( self ) - > None :
2017-01-18 23:19:18 +01:00
"""
Calling POST / json / users / me / subscriptions in a new realm
should notify with a proper new stream link
"""
2017-08-24 04:52:34 +02:00
realm = do_create_realm ( " testrealm " , " Test Realm " )
2017-01-18 23:19:18 +01:00
2021-02-12 08:20:45 +01:00
notifications_stream = Stream . objects . get ( name = " general " , realm = realm )
2017-01-18 23:19:18 +01:00
realm . notifications_stream = notifications_stream
realm . save ( )
invite_streams = [ " cross_stream " ]
2021-02-12 08:20:45 +01:00
user = self . example_user ( " AARON " )
2017-01-18 23:19:18 +01:00
user . realm = realm
user . save ( )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
user ,
2017-01-18 23:19:18 +01:00
invite_streams ,
extra_post_data = dict (
2021-02-12 08:20:45 +01:00
announce = " true " ,
2017-01-18 23:19:18 +01:00
) ,
2017-08-26 00:58:13 +02:00
subdomain = " testrealm " ,
2017-01-18 23:19:18 +01:00
)
2017-04-27 00:03:21 +02:00
msg = self . get_second_to_last_message ( )
2017-01-18 23:19:18 +01:00
self . assertEqual ( msg . recipient . type , Recipient . STREAM )
2021-03-08 11:39:48 +01:00
self . assertEqual ( msg . recipient . type_id , notifications_stream . id )
self . assertEqual ( msg . sender_id , self . notification_bot ( realm ) . id )
2021-02-12 08:20:45 +01:00
stream_id = Stream . objects . latest ( " id " ) . id
2020-06-13 08:59:37 +02:00
expected_rendered_msg = f ' <p><span class= " user-mention silent " data-user-id= " { user . id } " > { user . full_name } </span> created a new stream <a class= " stream " data-stream-id= " { stream_id } " href= " /#narrow/stream/ { stream_id } - { invite_streams [ 0 ] } " ># { invite_streams [ 0 ] } </a>.</p> '
2017-01-18 23:19:18 +01:00
self . assertEqual ( msg . rendered_content , expected_rendered_msg )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_notifies_with_escaping ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions should notify when a new stream is created .
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:20:45 +01:00
invitee_full_name = " Iago "
invitee = self . example_user ( " iago " )
2014-01-29 22:03:40 +01:00
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( invitee ) [ 0 ]
2017-07-12 12:32:14 +02:00
notifications_stream = get_stream ( current_stream , self . test_realm )
2017-09-17 19:53:38 +02:00
self . test_realm . notifications_stream_id = notifications_stream . id
2017-07-12 12:32:14 +02:00
self . test_realm . save ( )
2016-01-24 03:39:44 +01:00
2021-02-12 08:20:45 +01:00
invite_streams = [ " strange ) \\ test " ]
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2014-01-29 22:03:40 +01:00
invitee ,
invite_streams ,
extra_post_data = {
2021-02-12 08:20:45 +01:00
" announce " : " true " ,
" principals " : orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ,
2014-01-29 22:03:40 +01:00
} ,
)
2017-04-27 00:03:21 +02:00
msg = self . get_second_to_last_message ( )
2021-03-08 11:39:48 +01:00
self . assertEqual ( msg . sender_id , self . notification_bot ( notifications_stream . realm ) . id )
2021-02-12 08:19:30 +01:00
expected_msg = (
f " @_** { invitee_full_name } | { invitee . id } ** created a new stream #** { invite_streams [ 0 ] } **. "
)
2014-01-29 22:03:40 +01:00
self . assertEqual ( msg . content , expected_msg )
2017-11-05 10:51:25 +01:00
def test_non_ascii_stream_subscription ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Subscribing to a stream name with non - ASCII characters succeeds .
"""
2021-02-12 08:19:30 +01:00
self . helper_check_subs_before_and_after_add (
[ * self . streams , " hümbüǵ " ] ,
{ } ,
[ " hümbüǵ " ] ,
self . streams ,
self . test_email ,
[ * self . streams , " hümbüǵ " ] ,
self . test_realm ,
)
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_too_long ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions on a stream whose name is > 60
2014-01-29 22:03:40 +01:00
characters should return a JSON error .
"""
# character limit is 60 characters
long_stream_name = " a " * 61
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user , [ long_stream_name ] , allow_fail = True
)
self . assert_json_error ( result , " Stream name too long (limit: 60 characters). " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_stream_with_null ( self ) - > None :
2017-04-26 01:28:22 +02:00
"""
Calling POST / json / users / me / subscriptions on a stream whose name contains
null characters should return a JSON error .
"""
stream_name = " abc \000 "
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( self . test_user , [ stream_name ] , allow_fail = True )
2022-01-11 22:05:20 +01:00
self . assert_json_error ( result , " Invalid character in stream name, at position 4! " )
2017-04-26 01:28:22 +02:00
2021-03-26 08:16:11 +01:00
def _test_user_settings_for_creating_streams (
2021-10-04 09:56:16 +02:00
self ,
stream_policy : str ,
* ,
invite_only : bool ,
is_web_public : bool ,
2021-03-26 08:16:11 +01:00
) - > None :
2021-03-23 17:17:34 +01:00
user_profile = self . example_user ( " cordelia " )
realm = user_profile . realm
2021-03-26 10:49:15 +01:00
do_set_realm_property ( realm , stream_policy , Realm . POLICY_ADMINS_ONLY , acting_user = None )
2021-03-21 18:17:45 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MODERATOR , acting_user = None )
2021-03-23 17:17:34 +01:00
result = self . common_subscribe_to_streams (
user_profile ,
[ " new_stream1 " ] ,
2021-03-26 08:16:11 +01:00
invite_only = invite_only ,
2021-10-04 09:56:16 +02:00
is_web_public = is_web_public ,
2021-03-23 17:17:34 +01:00
allow_fail = True ,
)
2021-04-07 21:12:40 +02:00
self . assert_json_error ( result , " Insufficient permission " )
2021-03-23 17:17:34 +01:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2021-03-26 08:16:11 +01:00
self . common_subscribe_to_streams ( user_profile , [ " new_stream1 " ] , invite_only = invite_only )
2021-03-23 17:17:34 +01:00
2021-03-26 10:49:15 +01:00
do_set_realm_property ( realm , stream_policy , Realm . POLICY_MODERATORS_ONLY , acting_user = None )
2021-03-21 18:17:45 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
# Make sure that we are checking the permission with a full member,
# as full member is the user just below moderator in the role hierarchy.
self . assertFalse ( user_profile . is_provisional_member )
result = self . common_subscribe_to_streams (
user_profile ,
[ " new_stream2 " ] ,
allow_fail = True ,
2021-03-27 05:48:37 +01:00
invite_only = invite_only ,
2021-10-04 09:56:16 +02:00
is_web_public = is_web_public ,
2021-03-21 18:17:45 +01:00
)
2021-04-07 21:12:40 +02:00
self . assert_json_error ( result , " Insufficient permission " )
2021-03-21 18:17:45 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MODERATOR , acting_user = None )
2021-03-27 05:48:37 +01:00
self . common_subscribe_to_streams ( user_profile , [ " new_stream2 " ] , invite_only = invite_only )
2021-03-21 18:17:45 +01:00
2021-03-26 10:49:15 +01:00
do_set_realm_property ( realm , stream_policy , Realm . POLICY_MEMBERS_ONLY , acting_user = None )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_GUEST , acting_user = None )
2021-03-23 17:17:34 +01:00
result = self . common_subscribe_to_streams (
user_profile ,
2021-03-26 08:16:11 +01:00
[ " new_stream3 " ] ,
invite_only = invite_only ,
2021-10-04 09:56:16 +02:00
is_web_public = is_web_public ,
2021-03-23 17:17:34 +01:00
allow_fail = True ,
)
self . assert_json_error ( result , " Not allowed for guest users " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( user_profile , UserProfile . ROLE_MEMBER , acting_user = None )
2021-03-23 17:17:34 +01:00
self . common_subscribe_to_streams (
self . test_user ,
2021-03-26 08:16:11 +01:00
[ " new_stream4 " ] ,
invite_only = invite_only ,
2021-10-04 09:56:16 +02:00
is_web_public = is_web_public ,
2021-03-23 17:17:34 +01:00
)
2021-03-01 11:33:24 +01:00
do_set_realm_property (
2021-03-26 08:16:11 +01:00
realm , stream_policy , Realm . POLICY_FULL_MEMBERS_ONLY , acting_user = None
2021-03-01 11:33:24 +01:00
)
do_set_realm_property ( realm , " waiting_period_threshold " , 100000 , acting_user = None )
2021-03-23 17:17:34 +01:00
result = self . common_subscribe_to_streams (
user_profile ,
2021-03-26 08:16:11 +01:00
[ " new_stream5 " ] ,
invite_only = invite_only ,
2021-10-04 09:56:16 +02:00
is_web_public = is_web_public ,
2021-03-23 17:17:34 +01:00
allow_fail = True ,
)
2021-04-07 21:12:40 +02:00
self . assert_json_error ( result , " Insufficient permission " )
2021-03-23 17:17:34 +01:00
2021-03-01 11:33:24 +01:00
do_set_realm_property ( realm , " waiting_period_threshold " , 0 , acting_user = None )
2021-03-26 08:16:11 +01:00
self . common_subscribe_to_streams ( user_profile , [ " new_stream3 " ] , invite_only = invite_only )
2021-03-27 05:48:37 +01:00
def test_user_settings_for_creating_private_streams ( self ) - > None :
self . _test_user_settings_for_creating_streams (
2021-10-04 09:56:16 +02:00
" create_private_stream_policy " ,
invite_only = True ,
is_web_public = False ,
2021-03-27 05:48:37 +01:00
)
def test_user_settings_for_creating_public_streams ( self ) - > None :
self . _test_user_settings_for_creating_streams (
2021-10-04 09:56:16 +02:00
" create_public_stream_policy " ,
invite_only = False ,
is_web_public = False ,
)
def test_user_settings_for_creating_web_public_streams ( self ) - > None :
self . _test_user_settings_for_creating_streams (
" create_web_public_stream_policy " , invite_only = False , is_web_public = True
2021-03-27 05:48:37 +01:00
)
def _test_can_create_streams ( self , stream_policy : str , invite_only : bool ) - > None :
if invite_only :
def validation_func ( user_profile : UserProfile ) - > bool :
user_profile . refresh_from_db ( )
return user_profile . can_create_private_streams ( )
else :
def validation_func ( user_profile : UserProfile ) - > bool :
user_profile . refresh_from_db ( )
return user_profile . can_create_public_streams ( )
self . check_has_permission_policies ( stream_policy , validation_func )
2021-03-23 17:17:34 +01:00
2021-03-27 05:48:37 +01:00
def test_can_create_private_streams ( self ) - > None :
self . _test_can_create_streams ( " create_private_stream_policy " , invite_only = True )
2018-07-30 00:59:45 +02:00
2021-03-27 05:48:37 +01:00
def test_can_create_public_streams ( self ) - > None :
self . _test_can_create_streams ( " create_public_stream_policy " , invite_only = False )
2018-07-30 00:59:45 +02:00
2021-10-04 09:03:01 +02:00
def test_can_create_web_public_streams ( self ) - > None :
def validation_func ( user_profile : UserProfile ) - > bool :
user_profile . refresh_from_db ( )
return user_profile . can_create_web_public_streams ( )
self . check_has_permission_policies ( " create_web_public_stream_policy " , validation_func )
2018-07-30 01:25:13 +02:00
def test_user_settings_for_subscribing_other_users ( self ) - > None :
"""
2019-04-08 19:23:00 +02:00
You can ' t subscribe other people to streams if you are a guest or your account is not old
enough .
2018-07-30 01:25:13 +02:00
"""
2019-04-08 19:23:00 +02:00
user_profile = self . example_user ( " cordelia " )
2020-04-09 19:07:57 +02:00
invitee_user_id = user_profile . id
2019-04-08 19:23:00 +02:00
realm = user_profile . realm
2018-07-30 01:25:13 +02:00
2021-03-01 11:33:24 +01:00
do_set_realm_property (
2021-03-27 05:48:37 +01:00
realm , " create_public_stream_policy " , Realm . POLICY_MEMBERS_ONLY , acting_user = None
2021-03-01 11:33:24 +01:00
)
do_set_realm_property (
realm , " invite_to_stream_policy " , Realm . POLICY_ADMINS_ONLY , acting_user = None
)
2021-03-28 21:47:50 +02:00
do_change_user_role ( self . test_user , UserProfile . ROLE_MODERATOR , acting_user = None )
2019-04-08 19:23:00 +02:00
result = self . common_subscribe_to_streams (
2020-06-17 23:49:33 +02:00
self . test_user ,
2021-02-12 08:20:45 +01:00
[ " stream1 " ] ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
)
2021-04-07 21:18:33 +02:00
self . assert_json_error ( result , " Insufficient permission " )
2019-04-08 19:23:00 +02:00
2021-03-27 05:13:46 +01:00
do_change_user_role ( self . test_user , UserProfile . ROLE_REALM_ADMINISTRATOR , acting_user = None )
2021-03-23 12:53:39 +01:00
self . common_subscribe_to_streams (
self . test_user , [ " stream1 " ] , { " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) }
)
2021-03-28 21:47:50 +02:00
do_set_realm_property (
realm , " invite_to_stream_policy " , Realm . POLICY_MODERATORS_ONLY , acting_user = None
)
do_change_user_role ( self . test_user , UserProfile . ROLE_MEMBER , acting_user = None )
# Make sure that we are checking the permission with a full member,
# as full member is the user just below moderator in the role hierarchy.
self . assertFalse ( self . test_user . is_provisional_member )
result = self . common_subscribe_to_streams (
self . test_user ,
[ " stream2 " ] ,
{ " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } ,
allow_fail = True ,
)
2021-04-07 21:18:33 +02:00
self . assert_json_error ( result , " Insufficient permission " )
2021-03-28 21:47:50 +02:00
do_change_user_role ( self . test_user , UserProfile . ROLE_MODERATOR , acting_user = None )
self . common_subscribe_to_streams (
self . test_user , [ " stream2 " ] , { " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) }
)
self . unsubscribe ( user_profile , " stream2 " )
2021-03-01 11:33:24 +01:00
do_set_realm_property (
realm , " invite_to_stream_policy " , Realm . POLICY_MEMBERS_ONLY , acting_user = None
)
2021-03-27 05:13:46 +01:00
do_change_user_role ( self . test_user , UserProfile . ROLE_GUEST , acting_user = None )
2021-03-23 12:53:39 +01:00
result = self . common_subscribe_to_streams (
self . test_user ,
[ " stream2 " ] ,
{ " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } ,
allow_fail = True ,
)
self . assert_json_error ( result , " Not allowed for guest users " )
2021-03-27 05:13:46 +01:00
do_change_user_role ( self . test_user , UserProfile . ROLE_MEMBER , acting_user = None )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2021-02-12 08:19:30 +01:00
self . test_user ,
2021-02-12 08:20:45 +01:00
[ " stream2 " ] ,
2021-02-12 08:19:30 +01:00
{ " principals " : orjson . dumps ( [ self . test_user . id , invitee_user_id ] ) . decode ( ) } ,
)
2019-04-08 19:23:00 +02:00
self . unsubscribe ( user_profile , " stream2 " )
2021-03-01 11:33:24 +01:00
do_set_realm_property (
realm ,
" invite_to_stream_policy " ,
Realm . POLICY_FULL_MEMBERS_ONLY ,
acting_user = None ,
)
do_set_realm_property ( realm , " waiting_period_threshold " , 100000 , acting_user = None )
2019-04-08 19:23:00 +02:00
result = self . common_subscribe_to_streams (
2020-06-17 23:49:33 +02:00
self . test_user ,
2021-02-12 08:20:45 +01:00
[ " stream2 " ] ,
2020-08-07 01:09:47 +02:00
{ " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) } ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
)
2021-04-07 21:18:33 +02:00
self . assert_json_error ( result , " Insufficient permission " )
2019-04-08 19:23:00 +02:00
2021-03-01 11:33:24 +01:00
do_set_realm_property ( realm , " waiting_period_threshold " , 0 , acting_user = None )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2021-02-12 08:20:45 +01:00
self . test_user , [ " stream2 " ] , { " principals " : orjson . dumps ( [ invitee_user_id ] ) . decode ( ) }
2021-02-12 08:19:30 +01:00
)
2018-07-30 01:25:13 +02:00
def test_can_subscribe_other_users ( self ) - > None :
"""
2019-04-08 19:23:00 +02:00
You can ' t subscribe other people to streams if you are a guest or your account is not old
enough .
2018-07-30 01:25:13 +02:00
"""
2021-04-13 16:01:40 +02:00
def validation_func ( user_profile : UserProfile ) - > bool :
user_profile . refresh_from_db ( )
return user_profile . can_subscribe_other_users ( )
2021-03-23 10:58:28 +01:00
2021-04-13 16:01:40 +02:00
self . check_has_permission_policies ( " invite_to_stream_policy " , validation_func )
2018-07-30 01:25:13 +02:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_invalid_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions on a stream whose name is invalid ( as
2014-01-29 22:03:40 +01:00
defined by valid_stream_name in zerver / views . py ) should return a JSON
error .
"""
# currently, the only invalid name is the empty string
invalid_stream_name = " "
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user , [ invalid_stream_name ] , allow_fail = True
)
2022-01-11 21:57:53 +01:00
self . assert_json_error ( result , " Stream name can ' t be empty! " )
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
def assert_adding_subscriptions_for_principal (
self ,
invitee_data : Union [ str , int ] ,
invitee_realm : Realm ,
streams : List [ str ] ,
2022-03-04 22:28:37 +01:00
policy_name : str ,
2021-02-12 08:19:30 +01:00
invite_only : bool = False ,
) - > None :
2014-01-29 22:03:40 +01:00
"""
2015-11-30 21:39:40 +01:00
Calling POST / json / users / me / subscriptions on behalf of another principal ( for
2014-01-29 22:03:40 +01:00
whom you have permission to add subscriptions ) should successfully add
those subscriptions and send a message to the subscribee notifying
them .
"""
2020-04-09 19:07:57 +02:00
if isinstance ( invitee_data , str ) :
other_profile = get_user ( invitee_data , invitee_realm )
else :
other_profile = get_user_profile_by_id_in_realm ( invitee_data , invitee_realm )
2020-03-09 21:41:26 +01:00
current_streams = self . get_streams ( other_profile )
2014-01-29 22:03:40 +01:00
self . assertIsInstance ( other_profile , UserProfile )
self . assertNotEqual ( len ( current_streams ) , 0 ) # necessary for full test coverage
self . assertNotEqual ( len ( streams ) , 0 ) # necessary for full test coverage
streams_to_sub = streams [ : 1 ] # just add one, to make the message easier to check
streams_to_sub . extend ( current_streams )
2021-02-12 08:19:30 +01:00
self . helper_check_subs_before_and_after_add (
streams_to_sub ,
{ " principals " : orjson . dumps ( [ invitee_data ] ) . decode ( ) } ,
streams [ : 1 ] ,
current_streams ,
other_profile . email ,
streams_to_sub ,
invitee_realm ,
invite_only = invite_only ,
)
2017-04-27 00:03:21 +02:00
# verify that a welcome message was sent to the stream
2016-04-13 23:59:08 +02:00
msg = self . get_last_message ( )
2017-04-27 00:03:21 +02:00
self . assertEqual ( msg . recipient . type , msg . recipient . STREAM )
2021-02-12 08:20:45 +01:00
self . assertEqual ( msg . topic_name ( ) , " stream events " )
2019-07-11 18:32:38 +02:00
self . assertEqual ( msg . sender . email , settings . NOTIFICATION_BOT )
2021-02-12 08:19:30 +01:00
self . assertIn (
2022-03-04 22:28:37 +01:00
f " ** { policy_name } ** stream created by @_** { self . test_user . full_name } | { self . test_user . id } **. **Description:** \n "
" ```` quote " ,
msg . content ,
2021-02-12 08:19:30 +01:00
)
2017-04-27 00:03:21 +02:00
2017-11-05 10:51:25 +01:00
def test_multi_user_subscription ( self ) - > None :
2017-10-07 16:00:39 +02:00
user1 = self . example_user ( " cordelia " )
user2 = self . example_user ( " iago " )
2017-01-04 05:30:48 +01:00
realm = get_realm ( " zulip " )
2021-02-12 08:20:45 +01:00
streams_to_sub = [ " multi_user_stream " ]
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2017-11-30 08:43:12 +01:00
flush_per_request_caches ( )
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 5 ) :
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2014-01-29 22:03:40 +01:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ) ,
2017-01-24 06:34:26 +01:00
)
2021-12-24 17:35:59 +01:00
self . assert_length ( queries , 36 )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
for ev in [ x for x in events if x [ " event " ] [ " type " ] not in ( " message " , " stream " ) ] :
if ev [ " event " ] [ " op " ] == " add " :
2016-07-12 23:57:16 +02:00
self . assertEqual (
2021-02-12 08:20:45 +01:00
set ( ev [ " event " ] [ " subscriptions " ] [ 0 ] [ " subscribers " ] ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
{ user1 . id , user2 . id } ,
2016-07-12 23:57:16 +02:00
)
else :
# Check "peer_add" events for streams users were
# never subscribed to, in order for the neversubscribed
# structure to stay up-to-date.
2021-02-12 08:20:45 +01:00
self . assertEqual ( ev [ " event " ] [ " op " ] , " peer_add " )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream = get_stream ( " multi_user_stream " , realm )
2017-10-29 15:40:07 +01:00
self . assertEqual ( num_subscribers_for_stream_id ( stream . id ) , 2 )
2014-01-29 22:03:40 +01:00
# Now add ourselves
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2017-01-24 07:06:13 +01:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ self . test_user . id ] ) . decode ( ) ) ,
2014-01-29 22:03:40 +01:00
)
2021-12-24 17:35:59 +01:00
self . assert_length ( queries , 12 )
2014-01-29 22:03:40 +01:00
add_event , add_peer_event = events
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_event [ " event " ] [ " op " ] , " add " )
self . assertEqual ( add_event [ " users " ] , [ get_user ( self . test_email , self . test_realm ) . id ] )
2014-01-29 22:03:40 +01:00
self . assertEqual (
2021-02-12 08:20:45 +01:00
set ( add_event [ " event " ] [ " subscriptions " ] [ 0 ] [ " subscribers " ] ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
{ user1 . id , user2 . id , self . test_user . id } ,
2014-01-29 22:03:40 +01:00
)
2021-02-12 08:20:45 +01:00
self . assertNotIn ( self . example_user ( " polonius " ) . id , add_peer_event [ " users " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( add_peer_event [ " users " ] , 12 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_peer_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_peer_event [ " event " ] [ " op " ] , " peer_add " )
self . assertEqual ( add_peer_event [ " event " ] [ " user_ids " ] , [ self . user_profile . id ] )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
stream = get_stream ( " multi_user_stream " , realm )
2017-10-29 15:40:07 +01:00
self . assertEqual ( num_subscribers_for_stream_id ( stream . id ) , 3 )
2014-01-29 22:03:40 +01:00
2016-10-20 00:50:09 +02:00
# Finally, add othello.
2014-01-29 22:03:40 +01:00
events = [ ]
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " othello " )
2017-05-07 21:25:59 +02:00
email3 = user_profile . email
2017-10-07 16:00:39 +02:00
user3 = user_profile
2017-05-23 20:57:59 +02:00
realm3 = user_profile . realm
2021-02-12 08:20:45 +01:00
stream = get_stream ( " multi_user_stream " , realm )
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-04-02 18:33:28 +02:00
bulk_add_subscriptions ( realm , [ stream ] , [ user_profile ] , acting_user = None )
2014-01-29 22:03:40 +01:00
add_event , add_peer_event = events
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_event [ " event " ] [ " op " ] , " add " )
self . assertEqual ( add_event [ " users " ] , [ get_user ( email3 , realm3 ) . id ] )
2014-01-29 22:03:40 +01:00
self . assertEqual (
2021-02-12 08:20:45 +01:00
set ( add_event [ " event " ] [ " subscriptions " ] [ 0 ] [ " subscribers " ] ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
{ user1 . id , user2 . id , user3 . id , self . test_user . id } ,
2014-01-29 22:03:40 +01:00
)
2016-10-20 00:50:09 +02:00
# We don't send a peer_add event to othello
2021-02-12 08:20:45 +01:00
self . assertNotIn ( user_profile . id , add_peer_event [ " users " ] )
self . assertNotIn ( self . example_user ( " polonius " ) . id , add_peer_event [ " users " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( add_peer_event [ " users " ] , 12 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_peer_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_peer_event [ " event " ] [ " op " ] , " peer_add " )
self . assertEqual ( add_peer_event [ " event " ] [ " user_ids " ] , [ user_profile . id ] )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_private_stream_subscription ( self ) - > None :
2017-01-29 01:21:31 +01:00
realm = get_realm ( " zulip " )
# Create a private stream with Hamlet subscribed
stream_name = " private "
2021-04-02 18:11:45 +02:00
stream = ensure_stream ( realm , stream_name , invite_only = True , acting_user = None )
2017-01-29 01:21:31 +01:00
2021-02-12 08:20:45 +01:00
existing_user_profile = self . example_user ( " hamlet " )
2021-04-02 18:33:28 +02:00
bulk_add_subscriptions ( realm , [ stream ] , [ existing_user_profile ] , acting_user = None )
2017-01-29 01:21:31 +01:00
# Now subscribe Cordelia to the stream, capturing events
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " cordelia " )
2017-01-29 01:21:31 +01:00
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 3 ) :
2021-04-02 18:33:28 +02:00
bulk_add_subscriptions ( realm , [ stream ] , [ user_profile ] , acting_user = None )
2017-01-29 01:21:31 +01:00
create_event , add_event , add_peer_event = events
2021-02-12 08:20:45 +01:00
self . assertEqual ( create_event [ " event " ] [ " type " ] , " stream " )
self . assertEqual ( create_event [ " event " ] [ " op " ] , " create " )
self . assertEqual ( create_event [ " users " ] , [ user_profile . id ] )
self . assertEqual ( create_event [ " event " ] [ " streams " ] [ 0 ] [ " name " ] , stream_name )
2017-01-29 01:21:31 +01:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_event [ " event " ] [ " op " ] , " add " )
self . assertEqual ( add_event [ " users " ] , [ user_profile . id ] )
2017-01-29 01:21:31 +01:00
self . assertEqual (
2021-02-12 08:20:45 +01:00
set ( add_event [ " event " ] [ " subscriptions " ] [ 0 ] [ " subscribers " ] ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
{ user_profile . id , existing_user_profile . id } ,
2017-01-29 01:21:31 +01:00
)
2018-02-14 17:59:01 +01:00
# We don't send a peer_add event to othello, but we do send peer_add event to
# all realm admins.
2021-02-12 08:20:45 +01:00
self . assertNotIn ( user_profile . id , add_peer_event [ " users " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( add_peer_event [ " users " ] , 3 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_peer_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_peer_event [ " event " ] [ " op " ] , " peer_add " )
self . assertEqual ( add_peer_event [ " event " ] [ " user_ids " ] , [ user_profile . id ] )
2017-01-29 01:21:31 +01:00
2018-03-16 10:57:17 +01:00
# Do not send stream creation event to realm admin users
# even if realm admin is subscribed to stream cause realm admin already get
# private stream creation event on stream creation.
2021-04-02 18:11:45 +02:00
new_stream = ensure_stream ( realm , " private stream " , invite_only = True , acting_user = None )
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 2 ) :
2021-04-02 18:33:28 +02:00
bulk_add_subscriptions (
realm , [ new_stream ] , [ self . example_user ( " iago " ) ] , acting_user = None
)
2018-03-16 10:57:17 +01:00
2020-10-13 17:56:18 +02:00
# Note that since iago is an admin, he won't get a stream/create
# event here.
self . assert_length ( events , 2 )
add_event , add_peer_event = events
2018-03-16 10:57:17 +01:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_event [ " event " ] [ " op " ] , " add " )
self . assertEqual ( add_event [ " users " ] , [ self . example_user ( " iago " ) . id ] )
2018-03-16 10:57:17 +01:00
2021-05-17 05:41:32 +02:00
self . assert_length ( add_peer_event [ " users " ] , 1 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( add_peer_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_peer_event [ " event " ] [ " op " ] , " peer_add " )
self . assertEqual ( add_peer_event [ " event " ] [ " user_ids " ] , [ self . example_user ( " iago " ) . id ] )
2020-05-17 18:46:14 +02:00
2020-02-04 21:50:55 +01:00
def test_subscribe_to_stream_post_policy_admins_stream ( self ) - > None :
2019-01-24 09:16:35 +01:00
"""
Members can subscribe to streams where only admins can post
"""
member = self . example_user ( " AARON " )
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " stream1 " )
2021-12-15 01:04:35 +01:00
do_change_stream_post_policy ( stream , Stream . STREAM_POST_POLICY_ADMINS , acting_user = member )
2020-05-15 00:33:24 +02:00
result = self . common_subscribe_to_streams ( member , [ " stream1 " ] )
2019-01-24 09:16:35 +01:00
self . assert_json_success ( result )
2020-05-15 00:33:24 +02:00
json = result . json ( )
self . assertEqual ( json [ " subscribed " ] , { member . email : [ " stream1 " ] } )
self . assertEqual ( json [ " already_subscribed " ] , { } )
2020-02-04 21:50:55 +01:00
def test_subscribe_to_stream_post_policy_restrict_new_members_stream ( self ) - > None :
"""
2020-05-15 00:33:24 +02:00
New members can subscribe to streams where they can not post
2020-02-04 21:50:55 +01:00
"""
2021-02-12 08:20:45 +01:00
new_member_email = self . nonreg_email ( " test " )
2020-02-04 21:50:55 +01:00
self . register ( new_member_email , " test " )
2021-02-12 08:20:45 +01:00
new_member = self . nonreg_user ( " test " )
2020-02-04 21:50:55 +01:00
2021-03-01 11:33:24 +01:00
do_set_realm_property ( new_member . realm , " waiting_period_threshold " , 10 , acting_user = None )
2021-02-24 20:39:28 +01:00
self . assertTrue ( new_member . is_provisional_member )
2020-05-15 00:33:24 +02:00
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " stream1 " )
2021-12-15 01:04:35 +01:00
do_change_stream_post_policy (
stream , Stream . STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS , acting_user = new_member
)
2020-05-15 00:33:24 +02:00
result = self . common_subscribe_to_streams ( new_member , [ " stream1 " ] )
self . assert_json_success ( result )
json = result . json ( )
self . assertEqual ( json [ " subscribed " ] , { new_member . email : [ " stream1 " ] } )
self . assertEqual ( json [ " already_subscribed " ] , { } )
2019-01-24 09:16:35 +01:00
2021-03-29 16:01:39 +02:00
def test_subscribe_to_stream_post_policy_moderators_stream ( self ) - > None :
"""
Members can subscribe to streams where only admins and moderators can post
"""
member = self . example_user ( " AARON " )
stream = self . make_stream ( " stream1 " )
# Make sure that we are testing this with full member which is just below the moderator
# in the role hierarchy.
self . assertFalse ( member . is_provisional_member )
2021-12-15 01:04:35 +01:00
do_change_stream_post_policy (
stream , Stream . STREAM_POST_POLICY_MODERATORS , acting_user = member
)
2021-03-29 16:01:39 +02:00
result = self . common_subscribe_to_streams ( member , [ " stream1 " ] )
self . assert_json_success ( result )
json = result . json ( )
self . assertEqual ( json [ " subscribed " ] , { member . email : [ " stream1 " ] } )
self . assertEqual ( json [ " already_subscribed " ] , { } )
2018-05-04 19:14:29 +02:00
def test_guest_user_subscribe ( self ) - > None :
2018-05-02 17:00:06 +02:00
""" Guest users cannot subscribe themselves to anything """
guest_user = self . example_user ( " polonius " )
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( guest_user , [ " Denmark " ] , allow_fail = True )
2018-05-04 19:14:29 +02:00
self . assert_json_error ( result , " Not allowed for guest users " )
2018-05-02 17:00:06 +02:00
2018-05-04 19:14:29 +02:00
# Verify the internal checks also block guest users.
stream = get_stream ( " Denmark " , guest_user . realm )
2021-02-12 08:19:30 +01:00
self . assertEqual ( filter_stream_authorization ( guest_user , [ stream ] ) , ( [ ] , [ stream ] ) )
2018-05-04 19:14:29 +02:00
2021-02-12 08:20:45 +01:00
stream = self . make_stream ( " private_stream " , invite_only = True )
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( guest_user , [ " private_stream " ] , allow_fail = True )
2018-05-04 19:14:29 +02:00
self . assert_json_error ( result , " Not allowed for guest users " )
2021-02-12 08:19:30 +01:00
self . assertEqual ( filter_stream_authorization ( guest_user , [ stream ] ) , ( [ ] , [ stream ] ) )
2018-05-02 17:00:06 +02:00
2021-02-12 08:20:45 +01:00
web_public_stream = self . make_stream ( " web_public_stream " , is_web_public = True )
public_stream = self . make_stream ( " public_stream " , invite_only = False )
private_stream = self . make_stream ( " private_stream2 " , invite_only = True )
2020-07-24 05:30:58 +02:00
# This test should be added as soon as the subscription endpoint allows
2022-01-29 00:54:13 +01:00
# guest users to subscribe to web-public streams. Although they are already
2020-07-24 05:30:58 +02:00
# authorized, the decorator in "add_subscriptions_backend" still needs to be
# deleted.
#
# result = self.common_subscribe_to_streams(guest_user, ['web_public_stream'],
# is_web_public=True, allow_fail=True)
# self.assert_json_success(result)
streams_to_sub = [ web_public_stream , public_stream , private_stream ]
2021-02-12 08:19:30 +01:00
self . assertEqual (
filter_stream_authorization ( guest_user , streams_to_sub ) ,
( [ web_public_stream ] , [ public_stream , private_stream ] ) ,
)
2020-07-24 05:30:58 +02:00
2017-11-05 10:51:25 +01:00
def test_users_getting_add_peer_event ( self ) - > None :
2016-07-22 23:30:47 +02:00
"""
Check users getting add_peer_event is correct
"""
2021-02-12 08:20:45 +01:00
streams_to_sub = [ " multi_user_stream " ]
othello = self . example_user ( " othello " )
cordelia = self . example_user ( " cordelia " )
iago = self . example_user ( " iago " )
2020-04-09 19:07:57 +02:00
orig_user_ids_to_subscribe = [ self . test_user . id , othello . id ]
2016-07-22 23:30:47 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2016-07-22 23:30:47 +02:00
streams_to_sub ,
2021-02-12 08:19:30 +01:00
dict ( principals = orjson . dumps ( orig_user_ids_to_subscribe ) . decode ( ) ) ,
)
2016-07-22 23:30:47 +02:00
2020-04-09 19:07:57 +02:00
new_user_ids_to_subscribe = [ iago . id , cordelia . id ]
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 5 ) :
2016-07-22 23:30:47 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2017-01-24 07:06:13 +01:00
streams_to_sub ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( new_user_ids_to_subscribe ) . decode ( ) ) ,
2016-07-22 23:30:47 +02:00
)
2020-10-22 14:14:02 +02:00
add_peer_events = [ event for event in events if event [ " event " ] . get ( " op " ) == " peer_add " ]
( add_peer_event , ) = add_peer_events
self . assertEqual ( add_peer_event [ " event " ] [ " type " ] , " subscription " )
self . assertEqual ( add_peer_event [ " event " ] [ " op " ] , " peer_add " )
event_sent_to_ids = add_peer_event [ " users " ]
for user_id in new_user_ids_to_subscribe :
# Make sure new users subscribed to stream is not in
# peer_add event recipient list
self . assertNotIn ( user_id , event_sent_to_ids )
for old_user in orig_user_ids_to_subscribe :
# Check non new users are in peer_add event recipient list.
self . assertIn ( old_user , event_sent_to_ids )
2016-07-22 23:30:47 +02:00
2017-11-05 10:51:25 +01:00
def test_users_getting_remove_peer_event ( self ) - > None :
2016-10-20 20:12:39 +02:00
"""
Check users getting add_peer_event is correct
"""
2017-08-25 06:01:29 +02:00
user1 = self . example_user ( " othello " )
user2 = self . example_user ( " cordelia " )
user3 = self . example_user ( " hamlet " )
user4 = self . example_user ( " iago " )
2018-02-14 17:59:01 +01:00
user5 = self . example_user ( " AARON " )
2020-10-22 14:14:02 +02:00
guest = self . example_user ( " polonius " )
2016-10-20 20:12:39 +02:00
2021-12-24 14:29:40 +01:00
realm = user1 . realm
2021-02-12 08:20:45 +01:00
stream1 = self . make_stream ( " stream1 " )
stream2 = self . make_stream ( " stream2 " )
stream3 = self . make_stream ( " stream3 " )
private = self . make_stream ( " private_stream " , invite_only = True )
2016-10-20 20:12:39 +02:00
2021-02-12 08:20:45 +01:00
self . subscribe ( user1 , " stream1 " )
self . subscribe ( user2 , " stream1 " )
self . subscribe ( user3 , " stream1 " )
2016-10-20 20:12:39 +02:00
2021-02-12 08:20:45 +01:00
self . subscribe ( user2 , " stream2 " )
self . subscribe ( user2 , " stream3 " )
2016-10-20 20:12:39 +02:00
2021-02-12 08:20:45 +01:00
self . subscribe ( user1 , " private_stream " )
self . subscribe ( user2 , " private_stream " )
self . subscribe ( user3 , " private_stream " )
2016-10-20 20:12:39 +02:00
2021-05-28 10:47:43 +02:00
# Apart from 3 peer-remove events and 2 unsubscribe event, because `bulk_remove_subscriptions`
# also marks are read messages in those streams as read, so emits 8 `message_flags` events too
# (for each of the notification bot messages).
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 13 ) :
2020-10-15 15:31:20 +02:00
with queries_captured ( ) as query_count :
with cache_tries_captured ( ) as cache_count :
bulk_remove_subscriptions (
2021-12-24 14:29:40 +01:00
realm ,
2020-10-15 15:31:20 +02:00
[ user1 , user2 ] ,
[ stream1 , stream2 , stream3 , private ] ,
2021-04-02 18:48:08 +02:00
acting_user = None ,
2020-10-15 15:31:20 +02:00
)
2021-08-14 02:28:52 +02:00
self . assert_length ( query_count , 27 )
self . assert_length ( cache_count , 3 )
2016-10-20 20:12:39 +02:00
2021-02-12 08:20:45 +01:00
peer_events = [ e for e in events if e [ " event " ] . get ( " op " ) == " peer_remove " ]
2016-10-20 20:12:39 +02:00
2020-10-22 14:14:02 +02:00
# We only care about a subset of users when we inspect
# peer_remove events.
our_user_ids = {
user1 . id ,
user2 . id ,
user3 . id ,
user4 . id ,
user5 . id ,
guest . id ,
}
2016-10-20 20:12:39 +02:00
2020-10-22 14:14:02 +02:00
notifications = [ ]
for event in peer_events :
2020-10-26 13:16:10 +01:00
stream_ids = event [ " event " ] [ " stream_ids " ]
2021-02-12 08:19:30 +01:00
stream_names = sorted ( Stream . objects . get ( id = stream_id ) . name for stream_id in stream_ids )
2021-02-12 08:20:45 +01:00
removed_user_ids = set ( event [ " event " ] [ " user_ids " ] )
notified_user_ids = set ( event [ " users " ] ) & our_user_ids
notifications . append ( ( " , " . join ( stream_names ) , removed_user_ids , notified_user_ids ) )
2016-10-20 20:12:39 +02:00
2020-10-22 14:14:02 +02:00
notifications . sort ( key = lambda tup : tup [ 0 ] )
2016-10-20 20:12:39 +02:00
2020-10-22 14:14:02 +02:00
self . assertEqual (
notifications ,
[
( " private_stream " , { user1 . id , user2 . id } , { user3 . id , user4 . id } ) ,
( " stream1 " , { user1 . id , user2 . id } , { user3 . id , user4 . id , user5 . id } ) ,
2020-10-26 13:16:10 +01:00
( " stream2,stream3 " , { user2 . id } , { user1 . id , user3 . id , user4 . id , user5 . id } ) ,
2020-10-22 14:14:02 +02:00
] ,
)
2016-10-20 20:12:39 +02:00
2017-11-05 10:51:25 +01:00
def test_bulk_subscribe_MIT ( self ) - > None :
2021-02-12 08:20:45 +01:00
mit_user = self . mit_user ( " starnine " )
2018-08-21 19:20:31 +02:00
2017-03-04 09:19:37 +01:00
realm = get_realm ( " zephyr " )
2020-06-10 06:41:04 +02:00
stream_names = [ f " stream_ { i } " for i in range ( 40 ) ]
2021-02-12 08:19:30 +01:00
streams = [ self . make_stream ( stream_name , realm = realm ) for stream_name in stream_names ]
2018-08-21 19:20:31 +02:00
for stream in streams :
stream . is_in_zephyr_realm = True
stream . save ( )
2014-01-29 22:03:40 +01:00
2021-05-27 15:53:22 +02:00
# Make sure Zephyr mirroring realms such as MIT do not get
# any tornado subscription events
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
events : List [ Mapping [ str , Any ] ] = [ ]
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 0 ) :
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
mit_user ,
2018-08-21 19:20:31 +02:00
stream_names ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ mit_user . id ] ) . decode ( ) ) ,
2017-08-26 00:58:13 +02:00
subdomain = " zephyr " ,
2020-06-17 23:49:33 +02:00
allow_fail = True ,
2014-01-29 22:03:40 +01:00
)
2020-12-19 20:04:57 +01:00
self . assert_length ( queries , 4 )
2014-01-29 22:03:40 +01:00
2021-05-27 15:53:22 +02:00
with self . tornado_redirected_to_list ( events , expected_num_events = 0 ) :
2018-08-21 19:20:54 +02:00
bulk_remove_subscriptions (
2021-12-24 14:29:40 +01:00
realm ,
2018-08-21 19:20:54 +02:00
users = [ mit_user ] ,
streams = streams ,
2021-04-02 18:48:08 +02:00
acting_user = None ,
2018-08-21 19:20:54 +02:00
)
2017-11-05 10:51:25 +01:00
def test_bulk_subscribe_many ( self ) - > None :
2016-06-04 19:50:38 +02:00
2014-01-29 22:03:40 +01:00
# Create a whole bunch of streams
2020-10-13 21:35:46 +02:00
streams = [ f " stream_ { i } " for i in range ( 30 ) ]
2016-10-21 23:22:25 +02:00
for stream_name in streams :
self . make_stream ( stream_name )
2014-01-29 22:03:40 +01:00
2021-02-12 08:20:45 +01:00
desdemona = self . example_user ( " desdemona " )
2020-10-13 21:44:07 +02:00
test_users = [
desdemona ,
2021-02-12 08:20:45 +01:00
self . example_user ( " cordelia " ) ,
self . example_user ( " hamlet " ) ,
self . example_user ( " othello " ) ,
self . example_user ( " iago " ) ,
self . example_user ( " prospero " ) ,
2020-10-13 21:44:07 +02:00
]
# Subscribe out test users to some streams, including
2020-10-13 21:35:46 +02:00
# some that we may soon subscribe them to.
for stream_name in [ " Verona " , " Denmark " , * streams [ : 10 ] ] :
2020-10-13 21:44:07 +02:00
for user in test_users :
self . subscribe ( user , stream_name )
2020-10-14 12:55:22 +02:00
# Now unsubscribe users from the first few streams,
# so they have to reactivate.
for stream_name in streams [ : 5 ] :
for user in test_users :
self . unsubscribe ( user , stream_name )
2020-10-13 21:44:07 +02:00
test_user_ids = [ user . id for user in test_users ]
2020-10-13 21:35:46 +02:00
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
2020-10-15 14:59:13 +02:00
with cache_tries_captured ( ) as cache_tries :
2021-02-12 08:20:45 +01:00
with mock . patch ( " zerver.views.streams.send_messages_for_new_subscribers " ) :
2020-10-15 14:59:13 +02:00
self . common_subscribe_to_streams (
desdemona ,
streams ,
dict ( principals = orjson . dumps ( test_user_ids ) . decode ( ) ) ,
)
2020-10-13 21:44:07 +02:00
# The only known O(N) behavior here is that we call
# principal_to_user_profile for each of our users.
2021-12-24 17:35:59 +01:00
self . assert_length ( queries , 19 )
2020-10-13 12:53:23 +02:00
self . assert_length ( cache_tries , 4 )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_for_principal ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
You can subscribe other people to streams .
"""
2020-04-09 19:07:57 +02:00
invitee = self . example_user ( " iago " )
current_streams = self . get_streams ( invitee )
invite_streams = self . make_random_stream_names ( current_streams )
2022-03-04 22:28:37 +01:00
self . assert_adding_subscriptions_for_principal (
invitee . id , invitee . realm , invite_streams , policy_name = " Public "
)
2020-04-09 19:07:57 +02:00
def test_subscriptions_add_for_principal_legacy_emails ( self ) - > None :
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
current_streams = self . get_streams ( invitee )
2014-01-29 22:03:40 +01:00
invite_streams = self . make_random_stream_names ( current_streams )
2022-03-04 22:28:37 +01:00
self . assert_adding_subscriptions_for_principal (
invitee . email , invitee . realm , invite_streams , policy_name = " Public "
)
2014-01-29 22:03:40 +01:00
2018-05-21 03:54:42 +02:00
def test_subscriptions_add_for_principal_deactivated ( self ) - > None :
"""
You can ' t subscribe deactivated people to streams.
"""
target_profile = self . example_user ( " cordelia " )
2020-03-12 14:17:25 +01:00
post_data = dict (
2020-08-07 01:09:47 +02:00
principals = orjson . dumps ( [ target_profile . id ] ) . decode ( ) ,
2020-03-12 14:17:25 +01:00
)
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( self . test_user , " Verona " , post_data )
2018-05-21 03:54:42 +02:00
2021-03-27 06:02:12 +01:00
do_deactivate_user ( target_profile , acting_user = None )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user , " Denmark " , post_data , allow_fail = True
)
2020-03-12 14:17:25 +01:00
self . assert_json_error (
result ,
2020-06-09 00:25:09 +02:00
f " User not authorized to execute queries on behalf of ' { target_profile . id } ' " ,
2021-02-12 08:19:30 +01:00
status_code = 403 ,
)
2018-05-21 03:54:42 +02:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_add_for_principal_invite_only ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
You can subscribe other people to invite only streams .
"""
2020-03-09 21:41:26 +01:00
invitee = self . example_user ( " iago " )
current_streams = self . get_streams ( invitee )
2014-01-29 22:03:40 +01:00
invite_streams = self . make_random_stream_names ( current_streams )
2021-02-12 08:19:30 +01:00
self . assert_adding_subscriptions_for_principal (
2022-03-04 22:28:37 +01:00
invitee . id ,
invitee . realm ,
invite_streams ,
invite_only = True ,
policy_name = " Private, protected history " ,
2021-02-12 08:19:30 +01:00
)
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_non_ascii_subscription_for_principal ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
You can subscribe other people to streams even if they containing
non - ASCII characters .
"""
2021-02-12 08:20:45 +01:00
iago = self . example_user ( " iago " )
2022-03-04 22:28:37 +01:00
self . assert_adding_subscriptions_for_principal (
iago . id , get_realm ( " zulip " ) , [ " hümbüǵ " ] , policy_name = " Public "
)
2014-01-29 22:03:40 +01:00
2020-04-09 19:07:57 +02:00
def test_subscription_add_invalid_principal_legacy_emails ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling subscribe on behalf of a principal that does not exist
should return a JSON error .
"""
invalid_principal = " rosencrantz-and-guildenstern@zulip.com "
2017-05-23 20:57:59 +02:00
invalid_principal_realm = get_realm ( " zulip " )
2014-01-29 22:03:40 +01:00
# verify that invalid_principal actually doesn't exist
with self . assertRaises ( UserProfile . DoesNotExist ) :
2017-05-23 20:57:59 +02:00
get_user ( invalid_principal , invalid_principal_realm )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user ,
self . streams ,
{ " principals " : orjson . dumps ( [ invalid_principal ] ) . decode ( ) } ,
allow_fail = True ,
)
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " User not authorized to execute queries on behalf of ' { invalid_principal } ' " ,
status_code = 403 ,
)
2014-01-29 22:03:40 +01:00
2020-04-09 19:07:57 +02:00
def test_subscription_add_invalid_principal ( self ) - > None :
invalid_principal = 999
invalid_principal_realm = get_realm ( " zulip " )
with self . assertRaises ( UserProfile . DoesNotExist ) :
get_user_profile_by_id_in_realm ( invalid_principal , invalid_principal_realm )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user ,
self . streams ,
{ " principals " : orjson . dumps ( [ invalid_principal ] ) . decode ( ) } ,
allow_fail = True ,
)
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " User not authorized to execute queries on behalf of ' { invalid_principal } ' " ,
status_code = 403 ,
)
2020-04-09 19:07:57 +02:00
2017-11-05 10:51:25 +01:00
def test_subscription_add_principal_other_realm ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling subscribe on behalf of a principal in another realm
should return a JSON error .
"""
2021-02-12 08:20:45 +01:00
profile = self . mit_user ( " starnine " )
2020-04-09 19:07:57 +02:00
principal = profile . id
2014-01-29 22:03:40 +01:00
# verify that principal exists (thus, the reason for the error is the cross-realming)
self . assertIsInstance ( profile , UserProfile )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
self . test_user ,
self . streams ,
{ " principals " : orjson . dumps ( [ principal ] ) . decode ( ) } ,
allow_fail = True ,
)
2020-06-14 02:57:50 +02:00
self . assert_json_error (
result ,
f " User not authorized to execute queries on behalf of ' { principal } ' " ,
status_code = 403 ,
)
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
def helper_check_subs_before_and_after_remove (
self ,
subscriptions : List [ str ] ,
json_dict : Dict [ str , Any ] ,
email : str ,
new_subs : List [ str ] ,
realm : Realm ,
) - > None :
2014-01-29 22:03:40 +01:00
"""
Check result of removing subscriptions .
Unlike adding subscriptions , you can only remove subscriptions
for yourself , so the result format is different .
{ " msg " : " " ,
" removed " : [ " Denmark " , " Scotland " , " Verona " ] ,
2019-10-13 05:30:34 +02:00
" not_removed " : [ " Rome " ] , " result " : " success " }
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:19:30 +01:00
result = self . client_delete (
" /json/users/me/subscriptions " , { " subscriptions " : orjson . dumps ( subscriptions ) . decode ( ) }
)
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2017-09-27 10:11:59 +02:00
for key , val in json_dict . items ( ) :
2020-09-02 02:50:08 +02:00
# we don't care about the order of the items
self . assertEqual ( sorted ( val ) , sorted ( json [ key ] ) )
2020-03-09 21:41:26 +01:00
user = get_user ( email , realm )
new_streams = self . get_streams ( user )
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( new_streams ) , sorted ( new_subs ) )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_remove ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-12-23 02:37:10 +01:00
Calling DELETE / json / users / me / subscriptions should successfully remove streams ,
2014-01-29 22:03:40 +01:00
and should determine which were removed vs which weren ' t subscribed to.
We cannot randomly generate stream names because the remove code
verifies whether streams exist .
"""
2017-03-05 08:57:51 +01:00
self . assertGreaterEqual ( len ( self . streams ) , 2 )
2014-01-29 22:03:40 +01:00
streams_to_remove = self . streams [ 1 : ]
not_subbed = [ ]
2021-04-27 16:56:45 +02:00
for stream in Stream . objects . filter ( realm = get_realm ( " zulip " ) ) :
2016-05-10 01:55:43 +02:00
if stream . name not in self . streams :
2014-01-29 22:03:40 +01:00
not_subbed . append ( stream . name )
random . shuffle ( not_subbed )
self . assertNotEqual ( len ( not_subbed ) , 0 ) # necessary for full test coverage
try_to_remove = not_subbed [ : 3 ] # attempt to remove up to 3 streams not already subbed to
streams_to_remove . extend ( try_to_remove )
2021-02-12 08:19:30 +01:00
self . helper_check_subs_before_and_after_remove (
streams_to_remove ,
{ " removed " : self . streams [ 1 : ] , " not_removed " : try_to_remove } ,
self . test_email ,
[ self . streams [ 0 ] ] ,
self . test_realm ,
)
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_remove_fake_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-12-23 02:37:10 +01:00
Calling DELETE / json / users / me / subscriptions on a stream that doesn ' t exist
2014-01-29 22:03:40 +01:00
should return a JSON error .
"""
random_streams = self . make_random_stream_names ( self . streams )
self . assertNotEqual ( len ( random_streams ) , 0 ) # necessary for full test coverage
2020-09-02 02:50:08 +02:00
# pick only one fake stream, to make checking the error message easy
streams_to_remove = random_streams [ : 1 ]
2021-02-12 08:19:30 +01:00
result = self . client_delete (
" /json/users/me/subscriptions " ,
{ " subscriptions " : orjson . dumps ( streams_to_remove ) . decode ( ) } ,
)
2020-06-10 06:41:04 +02:00
self . assert_json_error ( result , f " Stream(s) ( { random_streams [ 0 ] } ) do not exist " )
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
def helper_subscriptions_exists (
self , stream : str , expect_success : bool , subscribed : bool
) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-07-25 02:15:40 +02:00
Call / json / subscriptions / exists on a stream and expect a certain result .
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:19:30 +01:00
result = self . client_post ( " /json/subscriptions/exists " , { " stream " : stream } )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2017-07-25 02:15:40 +02:00
if expect_success :
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
else :
2016-12-16 02:01:34 +01:00
self . assertEqual ( result . status_code , 404 )
2016-06-04 19:50:38 +02:00
if subscribed :
2014-01-29 22:03:40 +01:00
self . assertIn ( " subscribed " , json )
self . assertEqual ( json [ " subscribed " ] , subscribed )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_exists_subbed ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling / json / subscriptions / exist on a stream to which you are subbed
should return that it exists and that you are subbed .
"""
self . assertNotEqual ( len ( self . streams ) , 0 ) # necessary for full test coverage
self . helper_subscriptions_exists ( self . streams [ 0 ] , True , True )
2017-11-05 10:51:25 +01:00
def test_successful_subscriptions_exists_not_subbed ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling / json / subscriptions / exist on a stream to which you are not
subbed should return that it exists and that you are not subbed .
"""
2017-07-12 12:32:14 +02:00
all_stream_names = [ stream . name for stream in Stream . objects . filter ( realm = self . test_realm ) ]
2014-01-29 22:03:40 +01:00
streams_not_subbed = list ( set ( all_stream_names ) - set ( self . streams ) )
self . assertNotEqual ( len ( streams_not_subbed ) , 0 ) # necessary for full test coverage
self . helper_subscriptions_exists ( streams_not_subbed [ 0 ] , True , False )
2017-11-05 10:51:25 +01:00
def test_subscriptions_does_not_exist ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling / json / subscriptions / exist on a stream that doesn ' t exist should
return that it doesn ' t exist.
"""
random_streams = self . make_random_stream_names ( self . streams )
self . assertNotEqual ( len ( random_streams ) , 0 ) # necessary for full test coverage
2016-06-04 19:50:38 +02:00
self . helper_subscriptions_exists ( random_streams [ 0 ] , False , False )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriptions_exist_invalid_name ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Calling / json / subscriptions / exist on a stream whose name is invalid ( as
defined by valid_stream_name in zerver / views . py ) should return a JSON
error .
"""
# currently, the only invalid stream name is the empty string
invalid_stream_name = " "
2021-02-12 08:19:30 +01:00
result = self . client_post ( " /json/subscriptions/exists " , { " stream " : invalid_stream_name } )
2022-01-11 21:57:53 +01:00
self . assert_json_error ( result , " Stream name can ' t be empty! " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_existing_subscriptions_autosubscription ( self ) - > None :
2016-06-21 15:54:18 +02:00
"""
Call / json / subscriptions / exist on an existing stream and autosubscribe to it .
"""
2017-01-12 01:41:16 +01:00
stream_name = " new_public_stream "
2021-02-12 08:20:45 +01:00
cordelia = self . example_user ( " cordelia " )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( cordelia , [ stream_name ] , invite_only = False )
2021-02-12 08:19:30 +01:00
result = self . client_post (
" /json/subscriptions/exists " , { " stream " : stream_name , " autosubscribe " : " false " }
)
2016-06-21 15:54:18 +02:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscribed " , result . json ( ) )
self . assertFalse ( result . json ( ) [ " subscribed " ] )
2017-01-12 01:41:16 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_post (
" /json/subscriptions/exists " , { " stream " : stream_name , " autosubscribe " : " true " }
)
2017-01-12 01:41:16 +01:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscribed " , result . json ( ) )
self . assertTrue ( result . json ( ) [ " subscribed " ] )
2016-06-21 15:54:18 +02:00
2017-11-05 10:51:25 +01:00
def test_existing_subscriptions_autosubscription_private_stream ( self ) - > None :
2017-01-23 05:22:40 +01:00
""" Call /json/subscriptions/exist on an existing private stream with
autosubscribe should fail .
"""
stream_name = " Saxony "
2021-02-12 08:20:45 +01:00
cordelia = self . example_user ( " cordelia " )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( cordelia , [ stream_name ] , invite_only = True )
2017-07-12 12:32:14 +02:00
stream = get_stream ( stream_name , self . test_realm )
2017-01-23 05:22:40 +01:00
2021-02-12 08:19:30 +01:00
result = self . client_post (
" /json/subscriptions/exists " , { " stream " : stream_name , " autosubscribe " : " true " }
)
2017-01-12 01:41:16 +01:00
# We can't see invite-only streams here
self . assert_json_error ( result , " Invalid stream name ' Saxony ' " , status_code = 404 )
# Importantly, we are not now subscribed
2017-10-29 15:40:07 +01:00
self . assertEqual ( num_subscribers_for_stream_id ( stream . id ) , 1 )
2017-01-12 01:41:16 +01:00
# A user who is subscribed still sees the stream exists
2021-02-12 08:20:45 +01:00
self . login ( " cordelia " )
2021-02-12 08:19:30 +01:00
result = self . client_post (
" /json/subscriptions/exists " , { " stream " : stream_name , " autosubscribe " : " false " }
)
2017-01-23 05:22:40 +01:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscribed " , result . json ( ) )
self . assertTrue ( result . json ( ) [ " subscribed " ] )
2017-01-23 05:22:40 +01:00
2018-05-11 01:39:38 +02:00
def get_subscription ( self , user_profile : UserProfile , stream_name : str ) - > Subscription :
2017-07-12 12:32:14 +02:00
stream = get_stream ( stream_name , self . test_realm )
2014-01-29 22:03:40 +01:00
return Subscription . objects . get (
user_profile = user_profile ,
recipient__type = Recipient . STREAM ,
recipient__type_id = stream . id ,
)
2019-02-13 10:22:16 +01:00
def test_subscriptions_add_notification_default_none ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2017-08-17 16:55:32 +02:00
When creating a subscription , the desktop , push , and audible notification
2019-02-13 10:22:16 +01:00
settings for that stream are none . A value of None means to use the values
inherited from the global notification settings .
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " iago " )
2020-04-09 19:07:57 +02:00
invitee_user_id = user_profile . id
2017-05-23 20:57:59 +02:00
invitee_realm = user_profile . realm
2014-02-05 22:56:30 +01:00
user_profile . enable_stream_desktop_notifications = True
2017-08-17 16:55:32 +02:00
user_profile . enable_stream_push_notifications = True
2019-06-11 08:47:49 +02:00
user_profile . enable_stream_audible_notifications = True
2017-11-21 04:35:26 +01:00
user_profile . enable_stream_email_notifications = True
2014-01-29 22:03:40 +01:00
user_profile . save ( )
2020-03-09 21:41:26 +01:00
current_stream = self . get_streams ( user_profile ) [ 0 ]
2016-06-04 19:50:38 +02:00
invite_streams = self . make_random_stream_names ( [ current_stream ] )
2021-02-12 08:19:30 +01:00
self . assert_adding_subscriptions_for_principal (
2022-03-04 22:28:37 +01:00
invitee_user_id , invitee_realm , invite_streams , policy_name = " Public "
2021-02-12 08:19:30 +01:00
)
2014-01-29 22:03:40 +01:00
subscription = self . get_subscription ( user_profile , invite_streams [ 0 ] )
2016-10-04 01:05:44 +02:00
2021-02-12 08:20:45 +01:00
with mock . patch ( " zerver.models.Recipient.__str__ " , return_value = " recip " ) :
2021-02-12 08:19:30 +01:00
self . assertEqual (
str ( subscription ) ,
2021-02-12 08:20:45 +01:00
" <Subscription: "
f " <UserProfile: { user_profile . email } { user_profile . realm } > -> recip> " ,
2021-02-12 08:19:30 +01:00
)
2016-10-04 01:05:44 +02:00
2019-02-13 10:22:16 +01:00
self . assertIsNone ( subscription . desktop_notifications )
self . assertIsNone ( subscription . push_notifications )
self . assertIsNone ( subscription . audible_notifications )
self . assertIsNone ( subscription . email_notifications )
2014-01-29 22:03:40 +01:00
2017-11-13 21:24:51 +01:00
def test_mark_messages_as_unread_on_unsubscribe ( self ) - > None :
realm = get_realm ( " zulip " )
user = self . example_user ( " iago " )
random_user = self . example_user ( " hamlet " )
2021-04-02 18:11:45 +02:00
stream1 = ensure_stream ( realm , " stream1 " , invite_only = False , acting_user = None )
stream2 = ensure_stream ( realm , " stream2 " , invite_only = False , acting_user = None )
private = ensure_stream ( realm , " private_stream " , invite_only = True , acting_user = None )
2017-11-13 21:24:51 +01:00
self . subscribe ( user , " stream1 " )
self . subscribe ( user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . subscribe ( user , " private_stream " )
2017-11-13 21:24:51 +01:00
self . subscribe ( random_user , " stream1 " )
self . subscribe ( random_user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . subscribe ( random_user , " private_stream " )
2017-11-13 21:24:51 +01:00
2020-03-07 11:43:05 +01:00
self . send_stream_message ( random_user , " stream1 " , " test " , " test " )
self . send_stream_message ( random_user , " stream2 " , " test " , " test " )
self . send_stream_message ( random_user , " private_stream " , " test " , " test " )
2017-11-13 21:24:51 +01:00
2022-03-07 15:12:24 +01:00
def get_unread_stream_data ( ) - > List [ UnreadStreamInfo ] :
2017-11-13 21:24:51 +01:00
raw_unread_data = get_raw_unread_data ( user )
aggregated_data = aggregate_unread_data ( raw_unread_data )
2021-02-12 08:20:45 +01:00
return aggregated_data [ " streams " ]
2017-11-13 21:24:51 +01:00
result = get_unread_stream_data ( )
2017-11-29 23:35:33 +01:00
self . assert_length ( result , 3 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result [ 0 ] [ " stream_id " ] , stream1 . id )
self . assertEqual ( result [ 1 ] [ " stream_id " ] , stream2 . id )
self . assertEqual ( result [ 2 ] [ " stream_id " ] , private . id )
2017-11-13 21:24:51 +01:00
# Unsubscribing should mark all the messages in stream2 as read
self . unsubscribe ( user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . unsubscribe ( user , " private_stream " )
2017-11-13 21:24:51 +01:00
self . subscribe ( user , " stream2 " )
2017-11-29 23:35:33 +01:00
self . subscribe ( user , " private_stream " )
2017-11-13 21:24:51 +01:00
result = get_unread_stream_data ( )
self . assert_length ( result , 1 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result [ 0 ] [ " stream_id " ] , stream1 . id )
2014-01-29 22:03:40 +01:00
2018-05-15 17:33:16 +02:00
def test_gather_subscriptions_excludes_deactivated_streams ( self ) - > None :
"""
Check that gather_subscriptions_helper does not include deactivated streams in its
results .
"""
realm = get_realm ( " zulip " )
admin_user = self . example_user ( " iago " )
non_admin_user = self . example_user ( " cordelia " )
2020-03-06 18:40:46 +01:00
self . login_user ( admin_user )
2018-05-15 17:33:16 +02:00
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
for stream_name in [ " stream1 " , " stream2 " , " stream3 " ] :
2018-05-15 17:33:16 +02:00
self . make_stream ( stream_name , realm = realm , invite_only = False )
self . subscribe ( admin_user , stream_name )
self . subscribe ( non_admin_user , stream_name )
self . subscribe ( self . example_user ( " othello " ) , stream_name )
2021-03-31 16:15:24 +02:00
def archive_stream ( stream_name : str ) - > None :
2018-05-15 17:33:16 +02:00
stream_id = get_stream ( stream_name , realm ) . id
2021-02-12 08:20:45 +01:00
result = self . client_delete ( f " /json/streams/ { stream_id } " )
2018-05-15 17:33:16 +02:00
self . assert_json_success ( result )
# Deleted/deactivated stream should not be returned in the helper results
admin_before_delete = gather_subscriptions_helper ( admin_user )
non_admin_before_delete = gather_subscriptions_helper ( non_admin_user )
# Delete our stream
2021-03-31 16:15:24 +02:00
archive_stream ( " stream1 " )
2018-05-15 17:33:16 +02:00
# Get subs after delete
admin_after_delete = gather_subscriptions_helper ( admin_user )
non_admin_after_delete = gather_subscriptions_helper ( non_admin_user )
# Compare results - should be 1 stream less
self . assertTrue (
2021-01-14 21:44:56 +01:00
len ( admin_before_delete . subscriptions ) == len ( admin_after_delete . subscriptions ) + 1 ,
2021-02-12 08:20:45 +01:00
" Expected exactly 1 less stream from gather_subscriptions_helper " ,
2021-02-12 08:19:30 +01:00
)
2018-05-15 17:33:16 +02:00
self . assertTrue (
2021-02-12 08:19:30 +01:00
len ( non_admin_before_delete . subscriptions )
== len ( non_admin_after_delete . subscriptions ) + 1 ,
2021-02-12 08:20:45 +01:00
" Expected exactly 1 less stream from gather_subscriptions_helper " ,
2021-02-12 08:19:30 +01:00
)
2018-05-15 17:33:16 +02:00
2018-05-16 03:36:18 +02:00
def test_validate_user_access_to_subscribers_helper ( self ) - > None :
"""
Ensure the validate_user_access_to_subscribers_helper is properly raising
ValidationError on missing user , user not - in - realm .
"""
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " othello " )
realm_name = " no_othello_allowed "
realm = do_create_realm ( realm_name , " Everyone but Othello is allowed " )
2018-05-16 03:36:18 +02:00
stream_dict = {
2021-02-12 08:20:45 +01:00
" name " : " publicstream " ,
" description " : " Public stream with public history " ,
" realm_id " : realm . id ,
2018-05-16 03:36:18 +02:00
}
# For this test to work, othello can't be in the no_othello_here realm
2021-02-12 08:19:30 +01:00
self . assertNotEqual (
2021-02-12 08:20:45 +01:00
user_profile . realm . id , realm . id , " Expected othello user to not be in this realm. "
2021-02-12 08:19:30 +01:00
)
2018-05-16 03:36:18 +02:00
# This should result in missing user
with self . assertRaises ( ValidationError ) :
2020-06-23 00:33:46 +02:00
validate_user_access_to_subscribers_helper ( None , stream_dict , lambda user_profile : True )
2018-05-16 03:36:18 +02:00
# This should result in user not in realm
with self . assertRaises ( ValidationError ) :
2021-02-12 08:19:30 +01:00
validate_user_access_to_subscribers_helper (
user_profile , stream_dict , lambda user_profile : True
)
2018-05-15 17:33:16 +02:00
2018-08-17 03:33:16 +02:00
def test_subscriptions_query_count ( self ) - > None :
"""
Test database query count when creating stream with api / v1 / users / me / subscriptions .
"""
user1 = self . example_user ( " cordelia " )
user2 = self . example_user ( " iago " )
new_streams = [
2021-02-12 08:20:45 +01:00
" query_count_stream_1 " ,
" query_count_stream_2 " ,
" query_count_stream_3 " ,
2018-08-17 03:33:16 +02:00
]
# Test creating a public stream when realm does not have a notification stream.
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2018-08-17 03:33:16 +02:00
[ new_streams [ 0 ] ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ) ,
2018-08-17 03:33:16 +02:00
)
2021-12-24 17:35:59 +01:00
self . assert_length ( queries , 36 )
2018-08-17 03:33:16 +02:00
# Test creating private stream.
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2018-08-17 03:33:16 +02:00
[ new_streams [ 1 ] ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ) ,
2018-08-17 03:33:16 +02:00
invite_only = True ,
)
2021-12-24 17:35:59 +01:00
self . assert_length ( queries , 35 )
2018-08-17 03:33:16 +02:00
# Test creating a public stream with announce when realm has a notification stream.
notifications_stream = get_stream ( self . streams [ 0 ] , self . test_realm )
self . test_realm . notifications_stream_id = notifications_stream . id
self . test_realm . save ( )
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . test_user ,
2018-08-17 03:33:16 +02:00
[ new_streams [ 2 ] ] ,
dict (
2021-02-12 08:20:45 +01:00
announce = " true " ,
2020-08-07 01:09:47 +02:00
principals = orjson . dumps ( [ user1 . id , user2 . id ] ) . decode ( ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
) ,
2018-08-17 03:33:16 +02:00
)
2021-12-24 17:35:59 +01:00
self . assert_length ( queries , 44 )
2018-08-17 03:33:16 +02:00
2021-02-12 08:19:30 +01:00
2020-03-01 13:05:05 +01:00
class GetStreamsTest ( ZulipTestCase ) :
2019-02-28 22:20:24 +01:00
def test_streams_api_for_bot_owners ( self ) - > None :
2021-02-12 08:20:45 +01:00
hamlet = self . example_user ( " hamlet " )
test_bot = self . create_test_bot ( " foo " , hamlet )
2019-02-28 22:20:24 +01:00
assert test_bot is not None
2021-02-12 08:20:45 +01:00
realm = get_realm ( " zulip " )
2020-03-06 18:40:46 +01:00
self . login_user ( hamlet )
2019-02-28 22:20:24 +01:00
# Check it correctly lists the bot owner's subs with
# include_owner_subscribed=true
2020-07-30 00:43:58 +02:00
filters = dict (
2021-02-12 08:19:30 +01:00
include_owner_subscribed = " true " ,
include_public = " false " ,
include_subscribed = " false " ,
2020-07-30 00:43:58 +02:00
)
2020-09-13 00:11:30 +02:00
result = self . api_get ( test_bot , " /api/v1/streams " , filters )
2020-03-10 11:48:26 +01:00
owner_subs = self . api_get ( hamlet , " /api/v1/users/me/subscriptions " )
2019-02-28 22:20:24 +01:00
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
self . assert_json_success ( owner_subs )
2020-08-07 01:09:47 +02:00
owner_subs_json = orjson . loads ( owner_subs . content )
2019-02-28 22:20:24 +01:00
2021-02-12 08:19:30 +01:00
self . assertEqual (
sorted ( s [ " name " ] for s in json [ " streams " ] ) ,
sorted ( s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ) ,
)
2019-02-28 22:20:24 +01:00
# Check it correctly lists the bot owner's subs and the
# bot's subs
2021-02-12 08:20:45 +01:00
self . subscribe ( test_bot , " Scotland " )
2020-07-30 00:43:58 +02:00
filters = dict (
2021-02-12 08:19:30 +01:00
include_owner_subscribed = " true " ,
include_public = " false " ,
include_subscribed = " true " ,
2020-07-30 00:43:58 +02:00
)
2020-09-13 00:11:30 +02:00
result = self . api_get ( test_bot , " /api/v1/streams " , filters )
2019-02-28 22:20:24 +01:00
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
2020-09-02 06:20:26 +02:00
actual = sorted ( s [ " name " ] for s in json [ " streams " ] )
2019-02-28 22:20:24 +01:00
expected = [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ]
2021-02-12 08:20:45 +01:00
expected . append ( " Scotland " )
2019-02-28 22:20:24 +01:00
expected . sort ( )
self . assertEqual ( actual , expected )
# Check it correctly lists the bot owner's subs + all public streams
2021-02-12 08:20:45 +01:00
self . make_stream ( " private_stream " , realm = realm , invite_only = True )
self . subscribe ( test_bot , " private_stream " )
2019-02-28 22:20:24 +01:00
result = self . api_get (
2020-03-10 11:48:26 +01:00
test_bot ,
2020-09-13 00:11:30 +02:00
" /api/v1/streams " ,
2021-02-12 08:19:30 +01:00
{
" include_owner_subscribed " : " true " ,
" include_public " : " true " ,
" include_subscribed " : " false " ,
} ,
2019-02-28 22:20:24 +01:00
)
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
2020-09-02 06:20:26 +02:00
actual = sorted ( s [ " name " ] for s in json [ " streams " ] )
2019-02-28 22:20:24 +01:00
expected = [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ]
2021-02-12 08:20:45 +01:00
expected . extend ( [ " Rome " , " Venice " , " Scotland " ] )
2019-02-28 22:20:24 +01:00
expected . sort ( )
self . assertEqual ( actual , expected )
# Check it correctly lists the bot owner's subs + all public streams +
# the bot's subs
result = self . api_get (
2020-03-10 11:48:26 +01:00
test_bot ,
2020-09-13 00:11:30 +02:00
" /api/v1/streams " ,
2021-02-12 08:19:30 +01:00
{
" include_owner_subscribed " : " true " ,
" include_public " : " true " ,
" include_subscribed " : " true " ,
} ,
2019-02-28 22:20:24 +01:00
)
self . assert_json_success ( result )
json = result . json ( )
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
2020-09-02 06:20:26 +02:00
actual = sorted ( s [ " name " ] for s in json [ " streams " ] )
2019-02-28 22:20:24 +01:00
expected = [ s [ " name " ] for s in owner_subs_json [ " subscriptions " ] ]
2021-02-12 08:20:45 +01:00
expected . extend ( [ " Rome " , " Venice " , " Scotland " , " private_stream " ] )
2019-02-28 22:20:24 +01:00
expected . sort ( )
self . assertEqual ( actual , expected )
2020-03-01 13:05:05 +01:00
def test_all_active_streams_api ( self ) - > None :
2020-09-13 00:11:30 +02:00
url = " /api/v1/streams "
data = { " include_all_active " : " true " }
2020-03-01 13:05:05 +01:00
# Check non-superuser can't use include_all_active
2021-02-12 08:20:45 +01:00
normal_user = self . example_user ( " cordelia " )
2020-09-13 00:11:30 +02:00
result = self . api_get ( normal_user , url , data )
2020-03-01 13:05:05 +01:00
self . assertEqual ( result . status_code , 400 )
2021-04-02 23:31:17 +02:00
# Realm admin users can see all active streams.
2021-02-12 08:20:45 +01:00
admin_user = self . example_user ( " iago " )
2020-03-01 13:05:05 +01:00
self . assertTrue ( admin_user . is_realm_admin )
2021-04-02 23:31:17 +02:00
result = self . api_get ( admin_user , url , data )
2020-03-01 13:05:05 +01:00
self . assert_json_success ( result )
json = result . json ( )
2021-02-12 08:20:45 +01:00
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
2020-03-01 13:05:05 +01:00
2021-02-12 08:20:45 +01:00
stream_names = { s [ " name " ] for s in json [ " streams " ] }
2020-03-01 13:05:05 +01:00
self . assertEqual (
stream_names ,
2021-04-29 17:22:48 +02:00
{ " Venice " , " Denmark " , " Scotland " , " Verona " , " Rome " , " core team " } ,
2020-03-01 13:05:05 +01:00
)
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_public_streams_api ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2016-04-17 14:47:43 +02:00
Ensure that the query we use to get public streams successfully returns
a list of streams
2014-01-29 22:03:40 +01:00
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
realm = get_realm ( " zulip " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
# Check it correctly lists the user's subs with include_public=false
2020-09-13 00:11:30 +02:00
result = self . api_get ( user , " /api/v1/streams " , { " include_public " : " false " } )
2020-03-10 11:48:26 +01:00
result2 = self . api_get ( user , " /api/v1/users/me/subscriptions " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
self . assertIn ( " streams " , json )
self . assertIsInstance ( json [ " streams " ] , list )
self . assert_json_success ( result2 )
2020-08-07 01:09:47 +02:00
json2 = orjson . loads ( result2 . content )
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
self . assertEqual (
sorted ( s [ " name " ] for s in json [ " streams " ] ) ,
sorted ( s [ " name " ] for s in json2 [ " subscriptions " ] ) ,
)
2014-01-29 22:03:40 +01:00
# Check it correctly lists all public streams with include_subscribed=false
2021-02-12 08:19:30 +01:00
filters = dict ( include_public = " true " , include_subscribed = " false " )
2020-09-13 00:11:30 +02:00
result = self . api_get ( user , " /api/v1/streams " , filters )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2021-04-29 17:22:48 +02:00
all_streams = [
stream . name for stream in Stream . objects . filter ( realm = realm , invite_only = False )
]
2021-02-12 08:19:30 +01:00
self . assertEqual ( sorted ( s [ " name " ] for s in json [ " streams " ] ) , sorted ( all_streams ) )
2014-01-29 22:03:40 +01:00
2017-01-03 18:31:43 +01:00
class StreamIdTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_get_stream_id ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
stream = gather_subscriptions ( user ) [ 0 ] [ 0 ]
2021-02-12 08:20:45 +01:00
result = self . client_get ( " /json/get_stream_id " , { " stream " : stream [ " name " ] } )
2017-01-03 18:31:43 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
self . assertEqual ( result . json ( ) [ " stream_id " ] , stream [ " stream_id " ] )
2017-01-03 18:31:43 +01:00
2017-11-05 10:51:25 +01:00
def test_get_stream_id_wrong_name ( self ) - > None :
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-09-13 00:11:30 +02:00
result = self . client_get ( " /json/get_stream_id " , { " stream " : " wrongname " } )
2020-04-09 21:51:58 +02:00
self . assert_json_error ( result , " Invalid stream name ' wrongname ' " )
2017-01-03 18:31:43 +01:00
2021-02-12 08:19:30 +01:00
2016-08-23 02:08:42 +02:00
class InviteOnlyStreamTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_must_be_subbed_to_send ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
If you try to send a message to an invite - only stream to which
you aren ' t subscribed, you ' ll get a 400.
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2014-01-29 22:03:40 +01:00
# Create Saxony as an invite-only stream.
self . assert_json_success (
2021-02-12 08:19:30 +01:00
self . common_subscribe_to_streams ( user , [ " Saxony " ] , invite_only = True )
)
2014-01-29 22:03:40 +01:00
2020-03-07 11:43:05 +01:00
cordelia = self . example_user ( " cordelia " )
2014-01-29 22:03:40 +01:00
with self . assertRaises ( JsonableError ) :
2020-03-07 11:43:05 +01:00
self . send_stream_message ( cordelia , " Saxony " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_list_respects_invite_only_bit ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Make sure that / api / v1 / users / me / subscriptions properly returns
the invite - only bit for streams that are invite - only
"""
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2020-03-09 21:41:26 +01:00
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams ( user , [ " Saxony " ] , invite_only = True )
self . common_subscribe_to_streams ( user , [ " Normandy " ] , invite_only = False )
2020-03-10 11:48:26 +01:00
result = self . api_get ( user , " /api/v1/users/me/subscriptions " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-16 09:52:04 +02:00
self . assertIn ( " subscriptions " , result . json ( ) )
for sub in result . json ( ) [ " subscriptions " ] :
2021-02-12 08:20:45 +01:00
if sub [ " name " ] == " Normandy " :
2021-02-12 08:19:30 +01:00
self . assertEqual (
2021-02-12 08:20:45 +01:00
sub [ " invite_only " ] , False , " Normandy was mistakenly marked private "
2021-02-12 08:19:30 +01:00
)
2021-02-12 08:20:45 +01:00
if sub [ " name " ] == " Saxony " :
self . assertEqual ( sub [ " invite_only " ] , True , " Saxony was not properly marked private " )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_inviteonly ( self ) - > None :
2014-01-29 22:03:40 +01:00
# Creating an invite-only stream is allowed
2021-02-12 08:20:45 +01:00
hamlet = self . example_user ( " hamlet " )
othello = self . example_user ( " othello " )
2020-03-12 14:17:25 +01:00
2014-01-29 22:03:40 +01:00
stream_name = " Saxony "
2020-03-12 14:17:25 +01:00
result = self . common_subscribe_to_streams ( hamlet , [ stream_name ] , invite_only = True )
2014-01-29 22:03:40 +01:00
2017-08-17 08:45:20 +02:00
json = result . json ( )
2020-03-12 14:17:25 +01:00
self . assertEqual ( json [ " subscribed " ] , { hamlet . email : [ stream_name ] } )
2014-01-29 22:03:40 +01:00
self . assertEqual ( json [ " already_subscribed " ] , { } )
# Subscribing oneself to an invite-only stream is not allowed
2020-03-12 14:17:25 +01:00
self . login_user ( othello )
2020-06-17 23:49:33 +02:00
result = self . common_subscribe_to_streams ( othello , [ stream_name ] , allow_fail = True )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Unable to access stream (Saxony). " )
2014-01-29 22:03:40 +01:00
# authorization_errors_fatal=False works
2020-03-12 14:17:25 +01:00
self . login_user ( othello )
2021-02-12 08:19:30 +01:00
result = self . common_subscribe_to_streams (
othello ,
[ stream_name ] ,
2021-02-12 08:20:45 +01:00
extra_post_data = { " authorization_errors_fatal " : orjson . dumps ( False ) . decode ( ) } ,
2021-02-12 08:19:30 +01:00
)
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
self . assertEqual ( json [ " unauthorized " ] , [ stream_name ] )
self . assertEqual ( json [ " subscribed " ] , { } )
self . assertEqual ( json [ " already_subscribed " ] , { } )
# Inviting another user to an invite-only stream is allowed
2020-03-12 14:17:25 +01:00
self . login_user ( hamlet )
2014-01-29 22:03:40 +01:00
result = self . common_subscribe_to_streams (
2021-02-12 08:19:30 +01:00
hamlet ,
[ stream_name ] ,
2021-02-12 08:20:45 +01:00
extra_post_data = { " principals " : orjson . dumps ( [ othello . id ] ) . decode ( ) } ,
2021-02-12 08:19:30 +01:00
)
2017-08-17 08:45:20 +02:00
json = result . json ( )
2020-03-12 14:17:25 +01:00
self . assertEqual ( json [ " subscribed " ] , { othello . email : [ stream_name ] } )
2014-01-29 22:03:40 +01:00
self . assertEqual ( json [ " already_subscribed " ] , { } )
# Make sure both users are subscribed to this stream
2020-03-12 14:17:25 +01:00
stream_id = get_stream ( stream_name , hamlet . realm ) . id
2020-06-13 08:59:37 +02:00
result = self . api_get ( hamlet , f " /api/v1/streams/ { stream_id } /members " )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
json = result . json ( )
2014-01-29 22:03:40 +01:00
2021-07-17 00:29:45 +02:00
self . assertTrue ( othello . id in json [ " subscribers " ] )
self . assertTrue ( hamlet . id in json [ " subscribers " ] )
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
class GetSubscribersTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def setUp ( self ) - > None :
2019-10-19 20:47:00 +02:00
super ( ) . setUp ( )
2021-02-12 08:20:45 +01:00
self . user_profile = self . example_user ( " hamlet " )
2020-03-06 18:40:46 +01:00
self . login_user ( self . user_profile )
2014-01-29 22:03:40 +01:00
2022-03-11 19:32:25 +01:00
def verify_sub_fields ( self , sub_data : SubscriptionInfo ) - > None :
other_fields = {
" email_address " ,
" is_announcement_only " ,
" in_home_view " ,
" stream_id " ,
" stream_weekly_traffic " ,
" subscribers " ,
}
expected_fields = set ( Stream . API_FIELDS ) | set ( Subscription . API_FIELDS ) | other_fields
expected_fields - = { " id " }
for lst in [ sub_data . subscriptions , sub_data . unsubscribed ] :
for sub in lst :
self . assertEqual ( set ( sub ) , expected_fields )
other_fields = {
" is_announcement_only " ,
" stream_id " ,
" stream_weekly_traffic " ,
" subscribers " ,
}
expected_fields = set ( Stream . API_FIELDS ) | other_fields
expected_fields - = { " id " }
for never_sub in sub_data . never_subscribed :
self . assertEqual ( set ( never_sub ) , expected_fields )
2021-12-29 20:10:36 +01:00
def assert_user_got_subscription_notification (
self , user : UserProfile , expected_msg : str
) - > None :
2017-05-16 01:32:50 +02:00
# verify that the user was sent a message informing them about the subscription
2021-12-29 20:10:36 +01:00
realm = user . realm
msg = most_recent_message ( user )
2017-05-16 01:32:50 +02:00
self . assertEqual ( msg . recipient . type , msg . recipient . PERSONAL )
2021-03-08 11:39:48 +01:00
self . assertEqual ( msg . sender_id , self . notification_bot ( realm ) . id )
2017-05-16 01:32:50 +02:00
2018-05-11 01:39:38 +02:00
def non_ws ( s : str ) - > str :
2021-02-12 08:20:45 +01:00
return s . replace ( " \n " , " " ) . replace ( " " , " " )
2017-05-16 01:32:50 +02:00
self . assertEqual ( non_ws ( msg . content ) , non_ws ( expected_msg ) )
2021-02-12 08:19:30 +01:00
def check_well_formed_result (
self , result : Dict [ str , Any ] , stream_name : str , realm : Realm
) - > None :
2014-01-29 22:03:40 +01:00
"""
A successful call to get_subscribers returns the list of subscribers in
the form :
{ " msg " : " " ,
" result " : " success " ,
2021-07-17 00:29:45 +02:00
" subscribers " : [ hamlet_user . id , prospero_user . id ] }
2014-01-29 22:03:40 +01:00
"""
self . assertIn ( " subscribers " , result )
self . assertIsInstance ( result [ " subscribers " ] , list )
2021-02-12 08:19:30 +01:00
true_subscribers = [
2021-07-17 00:29:45 +02:00
user_profile . id for user_profile in self . users_subscribed_to_stream ( stream_name , realm )
2021-02-12 08:19:30 +01:00
]
2016-07-10 20:43:58 +02:00
self . assertEqual ( sorted ( result [ " subscribers " ] ) , sorted ( true_subscribers ) )
2014-01-29 22:03:40 +01:00
2021-02-12 08:19:30 +01:00
def make_subscriber_request (
self , stream_id : int , user : Optional [ UserProfile ] = None
) - > HttpResponse :
2020-03-10 11:48:26 +01:00
if user is None :
user = self . user_profile
2020-06-13 08:59:37 +02:00
return self . api_get ( user , f " /api/v1/streams/ { stream_id } /members " )
2014-01-29 22:03:40 +01:00
2018-05-11 01:39:38 +02:00
def make_successful_subscriber_request ( self , stream_name : str ) - > None :
2017-01-13 15:50:17 +01:00
stream_id = get_stream ( stream_name , self . user_profile . realm ) . id
2016-12-30 11:42:59 +01:00
result = self . make_subscriber_request ( stream_id )
2014-01-29 22:03:40 +01:00
self . assert_json_success ( result )
2021-02-12 08:19:30 +01:00
self . check_well_formed_result ( result . json ( ) , stream_name , self . user_profile . realm )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_subscriber ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
get_subscribers returns the list of subscribers .
"""
2021-02-12 08:20:45 +01:00
stream_name = gather_subscriptions ( self . user_profile ) [ 0 ] [ 0 ] [ " name " ]
2014-01-29 22:03:40 +01:00
self . make_successful_subscriber_request ( stream_name )
2017-11-05 10:51:25 +01:00
def test_gather_subscriptions ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
gather_subscriptions returns correct results with only 3 queries
2017-05-16 01:32:50 +02:00
( We also use this test to verify subscription notifications to
folks who get subscribed to streams . )
2014-01-29 22:03:40 +01:00
"""
2021-12-29 19:55:30 +01:00
hamlet = self . example_user ( " hamlet " )
2021-12-29 20:10:36 +01:00
cordelia = self . example_user ( " cordelia " )
othello = self . example_user ( " othello " )
polonius = self . example_user ( " polonius " )
2020-06-10 06:41:04 +02:00
streams = [ f " stream_ { i } " for i in range ( 10 ) ]
2016-10-21 23:22:25 +02:00
for stream_name in streams :
self . make_stream ( stream_name )
2020-03-12 14:17:25 +01:00
users_to_subscribe = [
2020-04-09 19:07:57 +02:00
self . user_profile . id ,
2021-12-29 20:10:36 +01:00
othello . id ,
cordelia . id ,
polonius . id ,
2020-03-12 14:17:25 +01:00
]
2021-12-29 20:10:36 +01:00
with queries_captured ( ) as queries :
self . common_subscribe_to_streams (
self . user_profile ,
streams ,
dict ( principals = orjson . dumps ( users_to_subscribe ) . decode ( ) ) ,
)
2021-12-30 15:02:07 +01:00
self . assert_length ( queries , 46 )
2017-05-16 01:32:50 +02:00
2021-12-29 19:55:30 +01:00
msg = f """
@ * * King Hamlet | { hamlet . id } * * subscribed you to the following streams :
2017-05-16 01:32:50 +02:00
* #**stream_0**
* #**stream_1**
* #**stream_2**
* #**stream_3**
* #**stream_4**
* #**stream_5**
* #**stream_6**
* #**stream_7**
* #**stream_8**
* #**stream_9**
2021-02-12 08:20:45 +01:00
"""
2017-05-16 01:32:50 +02:00
2021-12-29 20:10:36 +01:00
for user in [ cordelia , othello , polonius ] :
self . assert_user_got_subscription_notification ( user , msg )
2017-05-16 01:32:50 +02:00
# Subscribe ourself first.
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . user_profile ,
2017-05-16 01:32:50 +02:00
[ " stream_invite_only_1 " ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( [ self . user_profile . id ] ) . decode ( ) ) ,
2021-02-12 08:19:30 +01:00
invite_only = True ,
)
2017-05-16 01:32:50 +02:00
# Now add in other users, and this should trigger messages
# to notify the user.
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . user_profile ,
2014-01-29 22:03:40 +01:00
[ " stream_invite_only_1 " ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( users_to_subscribe ) . decode ( ) ) ,
2021-02-12 08:19:30 +01:00
invite_only = True ,
)
2014-01-29 22:03:40 +01:00
2021-12-29 19:55:30 +01:00
msg = f """
@ * * King Hamlet | { hamlet . id } * * subscribed you to the stream #**stream_invite_only_1**.
2021-02-12 08:20:45 +01:00
"""
2021-12-29 20:10:36 +01:00
for user in [ cordelia , othello , polonius ] :
self . assert_user_got_subscription_notification ( user , msg )
2017-05-16 01:32:50 +02:00
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
2019-08-08 21:58:38 +02:00
subscribed_streams , _ = gather_subscriptions (
2021-02-12 08:19:30 +01:00
self . user_profile , include_subscribers = True
)
2021-07-13 19:39:37 +02:00
self . assertGreaterEqual ( len ( subscribed_streams ) , 11 )
2019-08-08 21:58:38 +02:00
for sub in subscribed_streams :
2014-01-29 22:03:40 +01:00
if not sub [ " name " ] . startswith ( " stream_ " ) :
continue
2021-07-13 19:39:37 +02:00
self . assert_length ( sub [ " subscribers " ] , len ( users_to_subscribe ) )
2021-07-17 00:29:45 +02:00
self . assert_length ( queries , 4 )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_never_subscribed_streams ( self ) - > None :
2016-07-12 23:57:16 +02:00
"""
2020-07-23 23:18:32 +02:00
Check never_subscribed streams are fetched correctly and not include invite_only streams ,
or invite_only and public streams to guest users .
2016-07-12 23:57:16 +02:00
"""
2017-01-04 05:30:48 +01:00
realm = get_realm ( " zulip " )
2017-08-22 16:51:07 +02:00
users_to_subscribe = [
2020-04-09 19:07:57 +02:00
self . example_user ( " othello " ) . id ,
self . example_user ( " cordelia " ) . id ,
2017-08-22 16:51:07 +02:00
]
public_streams = [
2021-02-12 08:20:45 +01:00
" test_stream_public_1 " ,
" test_stream_public_2 " ,
" test_stream_public_3 " ,
" test_stream_public_4 " ,
" test_stream_public_5 " ,
2017-08-22 16:51:07 +02:00
]
private_streams = [
2021-02-12 08:20:45 +01:00
" test_stream_invite_only_1 " ,
" test_stream_invite_only_2 " ,
2017-08-22 16:51:07 +02:00
]
2020-07-23 23:18:32 +02:00
web_public_streams = [
2021-02-12 08:20:45 +01:00
" test_stream_web_public_1 " ,
" test_stream_web_public_2 " ,
2020-07-23 23:18:32 +02:00
]
2017-11-05 10:51:25 +01:00
def create_public_streams ( ) - > None :
2017-08-22 16:51:07 +02:00
for stream_name in public_streams :
self . make_stream ( stream_name , realm = realm )
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . user_profile ,
2017-08-22 16:51:07 +02:00
public_streams ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( users_to_subscribe ) . decode ( ) ) ,
2017-08-22 16:51:07 +02:00
)
create_public_streams ( )
2020-07-23 23:18:32 +02:00
def create_web_public_streams ( ) - > None :
for stream_name in web_public_streams :
self . make_stream ( stream_name , realm = realm , is_web_public = True )
ret = self . common_subscribe_to_streams (
self . user_profile ,
web_public_streams ,
2021-02-12 08:19:30 +01:00
dict ( principals = orjson . dumps ( users_to_subscribe ) . decode ( ) ) ,
2020-07-23 23:18:32 +02:00
)
self . assert_json_success ( ret )
create_web_public_streams ( )
2017-11-05 10:51:25 +01:00
def create_private_streams ( ) - > None :
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
self . user_profile ,
2017-08-22 16:51:07 +02:00
private_streams ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( users_to_subscribe ) . decode ( ) ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
invite_only = True ,
2017-08-22 16:51:07 +02:00
)
create_private_streams ( )
2022-03-11 19:32:25 +01:00
def get_never_subscribed ( ) - > List [ NeverSubscribedStreamDict ] :
2017-08-22 16:51:07 +02:00
with queries_captured ( ) as queries :
sub_data = gather_subscriptions_helper ( self . user_profile )
2022-03-11 19:32:25 +01:00
self . verify_sub_fields ( sub_data )
2021-01-14 21:44:56 +01:00
never_subscribed = sub_data . never_subscribed
2020-10-18 14:51:25 +02:00
self . assert_length ( queries , 4 )
2017-08-22 16:51:07 +02:00
# Ignore old streams.
2021-02-12 08:20:45 +01:00
never_subscribed = [ dct for dct in never_subscribed if dct [ " name " ] . startswith ( " test_ " ) ]
2017-08-22 16:51:07 +02:00
return never_subscribed
never_subscribed = get_never_subscribed ( )
2016-07-12 23:57:16 +02:00
# Invite only stream should not be there in never_subscribed streams
2021-05-17 05:41:32 +02:00
self . assert_length ( never_subscribed , len ( public_streams ) + len ( web_public_streams ) )
2016-07-12 23:57:16 +02:00
for stream_dict in never_subscribed :
2021-02-12 08:20:45 +01:00
name = stream_dict [ " name " ]
self . assertFalse ( " invite_only " in name )
2021-07-13 19:39:37 +02:00
self . assert_length ( stream_dict [ " subscribers " ] , len ( users_to_subscribe ) )
2016-07-12 23:57:16 +02:00
2018-03-16 12:28:19 +01:00
# Send private stream subscribers to all realm admins.
2017-11-05 10:51:25 +01:00
def test_admin_case ( ) - > None :
2019-10-05 02:35:07 +02:00
self . user_profile . role = UserProfile . ROLE_REALM_ADMINISTRATOR
2018-03-16 12:28:19 +01:00
# Test realm admins can get never subscribed private stream's subscribers.
2017-08-22 16:18:35 +02:00
never_subscribed = get_never_subscribed ( )
self . assertEqual (
len ( never_subscribed ) ,
2020-07-23 23:18:32 +02:00
len ( public_streams ) + len ( private_streams ) + len ( web_public_streams ) ,
2017-08-22 16:18:35 +02:00
)
for stream_dict in never_subscribed :
2021-07-13 19:39:37 +02:00
self . assert_length ( stream_dict [ " subscribers " ] , len ( users_to_subscribe ) )
2017-08-22 16:18:35 +02:00
test_admin_case ( )
2020-07-23 23:18:32 +02:00
def test_guest_user_case ( ) - > None :
self . user_profile . role = UserProfile . ROLE_GUEST
2021-01-14 21:44:56 +01:00
helper_result = gather_subscriptions_helper ( self . user_profile )
2022-03-11 19:32:25 +01:00
self . verify_sub_fields ( helper_result )
2021-01-14 21:44:56 +01:00
sub = helper_result . subscriptions
unsub = helper_result . unsubscribed
never_sub = helper_result . never_subscribed
2020-07-23 23:18:32 +02:00
# It's +1 because of the stream Rome.
2021-05-17 05:41:32 +02:00
self . assert_length ( never_sub , len ( web_public_streams ) + 1 )
2020-07-23 23:18:32 +02:00
sub_ids = list ( map ( lambda stream : stream [ " stream_id " ] , sub ) )
unsub_ids = list ( map ( lambda stream : stream [ " stream_id " ] , unsub ) )
for stream_dict in never_sub :
self . assertTrue ( stream_dict [ " is_web_public " ] )
self . assertTrue ( stream_dict [ " stream_id " ] not in sub_ids )
self . assertTrue ( stream_dict [ " stream_id " ] not in unsub_ids )
2020-07-24 00:45:26 +02:00
# The Rome stream has is_web_public=True, with default
docs: Add missing space to compound verbs “log in”, “set up”, etc.
Noun: backup, checkout, cleanup, login, logout, setup, shutdown, signup,
timeout.
Verb: back up, check out, clean up, log in, log out, set up, shut
down, sign up, time out.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
2021-04-25 23:05:38 +02:00
# subscribers not set up by this test, so we do the
2020-07-24 00:45:26 +02:00
# following check only for the streams we created.
if stream_dict [ " name " ] in web_public_streams :
2021-05-17 05:41:32 +02:00
self . assert_length ( stream_dict [ " subscribers " ] , len ( users_to_subscribe ) )
2020-07-24 00:45:26 +02:00
2020-07-23 23:18:32 +02:00
test_guest_user_case ( )
2018-06-02 09:25:39 +02:00
def test_gather_subscribed_streams_for_guest_user ( self ) - > None :
guest_user = self . example_user ( " polonius " )
stream_name_sub = " public_stream_1 "
self . make_stream ( stream_name_sub , realm = get_realm ( " zulip " ) )
self . subscribe ( guest_user , stream_name_sub )
stream_name_unsub = " public_stream_2 "
self . make_stream ( stream_name_unsub , realm = get_realm ( " zulip " ) )
self . subscribe ( guest_user , stream_name_unsub )
self . unsubscribe ( guest_user , stream_name_unsub )
stream_name_never_sub = " public_stream_3 "
self . make_stream ( stream_name_never_sub , realm = get_realm ( " zulip " ) )
normal_user = self . example_user ( " aaron " )
self . subscribe ( normal_user , stream_name_sub )
self . subscribe ( normal_user , stream_name_unsub )
self . subscribe ( normal_user , stream_name_unsub )
2021-01-14 21:44:56 +01:00
helper_result = gather_subscriptions_helper ( guest_user )
2022-03-11 19:32:25 +01:00
self . verify_sub_fields ( helper_result )
2021-01-14 21:44:56 +01:00
subs = helper_result . subscriptions
neversubs = helper_result . never_subscribed
2018-06-02 09:25:39 +02:00
# Guest users get info about subscribed public stream's subscribers
expected_stream_exists = False
for sub in subs :
if sub [ " name " ] == stream_name_sub :
expected_stream_exists = True
2021-05-17 05:41:32 +02:00
self . assert_length ( sub [ " subscribers " ] , 2 )
2018-06-02 09:25:39 +02:00
self . assertTrue ( expected_stream_exists )
2020-07-23 23:18:32 +02:00
# Guest user only get data about never subscribed streams if they're
# web-public.
for stream in neversubs :
2021-02-12 08:20:45 +01:00
self . assertTrue ( stream [ " is_web_public " ] )
2018-06-02 09:25:39 +02:00
2020-07-23 23:18:32 +02:00
# Guest user only get data about never subscribed web-public streams
2021-05-17 05:41:32 +02:00
self . assert_length ( neversubs , 1 )
2018-06-02 09:25:39 +02:00
2022-03-11 19:32:25 +01:00
def test_api_fields_present ( self ) - > None :
user = self . example_user ( " cordelia " )
sub_data = gather_subscriptions_helper ( user )
subscribed = sub_data . subscriptions
self . assertGreaterEqual ( len ( subscribed ) , 1 )
self . verify_sub_fields ( sub_data )
2018-03-16 12:28:19 +01:00
def test_previously_subscribed_private_streams ( self ) - > None :
admin_user = self . example_user ( " iago " )
non_admin_user = self . example_user ( " cordelia " )
2020-07-23 23:18:32 +02:00
guest_user = self . example_user ( " polonius " )
2018-03-16 12:28:19 +01:00
stream_name = " private_stream "
self . make_stream ( stream_name , realm = get_realm ( " zulip " ) , invite_only = True )
self . subscribe ( admin_user , stream_name )
self . subscribe ( non_admin_user , stream_name )
2020-07-23 23:18:32 +02:00
self . subscribe ( guest_user , stream_name )
2018-03-16 12:28:19 +01:00
self . subscribe ( self . example_user ( " othello " ) , stream_name )
self . unsubscribe ( admin_user , stream_name )
self . unsubscribe ( non_admin_user , stream_name )
2020-07-23 23:18:32 +02:00
self . unsubscribe ( guest_user , stream_name )
2018-03-16 12:28:19 +01:00
2018-06-01 22:17:47 +02:00
# Test admin user gets previously subscribed private stream's subscribers.
2018-03-16 12:28:19 +01:00
sub_data = gather_subscriptions_helper ( admin_user )
2022-03-11 19:32:25 +01:00
self . verify_sub_fields ( sub_data )
2021-01-14 21:44:56 +01:00
unsubscribed_streams = sub_data . unsubscribed
2021-05-17 05:41:32 +02:00
self . assert_length ( unsubscribed_streams , 1 )
self . assert_length ( unsubscribed_streams [ 0 ] [ " subscribers " ] , 1 )
2018-03-16 12:28:19 +01:00
2018-06-01 22:17:47 +02:00
# Test non admin users cannot get previously subscribed private stream's subscribers.
2018-03-16 12:28:19 +01:00
sub_data = gather_subscriptions_helper ( non_admin_user )
2022-03-11 19:32:25 +01:00
self . verify_sub_fields ( sub_data )
2021-01-14 21:44:56 +01:00
unsubscribed_streams = sub_data . unsubscribed
2021-05-17 05:41:32 +02:00
self . assert_length ( unsubscribed_streams , 1 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( unsubscribed_streams [ 0 ] [ " subscribers " ] , [ ] )
2018-03-16 12:28:19 +01:00
2020-07-23 23:18:32 +02:00
sub_data = gather_subscriptions_helper ( guest_user )
2022-03-11 19:32:25 +01:00
self . verify_sub_fields ( sub_data )
2021-01-14 21:44:56 +01:00
unsubscribed_streams = sub_data . unsubscribed
2021-05-17 05:41:32 +02:00
self . assert_length ( unsubscribed_streams , 1 )
2021-02-12 08:20:45 +01:00
self . assertEqual ( unsubscribed_streams [ 0 ] [ " subscribers " ] , [ ] )
2020-07-23 23:18:32 +02:00
2017-11-05 10:51:25 +01:00
def test_gather_subscriptions_mit ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
gather_subscriptions returns correct results with only 3 queries
"""
# Subscribe only ourself because invites are disabled on mit.edu
2021-02-12 08:20:45 +01:00
mit_user_profile = self . mit_user ( " starnine " )
2020-04-09 19:07:57 +02:00
user_id = mit_user_profile . id
users_to_subscribe = [ user_id , self . mit_user ( " espuser " ) . id ]
2014-01-29 22:03:40 +01:00
for email in users_to_subscribe :
2020-03-09 21:41:26 +01:00
stream = self . subscribe ( mit_user_profile , " mit_stream " )
2017-10-08 21:16:51 +02:00
self . assertTrue ( stream . is_in_zephyr_realm )
2014-01-29 22:03:40 +01:00
2020-06-17 23:49:33 +02:00
self . common_subscribe_to_streams (
2020-03-09 21:41:26 +01:00
mit_user_profile ,
2014-01-29 22:03:40 +01:00
[ " mit_invite_only " ] ,
2020-08-07 01:09:47 +02:00
dict ( principals = orjson . dumps ( users_to_subscribe ) . decode ( ) ) ,
2017-08-26 00:58:13 +02:00
invite_only = True ,
2021-02-12 08:19:30 +01:00
subdomain = " zephyr " ,
)
2014-01-29 22:03:40 +01:00
with queries_captured ( ) as queries :
2021-02-12 08:19:30 +01:00
subscribed_streams , _ = gather_subscriptions ( mit_user_profile , include_subscribers = True )
2014-01-29 22:03:40 +01:00
2021-07-13 19:43:29 +02:00
self . assertGreaterEqual ( len ( subscribed_streams ) , 2 )
2019-08-08 21:58:38 +02:00
for sub in subscribed_streams :
2014-01-29 22:03:40 +01:00
if not sub [ " name " ] . startswith ( " mit_ " ) :
2017-03-05 08:07:56 +01:00
raise AssertionError ( " Unexpected stream! " )
2014-01-29 22:03:40 +01:00
if sub [ " name " ] == " mit_invite_only " :
2021-07-13 19:39:37 +02:00
self . assert_length ( sub [ " subscribers " ] , len ( users_to_subscribe ) )
2014-01-29 22:03:40 +01:00
else :
2021-07-13 19:39:37 +02:00
self . assert_length ( sub [ " subscribers " ] , 0 )
2021-07-17 00:29:45 +02:00
self . assert_length ( queries , 4 )
2014-01-29 22:03:40 +01:00
2017-11-05 10:51:25 +01:00
def test_nonsubscriber ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
Even a non - subscriber to a public stream can query a stream ' s membership
with get_subscribers .
"""
# Create a stream for which Hamlet is the only subscriber.
stream_name = " Saxony "
2020-03-09 21:41:26 +01:00
self . common_subscribe_to_streams ( self . user_profile , [ stream_name ] )
2020-03-06 18:40:46 +01:00
other_user = self . example_user ( " othello " )
2014-01-29 22:03:40 +01:00
# Fetch the subscriber list as a non-member.
2020-03-06 18:40:46 +01:00
self . login_user ( other_user )
2014-01-29 22:03:40 +01:00
self . make_successful_subscriber_request ( stream_name )
2017-11-05 10:51:25 +01:00
def test_subscriber_private_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
A subscriber to a private stream can query that stream ' s membership.
"""
stream_name = " Saxony "
2021-02-12 08:19:30 +01:00
self . common_subscribe_to_streams ( self . user_profile , [ stream_name ] , invite_only = True )
2014-01-29 22:03:40 +01:00
self . make_successful_subscriber_request ( stream_name )
2018-02-14 17:59:01 +01:00
stream_id = get_stream ( stream_name , self . user_profile . realm ) . id
# Verify another user can't get the data.
2021-02-12 08:20:45 +01:00
self . login ( " cordelia " )
2020-06-13 08:59:37 +02:00
result = self . client_get ( f " /json/streams/ { stream_id } /members " )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Invalid stream id " )
2018-02-14 17:59:01 +01:00
# But an organization administrator can
2021-02-12 08:20:45 +01:00
self . login ( " iago " )
2020-06-13 08:59:37 +02:00
result = self . client_get ( f " /json/streams/ { stream_id } /members " )
2018-02-14 17:59:01 +01:00
self . assert_json_success ( result )
2017-11-05 10:51:25 +01:00
def test_json_get_subscribers_stream_not_exist ( self ) - > None :
2016-07-16 18:50:41 +02:00
"""
json_get_subscribers also returns the list of subscribers for a stream .
"""
2016-12-30 11:42:59 +01:00
stream_id = 99999999
2020-06-13 08:59:37 +02:00
result = self . client_get ( f " /json/streams/ { stream_id } /members " )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Invalid stream id " )
2016-07-16 18:50:41 +02:00
2017-11-05 10:51:25 +01:00
def test_json_get_subscribers ( self ) - > None :
2016-06-21 18:20:15 +02:00
"""
json_get_subscribers in zerver / views / streams . py
2019-08-08 21:58:38 +02:00
also returns the list of subscribers for a stream , when requested .
2016-06-21 18:20:15 +02:00
"""
2021-02-12 08:20:45 +01:00
stream_name = gather_subscriptions ( self . user_profile ) [ 0 ] [ 0 ] [ " name " ]
2017-01-13 15:50:17 +01:00
stream_id = get_stream ( stream_name , self . user_profile . realm ) . id
2021-02-12 08:19:30 +01:00
expected_subscribers = gather_subscriptions ( self . user_profile , include_subscribers = True ) [ 0 ] [
0
2021-02-12 08:20:45 +01:00
] [ " subscribers " ]
2020-06-13 08:59:37 +02:00
result = self . client_get ( f " /json/streams/ { stream_id } /members " )
2016-06-21 18:20:15 +02:00
self . assert_json_success ( result )
2017-08-17 08:45:20 +02:00
result_dict = result . json ( )
2021-02-12 08:20:45 +01:00
self . assertIn ( " subscribers " , result_dict )
self . assertIsInstance ( result_dict [ " subscribers " ] , list )
2021-07-17 00:29:45 +02:00
subscribers : List [ int ] = [ ]
2021-02-12 08:20:45 +01:00
for subscriber in result_dict [ " subscribers " ] :
2021-07-17 00:29:45 +02:00
self . assertIsInstance ( subscriber , int )
2016-06-21 18:20:15 +02:00
subscribers . append ( subscriber )
self . assertEqual ( set ( subscribers ) , set ( expected_subscribers ) )
2020-07-24 00:45:26 +02:00
def test_json_get_subscribers_for_guest_user ( self ) - > None :
"""
Guest users should have access to subscribers of web - public streams , even
if they aren ' t subscribed or have never subscribed to that stream.
"""
guest_user = self . example_user ( " polonius " )
2021-01-14 21:44:56 +01:00
never_subscribed = gather_subscriptions_helper ( guest_user , True ) . never_subscribed
2020-07-24 00:45:26 +02:00
# A guest user can only see never subscribed streams that are web-public.
2022-01-29 00:54:13 +01:00
# For Polonius, the only web-public stream that he is not subscribed at
2020-07-24 00:45:26 +02:00
# this point is Rome.
2021-07-13 19:39:37 +02:00
self . assert_length ( never_subscribed , 1 )
2020-07-24 00:45:26 +02:00
2021-02-12 08:20:45 +01:00
web_public_stream_id = never_subscribed [ 0 ] [ " stream_id " ]
2020-07-24 00:45:26 +02:00
result = self . client_get ( f " /json/streams/ { web_public_stream_id } /members " )
self . assert_json_success ( result )
result_dict = result . json ( )
2021-02-12 08:20:45 +01:00
self . assertIn ( " subscribers " , result_dict )
self . assertIsInstance ( result_dict [ " subscribers " ] , list )
2021-07-13 19:42:37 +02:00
self . assertGreater ( len ( result_dict [ " subscribers " ] ) , 0 )
2020-07-24 00:45:26 +02:00
2017-11-05 10:51:25 +01:00
def test_nonsubscriber_private_stream ( self ) - > None :
2014-01-29 22:03:40 +01:00
"""
2018-02-14 17:59:01 +01:00
A non - subscriber non realm admin user to a private stream can ' t query that stream ' s membership .
But unsubscribed realm admin users can query private stream ' s membership.
2014-01-29 22:03:40 +01:00
"""
# Create a private stream for which Hamlet is the only subscriber.
stream_name = " NewStream "
2021-02-12 08:19:30 +01:00
self . common_subscribe_to_streams ( self . user_profile , [ stream_name ] , invite_only = True )
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " othello " )
2014-01-29 22:03:40 +01:00
2018-02-14 17:59:01 +01:00
# Try to fetch the subscriber list as a non-member & non-realm-admin-user.
2017-01-13 15:50:17 +01:00
stream_id = get_stream ( stream_name , user_profile . realm ) . id
2020-03-10 11:48:26 +01:00
result = self . make_subscriber_request ( stream_id , user = user_profile )
2017-01-30 02:01:53 +01:00
self . assert_json_error ( result , " Invalid stream id " )
2017-01-30 00:48:45 +01:00
2018-02-14 17:59:01 +01:00
# Try to fetch the subscriber list as a non-member & realm-admin-user.
2021-02-12 08:20:45 +01:00
self . login ( " iago " )
2018-02-14 17:59:01 +01:00
self . make_successful_subscriber_request ( stream_name )
2021-02-12 08:19:30 +01:00
2017-01-30 00:48:45 +01:00
class AccessStreamTest ( ZulipTestCase ) :
2017-11-05 10:51:25 +01:00
def test_access_stream ( self ) - > None :
2017-01-30 00:48:45 +01:00
"""
A comprehensive security test for the access_stream_by_ * API functions .
"""
# Create a private stream for which Hamlet is the only subscriber.
2021-02-12 08:20:45 +01:00
hamlet = self . example_user ( " hamlet " )
2017-01-30 00:48:45 +01:00
stream_name = " new_private_stream "
2020-03-06 18:40:46 +01:00
self . login_user ( hamlet )
2021-02-12 08:19:30 +01:00
self . common_subscribe_to_streams ( hamlet , [ stream_name ] , invite_only = True )
2017-01-30 00:48:45 +01:00
stream = get_stream ( stream_name , hamlet . realm )
2021-02-12 08:20:45 +01:00
othello = self . example_user ( " othello " )
2017-01-30 00:48:45 +01:00
# Nobody can access a stream that doesn't exist
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( hamlet , 501232 )
with self . assertRaisesRegex ( JsonableError , " Invalid stream name ' invalid stream ' " ) :
access_stream_by_name ( hamlet , " invalid stream " )
# Hamlet can access the private stream
2020-10-16 17:25:48 +02:00
( stream_ret , sub_ret ) = access_stream_by_id ( hamlet , stream . id )
2017-09-17 19:53:38 +02:00
self . assertEqual ( stream . id , stream_ret . id )
2018-05-16 21:09:52 +02:00
assert sub_ret is not None
2017-01-30 00:48:45 +01:00
self . assertEqual ( sub_ret . recipient . type_id , stream . id )
2020-10-16 18:00:07 +02:00
( stream_ret2 , sub_ret2 ) = access_stream_by_name ( hamlet , stream . name )
2017-09-17 19:53:38 +02:00
self . assertEqual ( stream_ret . id , stream_ret2 . id )
2017-01-30 00:48:45 +01:00
self . assertEqual ( sub_ret , sub_ret2 )
# Othello cannot access the private stream
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( othello , stream . id )
with self . assertRaisesRegex ( JsonableError , " Invalid stream name ' new_private_stream ' " ) :
access_stream_by_name ( othello , stream . name )
# Both Othello and Hamlet can access a public stream that only
# Hamlet is subscribed to in this realm
public_stream_name = " public_stream "
2021-02-12 08:19:30 +01:00
self . common_subscribe_to_streams ( hamlet , [ public_stream_name ] , invite_only = False )
2017-01-30 00:48:45 +01:00
public_stream = get_stream ( public_stream_name , hamlet . realm )
access_stream_by_id ( othello , public_stream . id )
access_stream_by_name ( othello , public_stream . name )
access_stream_by_id ( hamlet , public_stream . id )
access_stream_by_name ( hamlet , public_stream . name )
# Nobody can access a public stream in another realm
2017-03-04 09:19:37 +01:00
mit_realm = get_realm ( " zephyr " )
2021-04-02 18:11:45 +02:00
mit_stream = ensure_stream ( mit_realm , " mit_stream " , invite_only = False , acting_user = None )
2017-05-23 01:27:31 +02:00
sipbtest = self . mit_user ( " sipbtest " )
2017-01-30 00:48:45 +01:00
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( hamlet , mit_stream . id )
with self . assertRaisesRegex ( JsonableError , " Invalid stream name ' mit_stream ' " ) :
access_stream_by_name ( hamlet , mit_stream . name )
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( sipbtest , stream . id )
with self . assertRaisesRegex ( JsonableError , " Invalid stream name ' new_private_stream ' " ) :
access_stream_by_name ( sipbtest , stream . name )
# MIT realm users cannot access even public streams in their realm
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( sipbtest , mit_stream . id )
with self . assertRaisesRegex ( JsonableError , " Invalid stream name ' mit_stream ' " ) :
access_stream_by_name ( sipbtest , mit_stream . name )
# But they can access streams they are subscribed to
2020-03-09 21:41:26 +01:00
self . common_subscribe_to_streams ( sipbtest , [ mit_stream . name ] , subdomain = " zephyr " )
2017-01-30 00:48:45 +01:00
access_stream_by_id ( sipbtest , mit_stream . id )
access_stream_by_name ( sipbtest , mit_stream . name )
2018-05-02 17:00:06 +02:00
def test_stream_access_by_guest ( self ) - > None :
2021-02-12 08:20:45 +01:00
guest_user_profile = self . example_user ( " polonius " )
2020-03-06 18:40:46 +01:00
self . login_user ( guest_user_profile )
2018-05-02 17:00:06 +02:00
stream_name = " public_stream_1 "
stream = self . make_stream ( stream_name , guest_user_profile . realm , invite_only = False )
# Guest user don't have access to unsubscribed public streams
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( guest_user_profile , stream . id )
# Guest user have access to subscribed public streams
self . subscribe ( guest_user_profile , stream_name )
2020-10-16 17:25:48 +02:00
( stream_ret , sub_ret ) = access_stream_by_id ( guest_user_profile , stream . id )
2018-05-16 21:09:52 +02:00
assert sub_ret is not None
2018-05-02 17:00:06 +02:00
self . assertEqual ( stream . id , stream_ret . id )
self . assertEqual ( sub_ret . recipient . type_id , stream . id )
stream_name = " private_stream_1 "
stream = self . make_stream ( stream_name , guest_user_profile . realm , invite_only = True )
# Obviously, a guest user doesn't have access to unsubscribed private streams either
with self . assertRaisesRegex ( JsonableError , " Invalid stream id " ) :
access_stream_by_id ( guest_user_profile , stream . id )
# Guest user have access to subscribed private streams
self . subscribe ( guest_user_profile , stream_name )
2020-10-16 17:25:48 +02:00
( stream_ret , sub_ret ) = access_stream_by_id ( guest_user_profile , stream . id )
2018-05-16 21:09:52 +02:00
assert sub_ret is not None
2018-05-02 17:00:06 +02:00
self . assertEqual ( stream . id , stream_ret . id )
self . assertEqual ( sub_ret . recipient . type_id , stream . id )
2018-06-20 23:03:03 +02:00
2020-07-24 04:56:12 +02:00
stream_name = " web_public_stream "
stream = self . make_stream ( stream_name , guest_user_profile . realm , is_web_public = True )
2022-01-29 00:54:13 +01:00
# Guest users have access to web-public streams even if they aren't subscribed.
2020-10-16 17:25:48 +02:00
( stream_ret , sub_ret ) = access_stream_by_id ( guest_user_profile , stream . id )
2020-07-24 04:56:12 +02:00
self . assertTrue ( can_access_stream_history ( guest_user_profile , stream ) )
assert sub_ret is None
self . assertEqual ( stream . id , stream_ret . id )
2021-02-12 08:19:30 +01:00
2018-06-20 23:03:03 +02:00
class StreamTrafficTest ( ZulipTestCase ) :
def test_average_weekly_stream_traffic_calculation ( self ) - > None :
# No traffic data for the stream
self . assertEqual (
2021-02-12 08:19:30 +01:00
get_average_weekly_stream_traffic ( 42 , timezone_now ( ) - timedelta ( days = 300 ) , { 1 : 4003 } ) ,
0 ,
)
2018-06-20 23:03:03 +02:00
# using high numbers here to make it more likely to catch small errors in the denominators
# of the calculations. That being said we don't want to go over 100, since then the 2
# significant digits calculation gets applied
# old stream
self . assertEqual (
2021-02-12 08:19:30 +01:00
get_average_weekly_stream_traffic (
42 , timezone_now ( ) - timedelta ( days = 300 ) , { 42 : 98 * 4 + 3 }
) ,
98 ,
)
2018-06-20 23:03:03 +02:00
# stream between 7 and 27 days old
self . assertEqual (
2021-02-12 08:19:30 +01:00
get_average_weekly_stream_traffic (
42 , timezone_now ( ) - timedelta ( days = 10 ) , { 42 : ( 98 * 10 + 9 ) / / 7 }
) ,
98 ,
)
2018-06-20 23:03:03 +02:00
# stream less than 7 days old
self . assertEqual (
2021-02-12 08:19:30 +01:00
get_average_weekly_stream_traffic ( 42 , timezone_now ( ) - timedelta ( days = 5 ) , { 42 : 100 } ) ,
None ,
)
2018-06-20 23:03:03 +02:00
# average traffic between 0 and 1
self . assertEqual (
2021-02-12 08:19:30 +01:00
get_average_weekly_stream_traffic ( 42 , timezone_now ( ) - timedelta ( days = 300 ) , { 42 : 1 } ) , 1
)
2018-06-20 23:03:03 +02:00
def test_round_to_2_significant_digits ( self ) - > None :
self . assertEqual ( 120 , round_to_2_significant_digits ( 116 ) )
2020-07-08 01:51:44 +02:00
2021-02-12 08:19:30 +01:00
2020-07-08 01:51:44 +02:00
class NoRecipientIDsTest ( ZulipTestCase ) :
def test_no_recipient_ids ( self ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " cordelia " )
2020-07-08 01:51:44 +02:00
2021-02-12 08:19:30 +01:00
Subscription . objects . filter (
user_profile = user_profile , recipient__type = Recipient . STREAM
) . delete ( )
2021-01-14 21:44:56 +01:00
subs = gather_subscriptions_helper ( user_profile ) . subscriptions
2020-07-08 01:51:44 +02:00
# Checks that gather_subscriptions_helper will not return anything
# since there will not be any recipients, without crashing.
#
# This covers a rare corner case.
2021-05-17 05:41:32 +02:00
self . assert_length ( subs , 0 )