# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
# high-level documentation on how this system works.
import copy
import os
import shutil
import sys
import time
from io import StringIO
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
from unittest import mock

import ujson
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now as timezone_now

from zerver.lib.actions import (
    bulk_add_members_to_user_group,
    bulk_add_subscriptions,
    bulk_remove_subscriptions,
    check_add_realm_emoji,
    check_add_user_group,
    check_delete_user_group,
    check_send_message,
    check_send_typing_notification,
    do_add_alert_words,
    do_add_default_stream,
    do_add_reaction,
    do_add_reaction_legacy,
    do_add_realm_domain,
    do_add_realm_filter,
    do_add_streams_to_default_stream_group,
    do_add_submessage,
    do_change_avatar_fields,
    do_change_bot_owner,
    do_change_default_all_public_streams,
    do_change_default_events_register_stream,
    do_change_default_sending_stream,
    do_change_default_stream_group_description,
    do_change_default_stream_group_name,
    do_change_full_name,
    do_change_icon_source,
    do_change_logo_source,
    do_change_notification_settings,
    do_change_plan_type,
    do_change_realm_domain,
    do_change_stream_description,
    do_change_stream_invite_only,
    do_change_stream_message_retention_days,
    do_change_stream_post_policy,
    do_change_subscription_property,
    do_change_user_delivery_email,
    do_change_user_role,
    do_create_default_stream_group,
    do_create_multiuse_invite_link,
    do_create_user,
    do_deactivate_stream,
    do_deactivate_user,
    do_delete_messages,
    do_invite_users,
    do_mark_hotspot_as_read,
    do_mute_topic,
    do_reactivate_user,
    do_regenerate_api_key,
    do_remove_alert_words,
    do_remove_default_stream,
    do_remove_default_stream_group,
    do_remove_reaction,
    do_remove_reaction_legacy,
    do_remove_realm_domain,
    do_remove_realm_emoji,
    do_remove_realm_filter,
    do_remove_streams_from_default_stream_group,
    do_rename_stream,
    do_revoke_multi_use_invite,
    do_revoke_user_invite,
    do_set_realm_authentication_methods,
    do_set_realm_message_editing,
    do_set_realm_notifications_stream,
    do_set_realm_property,
    do_set_realm_signup_notifications_stream,
    do_set_user_display_setting,
    do_set_zoom_token,
    do_unmute_topic,
    do_update_embedded_data,
    do_update_message,
    do_update_message_flags,
    do_update_outgoing_webhook_service,
    do_update_pointer,
    do_update_user_custom_profile_data_if_changed,
    do_update_user_group_description,
    do_update_user_group_name,
    do_update_user_presence,
    do_update_user_status,
    log_event,
    lookup_default_stream_groups,
    notify_realm_custom_profile_fields,
    remove_members_from_user_group,
    try_update_realm_custom_profile_field,
)
from zerver.lib.bugdown import MentionData
from zerver.lib.events import (
    apply_events,
    fetch_initial_state_data,
    get_raw_user_data,
    post_process_state,
)
from zerver.lib.message import (
    MessageDict,
    UnreadMessagesResult,
    aggregate_unread_data,
    apply_unread_message_event,
    get_raw_unread_data,
    render_markdown,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    POSTRequestMock,
    create_dummy_file,
    get_subscription,
    get_test_image_file,
    queries_captured,
    reset_emails_in_zulip_realm,
    stdout_suppressed,
    stub_event_queue_user_events,
)
from zerver.lib.test_runner import slow
from zerver.lib.topic import ORIG_TOPIC, TOPIC_LINKS, TOPIC_NAME
from zerver.lib.topic_mutes import add_topic_mute
from zerver.lib.users import get_api_key
from zerver.lib.validator import (
    Validator,
    check_bool,
    check_dict,
    check_dict_only,
    check_float,
    check_int,
    check_int_in,
    check_list,
    check_none_or,
    check_string,
    check_tuple,
    check_url,
    equals,
)
from zerver.models import (
    Attachment,
    Message,
    MultiuseInvite,
    PreregistrationUser,
    Realm,
    RealmAuditLog,
    RealmDomain,
    Recipient,
    Service,
    Stream,
    Subscription,
    UserGroup,
    UserMessage,
    UserPresence,
    UserProfile,
    flush_per_request_caches,
    get_client,
    get_realm,
    get_stream,
    get_system_bot,
    get_user_by_delivery_email,
)
from zerver.tornado.event_queue import (
    allocate_client_descriptor,
    clear_client_event_queues_for_testing,
    get_client_info_for_message_event,
    process_message_event,
)
from zerver.tornado.views import get_events
from zerver.views.events_register import _default_all_public_streams, _default_narrow


class LogEventsTest(ZulipTestCase):
    def test_with_missing_event_log_dir_setting(self) -> None:
        with self.settings(EVENT_LOG_DIR=None):
            log_event(dict())

    def test_log_event_mkdir(self) -> None:
        dir_name = os.path.join(settings.TEST_WORKER_DIR, "test-log-dir")

        try:
            shutil.rmtree(dir_name)
        except OSError:  # nocoverage
            # assume it doesn't exist already
            pass
        self.assertFalse(os.path.exists(dir_name))
        with self.settings(EVENT_LOG_DIR=dir_name):
            event: Dict[str, int] = {}
            log_event(event)
        self.assertTrue(os.path.exists(dir_name))


class EventsEndpointTest(ZulipTestCase):
    def test_events_register_endpoint(self) -> None:
        # This test is intended to get minimal coverage on the
        # events_register code paths
        user = self.example_user("hamlet")
        with mock.patch('zerver.views.events_register.do_events_register', return_value={}):
            result = self.api_post(user, '/json/register')
        self.assert_json_success(result)

        with mock.patch('zerver.lib.events.request_event_queue', return_value=None):
            result = self.api_post(user, '/json/register')
        self.assert_json_error(result, "Could not allocate event queue")

        return_event_queue = '15:11'
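        # '15:11' is just an arbitrary stubbed queue_id for this test; a real
        # queue_id would come from Tornado's event queue allocator.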
        return_user_events: List[Dict[str, Any]] = []

        # Test that a call is made to deal with a returning soft-deactivated user.
        with mock.patch('zerver.lib.events.reactivate_user_if_soft_deactivated') as fa:
            with stub_event_queue_user_events(return_event_queue, return_user_events):
                result = self.api_post(user, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
                self.assertEqual(fa.call_count, 1)

        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(user, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], -1)
        self.assertEqual(result_dict['queue_id'], '15:11')

        return_event_queue = '15:12'
        return_user_events = [
            {
                'id': 6,
                'type': 'pointer',
                'pointer': 15,
            },
        ]
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(user, '/json/register', dict(event_types=ujson.dumps(['pointer'])))

        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:12')
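
        # fetch_event_types, when provided, determines which parts of the
        # initial state payload get fetched; event_types still controls
        # which events the new queue will receive going forward.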
        # Now test with `fetch_event_types` not matching the event
        return_event_queue = '15:13'
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(user, '/json/register',
                                   dict(event_types=ujson.dumps(['pointer']),
                                        fetch_event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that the message event types data is in there
        self.assertIn('max_message_id', result_dict)
        # Check that the pointer event types data is not in there
        self.assertNotIn('pointer', result_dict)
        self.assertEqual(result_dict['queue_id'], '15:13')

        # Now test with `fetch_event_types` matching the event
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(user, '/json/register',
                                   dict(fetch_event_types=ujson.dumps(['pointer']),
                                        event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that we didn't fetch the messages data
        self.assertNotIn('max_message_id', result_dict)
        # Check that the pointer data is in there, and is correctly
        # updated (preserving our atomicity guarantee), though of
        # course any future pointer events won't be distributed
        self.assertIn('pointer', result_dict)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:13')

    def test_tornado_endpoint(self) -> None:
        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=ujson.dumps(
                dict(
                    event=dict(
                        type='other',
                    ),
                    users=[self.example_user('hamlet').id],
                ),
            ),
        )
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_error(result, 'Access denied', status_code=403)

        post_data['secret'] = settings.SHARED_SECRET
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_success(result)


class GetEventsTest(ZulipTestCase):
    def tornado_call(self, view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
                     user_profile: UserProfile,
                     post_data: Dict[str, Any]) -> HttpResponse:
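        # Call a Tornado view function synchronously, bypassing the HTTP
        # long-polling machinery; these tests pass dont_block=True so that
        # get_events returns immediately.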
        request = POSTRequestMock(post_data, user_profile)
        return view_func(request, user_profile)

    def test_get_events(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        recipient_user_profile = self.example_user('othello')
        recipient_email = recipient_user_profile.email
        self.login_user(user_profile)

        result = self.tornado_call(get_events, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "client_gravatar": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]

        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "client_gravatar": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]

        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)

        local_id = '10.01'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )
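
        # Because the sender registered this queue with sender_queue_id, the
        # message event delivered back to it should echo our local_id; this
        # is how clients reconcile locally echoed messages with the server's
        # copy.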
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)

        last_event_id = events[0]["id"]
        local_id = '10.02'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )

        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)

        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])

    def test_get_events_narrow(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login_user(user_profile)

        def get_message(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
            result = self.tornado_call(
                get_events,
                user_profile,
                dict(
                    apply_markdown=ujson.dumps(apply_markdown),
                    client_gravatar=ujson.dumps(client_gravatar),
                    event_types=ujson.dumps(["message"]),
                    narrow=ujson.dumps([["stream", "denmark"]]),
                    user_client="website",
                    dont_block=ujson.dumps(True),
                ),
            )
            self.assert_json_success(result)
            queue_id = ujson.loads(result.content)["queue_id"]

            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 0)

            self.send_personal_message(user_profile, self.example_user("othello"), "hello")
            self.send_stream_message(user_profile, "Denmark", "**hello**")

            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 1)
            self.assertEqual(events[0]["type"], "message")
            return events[0]['message']

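        # Each combination below exercises one server-side rendering flag:
        # apply_markdown controls whether content arrives rendered as HTML,
        # and client_gravatar=True tells the server to omit computed
        # gravatar URLs (avatar_url comes back as None).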
        message = get_message(apply_markdown=False, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertIn('gravatar.com', message["avatar_url"])

        message = get_message(apply_markdown=True, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertIn('gravatar.com', message["avatar_url"])

        message = get_message(apply_markdown=False, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertEqual(message["avatar_url"], None)

        message = get_message(apply_markdown=True, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertEqual(message["avatar_url"], None)


class EventsRegisterTest(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')

    def create_bot(self, email: str, **extras: Any) -> Optional[UserProfile]:
        return self.create_test_bot(email, self.user_profile, **extras)

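    # Build a validator for a realm_bot/update event that changes a single
    # field on a bot (for example, its full_name or default_sending_stream).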
    def realm_bot_schema(self, field_name: str, check: Validator[object]) -> Validator[Dict[str, object]]:
        return self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('user_id', check_int),
                (field_name, check),
            ])),
        ])

    def do_test(self, action: Callable[[], object], event_types: Optional[List[str]]=None,
                include_subscribers: bool=True, state_change_expected: bool=True,
                notification_settings_null: bool=False, client_gravatar: bool=True,
                user_avatar_url_field_optional: bool=False, slim_presence: bool=False,
                num_events: int=1, bulk_message_deletion: bool=True) -> List[Dict[str, Any]]:
        '''
        Make sure we have a clean slate of client descriptors for these tests.
        If we don't do this, then certain failures will only manifest when you
        run multiple tests within a single test function.

        See also https://zulip.readthedocs.io/en/latest/subsystems/events-system.html#testing
        for details on the design of this test system.
        '''
        clear_client_event_queues_for_testing()

        client = allocate_client_descriptor(
            dict(user_profile_id=self.user_profile.id,
                 realm_id=self.user_profile.realm_id,
                 event_types=event_types,
                 client_type_name="website",
                 apply_markdown=True,
                 client_gravatar=client_gravatar,
                 slim_presence=slim_presence,
                 all_public_streams=False,
                 queue_timeout=600,
                 last_connection_time=time.time(),
                 narrow=[],
                 bulk_message_deletion=bulk_message_deletion)
        )

        # hybrid_state = initial fetch state + re-applying events triggered by our action
        # normal_state = do action then fetch at the end (the "normal" code path)
        hybrid_state = fetch_initial_state_data(
            self.user_profile, event_types, "",
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            slim_presence=slim_presence,
            include_subscribers=include_subscribers,
        )

        action()
        events = client.event_queue.contents()
        self.assertEqual(len(events), num_events)

        initial_state = copy.deepcopy(hybrid_state)
        post_process_state(self.user_profile, initial_state, notification_settings_null)
        before = ujson.dumps(initial_state)
        apply_events(hybrid_state, events, self.user_profile,
                     client_gravatar=client_gravatar,
                     slim_presence=slim_presence,
                     include_subscribers=include_subscribers)
        post_process_state(self.user_profile, hybrid_state, notification_settings_null)
        after = ujson.dumps(hybrid_state)

        if state_change_expected:
            if before == after:  # nocoverage
                print(ujson.dumps(initial_state, indent=2))
                print(events)
                raise AssertionError('Test does not exercise enough code -- events do not change state.')
        else:
            try:
                self.match_states(initial_state, copy.deepcopy(hybrid_state), events)
            except AssertionError:  # nocoverage
                raise AssertionError('Test is invalid--state actually does change here.')

        normal_state = fetch_initial_state_data(
            self.user_profile, event_types, "",
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            slim_presence=slim_presence,
            include_subscribers=include_subscribers,
        )
        post_process_state(self.user_profile, normal_state, notification_settings_null)
        self.match_states(hybrid_state, normal_state, events)
        return events

    def match_states(self, state1: Dict[str, Any], state2: Dict[str, Any],
                     events: List[Dict[str, Any]]) -> None:
        def normalize(state: Dict[str, Any]) -> None:
            # Sort subscriber lists and re-key subscription-related data so
            # that the comparison below is insensitive to ordering.
            for u in state['never_subscribed']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            for u in state['subscriptions']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
            state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
            if 'realm_bots' in state:
                state['realm_bots'] = {u['email']: u for u in state['realm_bots']}

        normalize(state1)
        normalize(state2)

        # If this assertion fails, we have unusual problems.
        self.assertEqual(state1.keys(), state2.keys())

        # The far more likely scenario is that some section of
        # our enormous payload does not get updated properly.  We
        # want the diff here to be developer-friendly, hence
        # the somewhat tedious code to provide useful output.
        if state1 != state2:  # nocoverage
            print('\n---States DO NOT MATCH---')
            print('\nEVENTS:\n')

            # Printing out the events is a big help to
            # developers.
            import json
            for event in events:
                print(json.dumps(event, indent=4))

            print('\nMISMATCHES:\n')
            for k in state1:
                if state1[k] != state2[k]:
                    print('\nkey = ' + k)
                    try:
                        self.assertEqual({k: state1[k]}, {k: state2[k]})
                    except AssertionError as e:
                        print(e)

            print('''
            NOTE:

            This is an advanced test that verifies how
            we apply events after fetching data.  If you
            do not know how to debug it, you can ask for
            help on chat.
            ''')

            sys.stdout.flush()
            raise AssertionError('Mismatching states')

    def check_events_dict(self, required_keys: List[Tuple[str, Validator[object]]]) -> Validator[Dict[str, object]]:
        required_keys.append(('id', check_int))

        # Raise AssertionError if `required_keys` contains duplicate items.
        keys = [key[0] for key in required_keys]
        self.assertEqual(len(keys), len(set(keys)), 'Duplicate items found in required_keys.')
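
        # For example, check_events_dict([('type', equals('pointer'))])
        # yields a validator that accepts exactly {'id': 5, 'type': 'pointer'}
        # and rejects events with missing or unexpected keys.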
        return check_dict_only(required_keys)

    def test_mentioned_send_message_events(self) -> None:
        user = self.example_user('hamlet')

        for i in range(3):
            content = 'mentioning... @**' + user.full_name + '** hello ' + str(i)
            self.do_test(
                lambda: self.send_stream_message(self.example_user('cordelia'),
                                                 "Verona",
                                                 content),
            )

    def test_wildcard_mentioned_send_message_events(self) -> None:
        for i in range(3):
            content = 'mentioning... @**all** hello ' + str(i)
            self.do_test(
                lambda: self.send_stream_message(self.example_user('cordelia'),
                                                 "Verona",
                                                 content),
            )

    def test_pm_send_message_events(self) -> None:
        self.do_test(
            lambda: self.send_personal_message(self.example_user('cordelia'),
                                               self.example_user('hamlet'),
                                               'hola'),
        )

    def test_huddle_send_message_events(self) -> None:
        huddle = [
            self.example_user('hamlet'),
            self.example_user('othello'),
        ]
        self.do_test(
            lambda: self.send_huddle_message(self.example_user('cordelia'),
                                             huddle,
                                             'hola'),
        )

    def test_stream_send_message_events(self) -> None:
        def get_checker(check_gravatar: Validator[Optional[str]]) -> Validator[Dict[str, object]]:
            schema_checker = self.check_events_dict([
                ('type', equals('message')),
                ('flags', check_list(None)),
                ('message', check_dict_only([
                    ('avatar_url', check_gravatar),
                    ('client', check_string),
                    ('content', check_string),
                    ('content_type', equals('text/html')),
                    ('display_recipient', check_string),
                    ('id', check_int),
                    ('is_me_message', check_bool),
                    ('reactions', check_list(None)),
                    ('recipient_id', check_int),
                    ('sender_realm_str', check_string),
                    ('sender_email', check_string),
                    ('sender_full_name', check_string),
                    ('sender_id', check_int),
                    ('sender_short_name', check_string),
                    ('stream_id', check_int),
                    (TOPIC_NAME, check_string),
                    (TOPIC_LINKS, check_list(None)),
                    ('submessages', check_list(None)),
                    ('timestamp', check_int),
                    ('type', check_string),
                ])),
            ])
            return schema_checker
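
        # With client_gravatar=False the server computes avatar_url (a
        # string); with client_gravatar=True it sends None so that clients
        # can compute gravatar URLs themselves.  Verify the schema both ways.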
        events = self.do_test(
            lambda: self.send_stream_message(self.example_user("hamlet"), "Verona", "hello"),
            client_gravatar=False,
        )
        schema_checker = get_checker(check_gravatar=check_string)
        schema_checker('events[0]', events[0])

        events = self.do_test(
            lambda: self.send_stream_message(self.example_user("hamlet"), "Verona", "hello"),
            client_gravatar=True,
        )
        schema_checker = get_checker(check_gravatar=equals(None))
        schema_checker('events[0]', events[0])

        # Verify message editing
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('edit_timestamp', check_int),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('prior_mention_user_ids', check_list(check_int)),
            ('mention_user_ids', check_list(check_int)),
            ('wildcard_mention_user_ids', check_list(check_int)),
            ('presence_idle_user_ids', check_list(check_int)),
            ('stream_push_user_ids', check_list(check_int)),
            ('stream_email_user_ids', check_list(check_int)),
2017-10-03 16:25:12 +02:00
            ('push_notify_user_ids', check_list(check_int)),
2014-03-11 15:14:32 +01:00
            ('orig_content', check_string),
            ('orig_rendered_content', check_string),
2018-11-10 16:21:14 +01:00
            (ORIG_TOPIC, check_string),
2017-04-20 17:31:41 +02:00
            ('prev_rendered_content_version', check_int),
2014-03-11 21:33:50 +01:00
            ('propagate_mode', check_string),
2014-03-11 15:14:32 +01:00
            ('rendered_content', check_string),
2014-03-11 14:40:22 +01:00
            ('stream_id', check_int),
2017-10-03 16:25:12 +02:00
            ('stream_name', check_string),
2018-11-10 16:21:14 +01:00
            (TOPIC_NAME, check_string),
            (TOPIC_LINKS, check_list(None)),
2017-02-20 00:23:42 +01:00
            ('user_id', check_int),
2018-01-21 19:27:36 +01:00
            ('is_me_message', check_bool),
2014-03-11 15:14:32 +01:00
        ])
2016-06-21 21:34:41 +02:00
        message = Message.objects.order_by('-id')[0]
2014-03-11 15:14:32 +01:00
        topic = 'new_topic'
        propagate_mode = 'change_all'
        content = 'new content'
2016-10-04 18:32:46 +02:00
        rendered_content = render_markdown(message, content)
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
        prior_mention_user_ids: Set[int] = set()
        mentioned_user_ids: Set[int] = set()
2019-11-28 11:26:57 +01:00
        mention_data = MentionData(
            realm_id=self.user_profile.realm_id,
            content=content,
        )
2017-10-03 16:25:12 +02:00
2017-02-21 19:35:17 +01:00
        events = self.do_test(
2020-02-19 01:38:34 +01:00
            lambda: do_update_message(self.user_profile, message, None, topic,
2020-06-03 16:44:57 +02:00
                                      propagate_mode, False, False, content, rendered_content,
2017-10-03 16:25:12 +02:00
                                      prior_mention_user_ids,
2019-11-28 11:26:57 +01:00
                                      mentioned_user_ids, mention_data),
2017-05-23 03:02:01 +02:00
            state_change_expected=True,
2017-02-21 19:35:17 +01:00
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2014-03-11 15:14:32 +01:00
2017-03-24 05:54:20 +01:00
        # Verify do_update_embedded_data
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2017-03-24 05:54:20 +01:00
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('rendered_content', check_string),
            ('sender', check_string),
        ])
        events = self.do_test(
            lambda: do_update_embedded_data(self.user_profile, message,
2020-04-09 21:51:58 +02:00
                                            "embed_content", "<p>embed_content</p>"),
2017-03-24 05:54:20 +01:00
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-03-24 05:54:20 +01:00
2017-11-05 10:51:25 +01:00
    def test_update_message_flags(self) -> None:
2017-03-24 03:19:23 +01:00
        # Test message flag update events
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
            ('all', check_bool),
2017-03-24 03:19:23 +01:00
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("add")),
        ])
2017-10-28 16:40:28 +02:00
        message = self.send_personal_message(
2020-03-07 11:43:05 +01:00
            self.example_user("cordelia"),
            self.example_user("hamlet"),
2017-10-28 16:40:28 +02:00
            "hello",
        )
2017-05-07 17:21:26 +02:00
        user_profile = self.example_user('hamlet')
2017-03-24 03:19:23 +01:00
        events = self.do_test(
2018-03-14 00:05:55 +01:00
            lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'starred', [message]),
2018-08-14 23:57:20 +02:00
            state_change_expected=True,
2017-03-24 03:19:23 +01:00
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
            ('all', check_bool),
2017-03-24 03:19:23 +01:00
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("remove")),
        ])
        events = self.do_test(
2018-03-14 00:05:55 +01:00
            lambda: do_update_message_flags(user_profile, get_client("website"), 'remove', 'starred', [message]),
2018-08-14 23:57:20 +02:00
            state_change_expected=True,
2017-03-24 03:19:23 +01:00
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-03-24 03:19:23 +01:00
2017-11-05 10:51:25 +01:00
    def test_update_read_flag_removes_unread_msg_ids(self) -> None:
2017-05-23 03:02:01 +02:00
        user_profile = self.example_user('hamlet')
2017-07-21 20:31:25 +02:00
        mention = '@**' + user_profile.full_name + '**'
        for content in ['hello', mention]:
2017-10-28 16:40:28 +02:00
            message = self.send_stream_message(
2020-03-07 11:43:05 +01:00
                self.example_user('cordelia'),
2017-07-21 20:31:25 +02:00
                "Verona",
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
    m = re.match(
        r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
    )
    if m:
        filename, row_str, col_str, err = m.groups()
        row, col = int(row_str), int(col_str)
        if filename == last_filename:
            assert last_row != row
        else:
            if last_filename is not None:
                with open(last_filename, "w") as f:
                    f.writelines(lines)
            with open(filename) as f:
                lines = f.readlines()
            last_filename = filename
        last_row = row
        line = lines[row - 1]
        if err in ["C812", "C815"]:
            lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
        elif err in ["C819"]:
            assert line[col - 2] == ","
            lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
    with open(last_filename, "w") as f:
        f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
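For reference, the regex above parses ANSI-colored flake8 output. A minimal
sketch of the kind of line it matches (the path and position here are
illustrative, not from a real run) and the groups it extracts:

import re

sample = "\x1b[35mflake8 |\x1b[0m \x1b[1;31msome/file.py:42:17: C812 missing trailing comma\n"
m = re.match(r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", sample)
# The four groups are the filename, row, column, and flake8-comma error code.
assert m is not None and m.groups() == ("some/file.py", "42", "17", "C812")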
2020-04-10 05:23:40 +02:00
                content,
2017-07-21 20:31:25 +02:00
            )
            self.do_test(
2018-03-14 00:05:55 +01:00
                lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'read', [message]),
2017-07-21 20:31:25 +02:00
                state_change_expected=True,
            )
2017-05-23 03:02:01 +02:00
2017-11-05 10:51:25 +01:00
    def test_send_message_to_existing_recipient(self) -> None:
2020-03-07 11:43:05 +01:00
        sender = self.example_user('cordelia')
2017-10-28 16:40:28 +02:00
        self.send_stream_message(
2020-03-07 11:43:05 +01:00
            sender,
2017-05-23 03:02:01 +02:00
            "Verona",
2020-04-10 05:23:40 +02:00
            "hello 1",
2017-05-23 03:02:01 +02:00
        )
        self.do_test(
2020-03-07 11:43:05 +01:00
            lambda: self.send_stream_message(sender, "Verona", "hello 2"),
2017-05-23 03:02:01 +02:00
            state_change_expected=True,
        )
2017-11-05 10:51:25 +01:00
    def test_add_reaction_legacy(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2017-03-23 04:04:19 +01:00
            ('type', equals('reaction')),
            ('op', equals('add')),
            ('message_id', check_int),
            ('emoji_name', check_string),
2017-05-01 07:29:56 +02:00
            ('emoji_code', check_string),
            ('reaction_type', check_string),
2020-04-22 23:24:28 +02:00
            ('user_id', check_int),
2017-04-20 17:31:41 +02:00
            ('user', check_dict_only([
2017-03-23 04:04:19 +01:00
                ('email', check_string),
                ('full_name', check_string),
2020-04-10 05:23:40 +02:00
                ('user_id', check_int),
2017-03-23 04:04:19 +01:00
            ])),
        ])
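        # For reference, a hypothetical event that would satisfy this checker
        # (illustrative values; the checker asserts shape, not content):
        #     {'type': 'reaction', 'op': 'add', 'message_id': 42,
        #      'emoji_name': 'tada', 'emoji_code': '1f389',
        #      'reaction_type': 'unicode_emoji', 'user_id': 10,
        #      'user': {'email': 'hamlet@zulip.com',
        #               'full_name': 'King Hamlet', 'user_id': 10}}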
2020-03-07 11:43:05 +01:00
        message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
2017-03-23 04:04:19 +01:00
        message = Message.objects.get(id=message_id)
        events = self.do_test(
2017-10-16 22:02:20 +02:00
            lambda: do_add_reaction_legacy(
2017-03-23 04:04:19 +01:00
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-03-23 04:04:19 +01:00
2017-11-05 10:51:25 +01:00
    def test_remove_reaction_legacy(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2017-03-23 04:04:19 +01:00
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
2017-05-01 07:29:56 +02:00
            ('emoji_code', check_string),
            ('reaction_type', check_string),
2020-04-22 23:24:28 +02:00
            ('user_id', check_int),
2017-04-20 17:31:41 +02:00
            ('user', check_dict_only([
2017-03-23 04:04:19 +01:00
                ('email', check_string),
                ('full_name', check_string),
2020-04-10 05:23:40 +02:00
                ('user_id', check_int),
2017-03-23 04:04:19 +01:00
            ])),
        ])
2020-03-07 11:43:05 +01:00
        message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
2017-03-23 04:04:19 +01:00
        message = Message.objects.get(id=message_id)
2017-10-16 22:02:20 +02:00
        do_add_reaction_legacy(self.user_profile, message, "tada")
2017-03-23 04:04:19 +01:00
        events = self.do_test(
2017-10-16 22:05:00 +02:00
            lambda: do_remove_reaction_legacy(
2017-03-23 04:04:19 +01:00
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-03-23 04:04:19 +01:00
2017-11-05 10:51:25 +01:00
    def test_add_reaction(self) -> None:
2017-10-08 09:34:59 +02:00
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('add')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
2020-04-22 23:24:28 +02:00
            ('user_id', check_int),
2017-10-08 09:34:59 +02:00
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
2020-04-10 05:23:40 +02:00
                ('user_id', check_int),
2017-10-08 09:34:59 +02:00
            ])),
        ])
2020-03-07 11:43:05 +01:00
        message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
2017-10-08 09:34:59 +02:00
        message = Message.objects.get(id=message_id)
        events = self.do_test(
            lambda: do_add_reaction(
                self.user_profile, message, "tada", "1f389", "unicode_emoji"),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-10-08 09:34:59 +02:00
2018-02-12 10:53:36 +01:00
    def test_add_submessage(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('submessage')),
            ('message_id', check_int),
2018-05-30 22:41:15 +02:00
            ('submessage_id', check_int),
2018-02-12 10:53:36 +01:00
            ('sender_id', check_int),
            ('msg_type', check_string),
2018-05-30 21:12:16 +02:00
            ('content', check_string),
2018-02-12 10:53:36 +01:00
        ])
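        # For reference, a hypothetical event that would satisfy this checker
        # (illustrative values matching the do_add_submessage call below):
        #     {'type': 'submessage', 'message_id': 42, 'submessage_id': 1,
        #      'sender_id': 10, 'msg_type': 'whatever', 'content': '"stuff"'}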
        cordelia = self.example_user('cordelia')
        stream_name = 'Verona'
        message_id = self.send_stream_message(
2020-03-07 11:43:05 +01:00
            sender=cordelia,
2018-02-12 10:53:36 +01:00
            stream_name=stream_name,
        )
        events = self.do_test(
            lambda: do_add_submessage(
2018-11-02 23:33:54 +01:00
                realm=cordelia.realm,
2018-02-12 10:53:36 +01:00
                sender_id=cordelia.id,
                message_id=message_id,
                msg_type='whatever',
                content='"stuff"',
            ),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2018-02-12 10:53:36 +01:00
2017-11-05 10:51:25 +01:00
    def test_remove_reaction(self) -> None:
2017-10-08 09:34:59 +02:00
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
2020-04-22 23:24:28 +02:00
            ('user_id', check_int),
2017-10-08 09:34:59 +02:00
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
2020-04-10 05:23:40 +02:00
                ('user_id', check_int),
2017-10-08 09:34:59 +02:00
            ])),
        ])
2020-03-07 11:43:05 +01:00
        message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
2017-10-08 09:34:59 +02:00
        message = Message.objects.get(id=message_id)
        do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
        events = self.do_test(
            lambda: do_remove_reaction(
                self.user_profile, message, "1f389", "unicode_emoji"),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-10-08 09:34:59 +02:00
2017-12-14 22:22:17 +01:00
    def test_invite_user_event(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Scotland"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        events = self.do_test(
            lambda: do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-12-14 22:22:17 +01:00
2019-02-15 19:09:25 +01:00
    def test_create_multiuse_invite_event(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        events = self.do_test(
            lambda: do_create_multiuse_invite_link(self.user_profile, PreregistrationUser.INVITE_AS['MEMBER'], streams),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2019-02-15 19:09:25 +01:00
2017-12-14 22:22:17 +01:00
    def test_revoke_user_invite_event(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
        prereg_users = PreregistrationUser.objects.filter(referred_by__realm=self.user_profile.realm)
        events = self.do_test(
            lambda: do_revoke_user_invite(prereg_users[0]),
            state_change_expected=False,
2019-02-15 19:09:25 +01:00
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2019-02-15 19:09:25 +01:00
    def test_revoke_multiuse_invite_event(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        do_create_multiuse_invite_link(self.user_profile, PreregistrationUser.INVITE_AS['MEMBER'], streams)
        multiuse_object = MultiuseInvite.objects.get()
        events = self.do_test(
            lambda: do_revoke_multi_use_invite(multiuse_object),
            state_change_expected=False,
2017-12-14 22:22:17 +01:00
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-12-14 22:22:17 +01:00
    def test_invitation_accept_invite_event(self) -> None:
2020-03-12 14:17:25 +01:00
        reset_emails_in_zulip_realm()
2017-12-14 22:22:17 +01:00
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Scotland"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
2019-11-23 18:15:53 +01:00
        prereg_user = PreregistrationUser.objects.get(email="foo@zulip.com")
2017-12-14 22:22:17 +01:00
        events = self.do_test(
            lambda: do_create_user('foo@zulip.com', 'password', self.user_profile.realm,
2019-11-23 18:15:53 +01:00
                                   'full name', 'short name', prereg_user=prereg_user),
2017-12-14 22:22:17 +01:00
            state_change_expected=True,
            num_events=5,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[4]', events[4])
2017-12-14 22:22:17 +01:00
2017-11-05 10:51:25 +01:00
    def test_typing_events(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2017-03-18 03:50:41 +01:00
            ('type', equals('typing')),
            ('op', equals('start')),
2017-04-20 17:31:41 +02:00
            ('sender', check_dict_only([
2017-03-18 03:50:41 +01:00
                ('email', check_string),
                ('user_id', check_int)])),
2017-04-20 17:31:41 +02:00
            ('recipients', check_list(check_dict_only([
2017-03-18 03:50:41 +01:00
                ('email', check_string),
                ('user_id', check_int),
            ]))),
        ])
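        # For reference, a hypothetical event that would satisfy this checker
        # (illustrative users; only the shape is checked):
        #     {'type': 'typing', 'op': 'start',
        #      'sender': {'email': 'hamlet@zulip.com', 'user_id': 10},
        #      'recipients': [{'email': 'cordelia@zulip.com', 'user_id': 8}]}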
        events = self.do_test(
            lambda: check_send_typing_notification(
2020-02-22 13:38:09 +01:00
                self.user_profile, [self.example_user("cordelia").id], "start"),
2017-03-18 03:50:41 +01:00
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-03-18 03:50:41 +01:00
2017-11-05 10:51:25 +01:00
    def test_custom_profile_fields_events(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2017-03-17 10:07:22 +01:00
            ('type', equals('custom_profile_fields')),
2017-12-11 07:24:44 +01:00
            ('op', equals('add')),
2017-04-20 17:31:41 +02:00
            ('fields', check_list(check_dict_only([
2018-03-20 06:35:35 +01:00
                ('id', check_int),
2017-03-17 10:07:22 +01:00
                ('type', check_int),
                ('name', check_string),
2018-03-31 07:30:24 +02:00
                ('hint', check_string),
2018-04-08 09:50:05 +02:00
                ('field_data', check_string),
2018-04-08 18:13:37 +02:00
                ('order', check_int),
2017-03-17 10:07:22 +01:00
            ]))),
        ])
        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
2017-12-11 07:24:44 +01:00
                self.user_profile.realm, 'add'),
2017-03-17 10:07:22 +01:00
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2018-03-31 07:30:24 +02:00
        realm = self.user_profile.realm
        field = realm.customprofilefield_set.get(realm=realm, name='Biography')
        name = field.name
        hint = 'Biography of the user'
        try_update_realm_custom_profile_field(realm, field, name, hint=hint)
        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-03-17 10:07:22 +01:00
2018-07-09 11:49:08 +02:00
    def test_custom_profile_field_data_events(self) -> None:
2018-12-31 07:45:33 +01:00
        schema_checker_basic = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('custom_profile_field', check_dict([
                    ('id', check_int),
                    ('value', check_none_or(check_string)),
2019-01-18 11:12:59 +01:00
                ])),
2018-12-31 07:45:33 +01:00
            ])),
        ])
        schema_checker_with_rendered_value = self.check_events_dict([
2018-07-09 11:49:08 +02:00
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
2018-11-06 10:05:31 +01:00
                ('custom_profile_field', check_dict([
2018-07-09 11:49:08 +02:00
                    ('id', check_int),
                    ('value', check_none_or(check_string)),
2018-12-31 07:45:33 +01:00
                    ('rendered_value', check_none_or(check_string)),
2019-01-18 11:12:59 +01:00
                ])),
2018-07-09 11:49:08 +02:00
            ])),
        ])
2019-03-07 21:29:16 +01:00
        field_id = self.user_profile.realm.customprofilefield_set.get(
            realm=self.user_profile.realm, name='Biography').id
2018-07-09 11:49:08 +02:00
        field = {
            "id": field_id,
            "value": "New value",
        }
2019-10-01 04:22:50 +02:00
        events = self.do_test(lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field]))
2020-06-21 02:36:20 +02:00
        schema_checker_with_rendered_value('events[0]', events[0])
2018-07-09 11:49:08 +02:00
2018-08-09 14:02:32 +02:00
        # Test that we pass the correct stringified value in the custom-user-field data event
2019-03-07 21:29:16 +01:00
        field_id = self.user_profile.realm.customprofilefield_set.get(
            realm=self.user_profile.realm, name='Mentor').id
2018-08-09 14:02:32 +02:00
        field = {
            "id": field_id,
            "value": [self.example_user("ZOE").id],
        }
2019-10-01 04:22:50 +02:00
        events = self.do_test(lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field]))
2020-06-21 02:36:20 +02:00
        schema_checker_basic('events[0]', events[0])
2018-08-09 14:02:32 +02:00
2017-11-05 10:51:25 +01:00
    def test_presence_events(self) -> None:
2020-02-03 17:09:18 +01:00
        fields = [
2017-04-25 11:50:30 +02:00
            ('type', equals('presence')),
2020-02-03 16:25:13 +01:00
            ('user_id', check_int),
2017-04-25 11:50:30 +02:00
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('website', check_dict_only([
                    ('status', equals('active')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
2020-02-03 17:09:18 +01:00
        ]
        email_field = ('email', check_string)
2020-02-02 17:29:05 +01:00
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"),
            timezone_now(), UserPresence.ACTIVE),
            slim_presence=False)
2020-02-03 17:09:18 +01:00
        schema_checker = self.check_events_dict(fields + [email_field])
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2020-02-02 17:29:05 +01:00
2017-04-25 11:50:30 +02:00
        events = self.do_test(lambda: do_update_user_presence(
2020-02-02 17:29:05 +01:00
            self.example_user('cordelia'), get_client("website"),
            timezone_now(), UserPresence.ACTIVE),
            slim_presence=True)
2020-02-03 17:09:18 +01:00
        schema_checker = self.check_events_dict(fields)
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-04-25 11:50:30 +02:00
2017-11-05 10:51:25 +01:00
    def test_presence_events_multiple_clients(self) -> None:
2017-04-25 11:50:30 +02:00
        schema_checker_android = self.check_events_dict([
            ('type', equals('presence')),
2017-03-24 05:26:32 +01:00
            ('email', check_string),
2020-02-03 16:25:13 +01:00
            ('user_id', check_int),
2017-04-25 11:50:30 +02:00
            ('server_timestamp', check_float),
2017-04-20 17:31:41 +02:00
            ('presence', check_dict_only([
2017-04-25 11:50:30 +02:00
                ('ZulipAndroid/1.0', check_dict_only([
                    ('status', equals('idle')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
2017-03-24 05:26:32 +01:00
            ])),
        ])
2020-02-03 17:09:18 +01:00
2020-03-10 11:48:26 +01:00
        self.api_post(self.user_profile, "/api/v1/users/me/presence", {'status': 'idle'},
2017-12-14 19:02:31 +01:00
                      HTTP_USER_AGENT="ZulipAndroid/1.0")
2017-04-25 11:50:30 +02:00
        self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("ZulipAndroid/1.0"), timezone_now(), UserPresence.IDLE))
2020-06-21 02:36:20 +02:00
        schema_checker_android('events[0]', events[0])
2017-03-24 05:26:32 +01:00
2017-11-05 10:51:25 +01:00
    def test_pointer_events(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2014-03-06 17:07:43 +01:00
            ('type', equals('pointer')),
2020-04-10 05:23:40 +02:00
            ('pointer', check_int),
2014-03-06 17:07:43 +01:00
        ])
2018-03-14 00:01:04 +01:00
        events = self.do_test(lambda: do_update_pointer(self.user_profile, get_client("website"), 1500))
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2014-01-31 23:23:39 +01:00
2017-11-05 10:51:25 +01:00
    def test_register_events(self) -> None:
2017-04-20 17:31:41 +02:00
        realm_user_add_checker = self.check_events_dict([
2014-03-06 17:07:43 +01:00
            ('type', equals('realm_user')),
            ('op', equals('add')),
2017-04-20 17:31:41 +02:00
            ('person', check_dict_only([
                ('user_id', check_int),
2014-03-06 17:07:43 +01:00
                ('email', check_string),
2017-11-02 21:40:12 +01:00
                ('avatar_url', check_none_or(check_string)),
2020-04-07 20:09:30 +02:00
                ('avatar_version', check_int),
2014-03-06 17:07:43 +01:00
                ('full_name', check_string),
                ('is_admin', check_bool),
2020-06-01 21:47:18 +02:00
                ('is_owner', check_bool),
2014-03-06 17:07:43 +01:00
                ('is_bot', check_bool),
2018-10-19 12:29:46 +02:00
                ('is_guest', check_bool),
2020-01-31 23:41:41 +01:00
                ('is_active', check_bool),
2018-07-31 19:53:56 +02:00
                ('profile_data', check_dict_only([])),
2017-04-20 17:31:41 +02:00
                ('timezone', check_string),
2018-05-15 13:00:52 +02:00
                ('date_joined', check_string),
2014-03-06 17:07:43 +01:00
            ])),
        ])
2017-01-04 09:00:26 +01:00
        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
2017-03-24 05:49:23 +01:00
        self.assert_length(events, 1)
2020-06-21 02:36:20 +02:00
        realm_user_add_checker('events[0]', events[0])
2018-12-06 23:17:46 +01:00
        new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
2020-03-12 14:17:25 +01:00
        self.assertEqual(new_user_profile.delivery_email, "test1@zulip.com")
2018-12-06 23:17:46 +01:00
    def test_register_events_email_address_visibility(self) -> None:
        realm_user_add_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('email', check_string),
                ('avatar_url', check_none_or(check_string)),
2020-04-07 20:09:30 +02:00
                ('avatar_version', check_int),
2018-12-06 23:17:46 +01:00
                ('full_name', check_string),
2020-01-31 23:41:41 +01:00
                ('is_active', check_bool),
2018-12-06 23:17:46 +01:00
                ('is_admin', check_bool),
2020-06-01 21:47:18 +02:00
                ('is_owner', check_bool),
2018-12-06 23:17:46 +01:00
                ('is_bot', check_bool),
                ('is_guest', check_bool),
                ('profile_data', check_dict_only([])),
                ('timezone', check_string),
                ('date_joined', check_string),
            ])),
        ])
        do_set_realm_property(self.user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
        self.assert_length(events, 1)
2020-06-21 02:36:20 +02:00
        realm_user_add_checker('events[0]', events[0])
2018-12-06 23:17:46 +01:00
        new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
2020-06-10 06:41:04 +02:00
        self.assertEqual(new_user_profile.email, f"user{new_user_profile.id}@zulip.testserver")
2014-01-31 23:23:39 +01:00
2017-11-05 10:51:25 +01:00
    def test_alert_words_events(self) -> None:
2017-04-20 17:31:41 +02:00
        alert_words_checker = self.check_events_dict([
2014-03-06 17:07:43 +01:00
            ('type', equals('alert_words')),
            ('alert_words', check_list(check_string)),
        ])
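        # For reference, a hypothetical event that would satisfy this checker:
        #     {'type': 'alert_words', 'alert_words': ['alert_word']}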
        events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
2020-06-21 02:36:20 +02:00
        alert_words_checker('events[0]', events[0])
2014-03-06 17:07:43 +01:00
        events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
2020-06-21 02:36:20 +02:00
        alert_words_checker('events[0]', events[0])
2014-01-31 23:23:39 +01:00
2018-12-18 17:17:08 +01:00
    def test_away_events(self) -> None:
        checker = self.check_events_dict([
            ('type', equals('user_status')),
            ('user_id', check_int),
            ('away', check_bool),
2019-01-21 19:06:03 +01:00
            ('status_text', check_string),
2018-12-18 17:17:08 +01:00
        ])
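        # For reference, a hypothetical event that would satisfy this checker
        # (illustrative values matching the do_update_user_status call below):
        #     {'type': 'user_status', 'user_id': 10, 'away': True,
        #      'status_text': 'out to lunch'}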
        client = get_client("website")
2019-01-21 18:19:59 +01:00
        events = self.do_test(lambda: do_update_user_status(user_profile=self.user_profile,
                                                            away=True,
2019-01-21 19:06:03 +01:00
                                                            status_text='out to lunch',
2019-01-21 18:19:59 +01:00
                                                            client_id=client.id))
2020-06-21 02:36:20 +02:00
        checker('events[0]', events[0])
2018-12-18 17:17:08 +01:00
2019-01-21 18:19:59 +01:00
        events = self.do_test(lambda: do_update_user_status(user_profile=self.user_profile,
                                                            away=False,
2019-01-21 19:06:03 +01:00
                                                            status_text='',
2019-01-21 18:19:59 +01:00
                                                            client_id=client.id))
2020-06-21 02:36:20 +02:00
        checker('events[0]', events[0])
2018-12-18 17:17:08 +01:00
2017-11-14 07:31:31 +01:00
    def test_user_group_events(self) -> None:
        user_group_add_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('add')),
            ('group', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('members', check_list(check_int)),
                ('description', check_string),
            ])),
        ])
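        # For reference, a hypothetical event that would satisfy this checker
        # (illustrative ids; only the shape is checked):
        #     {'type': 'user_group', 'op': 'add',
        #      'group': {'id': 1, 'name': 'backend', 'members': [12],
        #                'description': 'Backend team'}}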
        othello = self.example_user('othello')
2019-03-07 21:29:16 +01:00
        events = self.do_test(lambda: check_add_user_group(self.user_profile.realm,
                                                           'backend', [othello],
2017-11-14 07:31:31 +01:00
                                                           'Backend team'))
2020-06-21 02:36:20 +02:00
        user_group_add_checker('events[0]', events[0])
2017-11-14 07:31:31 +01:00
2017-11-14 08:00:18 +01:00
        # Test name update
        user_group_update_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('update')),
            ('group_id', check_int),
            ('data', check_dict_only([
                ('name', check_string),
            ])),
        ])
        backend = UserGroup.objects.get(name='backend')
        events = self.do_test(lambda: do_update_user_group_name(backend, 'backendteam'))
2020-06-21 02:36:20 +02:00
        user_group_update_checker('events[0]', events[0])
2017-11-14 08:00:18 +01:00
2017-11-14 08:00:53 +01:00
        # Test description update
        user_group_update_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('update')),
            ('group_id', check_int),
            ('data', check_dict_only([
                ('description', check_string),
            ])),
        ])
        description = "Backend team to deal with backend code."
        events = self.do_test(lambda: do_update_user_group_description(backend, description))
2020-06-21 02:36:20 +02:00
        user_group_update_checker('events[0]', events[0])
2017-11-14 08:00:53 +01:00
2017-11-14 08:01:39 +01:00
        # Test add members
        user_group_add_member_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('add_members')),
            ('group_id', check_int),
            ('user_ids', check_list(check_int)),
        ])
        hamlet = self.example_user('hamlet')
        events = self.do_test(lambda: bulk_add_members_to_user_group(backend, [hamlet]))
2020-06-21 02:36:20 +02:00
        user_group_add_member_checker('events[0]', events[0])
2017-11-14 08:01:39 +01:00
2017-11-14 08:01:50 +01:00
        # Test remove members
        user_group_remove_member_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('remove_members')),
            ('group_id', check_int),
            ('user_ids', check_list(check_int)),
        ])
        hamlet = self.example_user('hamlet')
        events = self.do_test(lambda: remove_members_from_user_group(backend, [hamlet]))
2020-06-21 02:36:20 +02:00
        user_group_remove_member_checker('events[0]', events[0])
2017-11-14 08:01:50 +01:00
2017-11-15 08:09:49 +01:00
        # Test delete event
        user_group_remove_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('remove')),
            ('group_id', check_int),
        ])
2018-02-19 13:38:18 +01:00
        events = self.do_test(lambda: check_delete_user_group(backend.id, othello))
2020-06-21 02:36:20 +02:00
        user_group_remove_checker('events[0]', events[0])
2017-11-15 08:09:49 +01:00
2017-11-05 10:51:25 +01:00
    def test_default_stream_groups_events(self) -> None:
2017-11-01 18:20:34 +01:00
        default_stream_groups_checker = self.check_events_dict([
            ('type', equals('default_stream_groups')),
            ('default_stream_groups', check_list(check_dict_only([
                ('name', check_string),
2017-11-14 20:33:09 +01:00
                ('id', check_int),
2017-11-14 20:51:34 +01:00
                ('description', check_string),
2017-11-01 18:20:34 +01:00
                ('streams', check_list(check_dict_only([
                    ('description', check_string),
2019-01-11 13:48:22 +01:00
                    ('rendered_description', check_string),
2017-11-01 18:20:34 +01:00
                    ('invite_only', check_bool),
2019-04-07 20:29:25 +02:00
                    ('is_web_public', check_bool),
2018-05-14 12:06:25 +02:00
                    ('is_announcement_only', check_bool),
2020-02-04 21:50:55 +01:00
                    ('stream_post_policy', check_int_in(Stream.STREAM_POST_POLICY_TYPES)),
2020-06-14 18:57:02 +02:00
                    ('message_retention_days', check_none_or(check_int)),
2017-11-01 18:20:34 +01:00
                    ('name', check_string),
2018-05-07 23:14:15 +02:00
                    ('stream_id', check_int),
2019-03-04 17:50:49 +01:00
                    ('first_message_id', check_none_or(check_int)),
2018-05-07 23:14:15 +02:00
                    ('history_public_to_subscribers', check_bool)]))),
2017-11-01 18:20:34 +01:00
            ]))),
        ])
        streams = []
        for stream_name in ["Scotland", "Verona", "Denmark"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
2017-11-14 20:51:34 +01:00
        events = self.do_test(lambda: do_create_default_stream_group(
            self.user_profile.realm, "group1", "This is group1", streams))
2020-06-21 02:36:20 +02:00
        default_stream_groups_checker('events[0]', events[0])
2017-11-01 18:20:34 +01:00
2017-11-14 20:33:09 +01:00
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
2017-11-01 18:20:34 +01:00
        venice_stream = get_stream("Venice", self.user_profile.realm)
        events = self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
2017-11-14 20:33:09 +01:00
                                                                             group, [venice_stream]))
2020-06-21 02:36:20 +02:00
        default_stream_groups_checker('events[0]', events[0])
2017-11-01 18:20:34 +01:00
        events = self.do_test(lambda: do_remove_streams_from_default_stream_group(self.user_profile.realm,
2017-11-14 20:33:09 +01:00
                                                                                  group, [venice_stream]))
2020-06-21 02:36:20 +02:00
        default_stream_groups_checker('events[0]', events[0])
2017-11-01 18:20:34 +01:00
2017-11-14 20:51:34 +01:00
        events = self.do_test(lambda: do_change_default_stream_group_description(self.user_profile.realm,
                                                                                 group, "New description"))
2020-06-21 02:36:20 +02:00
        default_stream_groups_checker('events[0]', events[0])
2017-11-14 20:51:34 +01:00
2017-11-14 21:06:02 +01:00
        events = self.do_test(lambda: do_change_default_stream_group_name(self.user_profile.realm,
                                                                          group, "New Group Name"))
2020-06-21 02:36:20 +02:00
        default_stream_groups_checker('events[0]', events[0])
2017-11-14 21:06:02 +01:00
2017-11-14 20:33:09 +01:00
        events = self.do_test(lambda: do_remove_default_stream_group(self.user_profile.realm, group))
2020-06-21 02:36:20 +02:00
        default_stream_groups_checker('events[0]', events[0])
2017-11-01 18:20:34 +01:00
2019-03-01 01:26:57 +01:00
    def test_default_stream_group_events_guest(self) -> None:
        streams = []
        for stream_name in ["Scotland", "Verona", "Denmark"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        do_create_default_stream_group(self.user_profile.realm, "group1",
                                       "This is group1", streams)
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
2020-05-21 00:13:06 +02:00
        do_change_user_role(self.user_profile, UserProfile.ROLE_GUEST)
2019-03-01 01:26:57 +01:00
        venice_stream = get_stream("Venice", self.user_profile.realm)
        self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
                                                                    group, [venice_stream]),
                     state_change_expected=False, num_events=0)
2017-11-05 10:51:25 +01:00
    def test_default_streams_events(self) -> None:
2017-04-20 17:31:41 +02:00
        default_streams_checker = self.check_events_dict([
2016-05-20 22:08:42 +02:00
            ('type', equals('default_streams')),
2020-06-21 02:36:20 +02:00
            ('default_streams', check_list(check_dict([
2016-05-20 22:08:42 +02:00
                ('description', check_string),
                ('invite_only', check_bool),
                ('name', check_string),
                ('stream_id', check_int),
            ]))),
        ])
2017-01-30 04:23:08 +01:00
        stream = get_stream("Scotland", self.user_profile.realm)
        events = self.do_test(lambda: do_add_default_stream(stream))
2020-06-21 02:36:20 +02:00
        default_streams_checker('events[0]', events[0])
2017-01-30 04:25:40 +01:00
        events = self.do_test(lambda: do_remove_default_stream(stream))
2020-06-21 02:36:20 +02:00
        default_streams_checker('events[0]', events[0])
2016-05-20 22:08:42 +02:00
2019-03-01 01:26:57 +01:00
    def test_default_streams_events_guest(self) -> None:
2020-05-21 00:13:06 +02:00
        do_change_user_role(self.user_profile, UserProfile.ROLE_GUEST)
2019-03-01 01:26:57 +01:00
        stream = get_stream("Scotland", self.user_profile.realm)
        self.do_test(lambda: do_add_default_stream(stream),
                     state_change_expected=False, num_events=0)
        self.do_test(lambda: do_remove_default_stream(stream),
                     state_change_expected=False, num_events=0)
2017-11-05 10:51:25 +01:00
    def test_muted_topics_events(self) -> None:
2017-04-20 17:31:41 +02:00
        muted_topics_checker = self.check_events_dict([
2014-03-06 17:07:43 +01:00
            ('type', equals('muted_topics')),
2020-06-24 16:38:35 +02:00
            ('muted_topics', check_list(check_tuple([
                check_string,  # stream name
                check_string,  # topic name
                check_int,  # timestamp
            ]))),
2014-03-06 17:07:43 +01:00
        ])
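        # For reference, a hypothetical event that would satisfy this checker,
        # with one (stream name, topic name, timestamp) entry (illustrative values):
        #     {'type': 'muted_topics',
        #      'muted_topics': [['Denmark', 'topic', 1593000000]]}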
2017-08-30 02:19:34 +02:00
        stream = get_stream('Denmark', self.user_profile.realm)
2020-02-18 17:25:43 +01:00
        recipient = stream.recipient
2017-08-30 02:19:34 +02:00
        events = self.do_test(lambda: do_mute_topic(
            self.user_profile, stream, recipient, "topic"))
2020-06-21 02:36:20 +02:00
        muted_topics_checker('events[0]', events[0])
2017-03-24 05:32:50 +01:00
2017-08-30 02:19:34 +02:00
        events = self.do_test(lambda: do_unmute_topic(
            self.user_profile, stream, "topic"))
2020-06-21 02:36:20 +02:00
        muted_topics_checker('events[0]', events[0])
2017-03-24 05:32:50 +01:00
2017-11-05 10:51:25 +01:00
    def test_change_avatar_fields(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2017-02-21 21:37:16 +01:00
            ('type', equals('realm_user')),
            ('op', equals('update')),
2017-04-20 17:31:41 +02:00
            ('person', check_dict_only([
2017-02-21 21:37:16 +01:00
                ('avatar_url', check_string),
2017-10-21 16:33:07 +02:00
                ('avatar_url_medium', check_string),
2020-04-07 20:09:30 +02:00
                ('avatar_version', check_int),
2017-10-21 16:33:07 +02:00
                ('avatar_source', check_string),
2020-05-12 20:28:53 +02:00
                ('user_id', check_int),
2017-02-21 21:37:16 +01:00
            ])),
        ])
        events = self.do_test(
            lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_USER),
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2017-02-21 21:37:16 +01:00
2018-02-05 21:42:54 +01:00
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
2020-04-07 20:09:30 +02:00
                ('avatar_source', check_string),
2018-02-05 21:42:54 +01:00
                ('avatar_url', check_none_or(check_string)),
                ('avatar_url_medium', check_none_or(check_string)),
2020-04-07 20:09:30 +02:00
                ('avatar_version', check_int),
                ('user_id', check_int),
2018-02-05 21:42:54 +01:00
            ])),
        ])
        events = self.do_test(
            lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_GRAVATAR),
        )
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2018-02-05 21:42:54 +01:00
2017-11-05 10:51:25 +01:00
    def test_change_full_name(self) -> None:
2017-04-20 17:31:41 +02:00
        schema_checker = self.check_events_dict([
2014-03-06 17:07:43 +01:00
            ('type', equals('realm_user')),
            ('op', equals('update')),
2017-04-20 17:31:41 +02:00
            ('person', check_dict_only([
2014-03-06 17:07:43 +01:00
                ('full_name', check_string),
2017-04-20 17:31:41 +02:00
                ('user_id', check_int),
2014-03-06 17:07:43 +01:00
            ])),
        ])
2017-04-07 07:28:28 +02:00
        events = self.do_test(lambda: do_change_full_name(self.user_profile, 'Sir Hamlet', self.user_profile))
2020-06-21 02:36:20 +02:00
        schema_checker('events[0]', events[0])
2014-01-31 23:23:39 +01:00
2018-08-02 08:47:13 +02:00
    def test_change_user_delivery_email_email_address_visibility_admins(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('delivery_email', check_string),
                ('user_id', check_int),
            ])),
        ])
        avatar_schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('avatar_source', check_string),
                ('avatar_url', check_string),
                ('avatar_url_medium', check_string),
                ('avatar_version', check_int),
                ('user_id', check_int),
            ])),
        ])
        do_set_realm_property(self.user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        action = lambda: do_change_user_delivery_email(self.user_profile, 'newhamlet@zulip.com')
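        # Two events are expected here: the delivery_email update itself,
        # plus an avatar update, since gravatar-based avatar URLs are
        # derived from the delivery email.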
        events = self.do_test(action, num_events=2, client_gravatar=False)
        schema_checker('events[0]', events[0])
        avatar_schema_checker('events[1]', events[1])

    def do_set_realm_property_test(self, name: str) -> None:
        bool_tests: List[bool] = [True, False, True]
        test_values: Dict[str, Any] = dict(
            default_language=['es', 'de', 'en'],
            description=['Realm description', 'New description'],
            digest_weekday=[0, 1, 2],
            message_retention_days=[10, 20],
            name=['Zulip', 'New Name'],
            waiting_period_threshold=[10, 20],
            create_stream_policy=[3, 2, 1],
            invite_to_stream_policy=[3, 2, 1],
            private_message_policy=[2, 1],
            user_group_edit_policy=[1, 2],
            email_address_visibility=[Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS],
            bot_creation_policy=[Realm.BOT_CREATION_EVERYONE],
            video_chat_provider=[
                Realm.VIDEO_CHAT_PROVIDERS['jitsi_meet']['id'],
            ],
            default_code_block_language=['python', 'javascript'],
        )

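        # Each entry above lists the sequence of values the property will be
        # set to; the validator for the generated event is chosen from the
        # property's declared type below.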
        vals = test_values.get(name)
        property_type = Realm.property_types[name]
        if property_type is bool:
            validator: Validator[object] = check_bool
            vals = bool_tests
        elif property_type is str:
            validator = check_string
        elif property_type == (str, type(None)):
            validator = check_string
        elif property_type is int:
            validator = check_int
        elif property_type == (int, type(None)):
            validator = check_int
        else:
            raise AssertionError(f"Unexpected property type {property_type}")
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals(name)),
            ('value', validator),
        ])

        if vals is None:
            raise AssertionError(f'No test created for {name}')
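        # Set the first value outside of do_test so that each iteration
        # below is a real change and reliably generates an event.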
        do_set_realm_property(self.user_profile.realm, name, vals[0])
        for val in vals[1:]:
            state_change_expected = True
            events = self.do_test(
                lambda: do_set_realm_property(self.user_profile.realm, name, val),
                state_change_expected=state_change_expected)
            schema_checker('events[0]', events[0])

@slow ( " Actually runs several full-stack fetching tests " )
2017-11-05 10:51:25 +01:00
def test_change_realm_property ( self ) - > None :
2017-04-12 22:25:21 +02:00
for prop in Realm . property_types :
2019-04-06 06:34:49 +02:00
with self . settings ( SEND_DIGEST_EMAILS = True ) :
self . do_set_realm_property_test ( prop )
2017-03-04 06:39:45 +01:00
2017-10-28 00:46:59 +02:00
@slow ( " Runs a large matrix of tests " )
2017-11-05 10:51:25 +01:00
def test_change_realm_authentication_methods ( self ) - > None :
2017-04-20 17:31:41 +02:00
schema_checker = self . check_events_dict ( [
2016-11-02 21:51:56 +01:00
( ' type ' , equals ( ' realm ' ) ) ,
( ' op ' , equals ( ' update_dict ' ) ) ,
( ' property ' , equals ( ' default ' ) ) ,
2017-04-20 17:31:41 +02:00
( ' data ' , check_dict_only ( [
                ('authentication_methods', check_dict([])),
            ])),
        ])

        def fake_backends() -> Any:
            backends = (
                'zproject.backends.DevAuthBackend',
                'zproject.backends.EmailAuthBackend',
                'zproject.backends.GitHubAuthBackend',
                'zproject.backends.GoogleAuthBackend',
                'zproject.backends.ZulipLDAPAuthBackend',
            )
            return self.settings(AUTHENTICATION_BACKENDS=backends)
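        # fake_backends enables every relevant backend at the server level,
        # so the realm-level auth_method_dict below is the only variable.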
        # Test transitions; any new backends should be tested with T/T/T/F/T
        for (auth_method_dict) in \
                ({'Google': True, 'Email': True, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': True, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': False, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': True, 'GitHub': True, 'LDAP': True, 'Dev': False}):
            with fake_backends():
                events = self.do_test(
                    lambda: do_set_realm_authentication_methods(
                        self.user_profile.realm,
                        auth_method_dict))
            schema_checker('events[0]', events[0])

    def test_change_pin_stream(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('update')),
            ('property', equals('pin_to_top')),
            ('stream_id', check_int),
            ('value', check_bool),
            ('name', check_string),
            ('email', check_string),
        ])
        stream = get_stream("Denmark", self.user_profile.realm)
        sub = get_subscription(stream.name, self.user_profile)
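        # Start from a known state (unpinned) so that each toggle below is a
        # real change and reliably generates an event.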
do_change_subscription_property ( self . user_profile , sub , stream , " pin_to_top " , False )
for pinned in ( True , False ) :
2016-07-01 07:26:09 +02:00
events = self . do_test ( lambda : do_change_subscription_property ( self . user_profile , sub , stream , " pin_to_top " , pinned ) )
2020-06-21 02:36:20 +02:00
schema_checker ( ' events[0] ' , events [ 0 ] )
2016-07-01 07:26:09 +02:00
2019-02-13 10:22:16 +01:00
    def test_change_stream_notification_settings(self) -> None:
        for setting_name in ['email_notifications']:
            schema_checker = self.check_events_dict([
                ('type', equals('subscription')),
                ('op', equals('update')),
                ('property', equals(setting_name)),
                ('stream_id', check_int),
                ('value', check_bool),
                ('name', check_string),
                ('email', check_string),
            ])
            stream = get_stream("Denmark", self.user_profile.realm)
            sub = get_subscription(stream.name, self.user_profile)
            # First test with notification_settings_null enabled
            for value in (True, False):
                events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream,
                                                                              setting_name, value),
                                      notification_settings_null=True)
                schema_checker('events[0]', events[0])

            for value in (True, False):
                events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream,
                                                                              setting_name, value))
                schema_checker('events[0]', events[0])

@slow ( " Runs a matrix of 6 queries to the /home view " )
2017-11-05 10:51:25 +01:00
def test_change_realm_message_edit_settings ( self ) - > None :
2017-04-20 17:31:41 +02:00
schema_checker = self . check_events_dict ( [
2016-06-21 21:34:41 +02:00
( ' type ' , equals ( ' realm ' ) ) ,
( ' op ' , equals ( ' update_dict ' ) ) ,
( ' property ' , equals ( ' default ' ) ) ,
2017-04-20 17:31:41 +02:00
( ' data ' , check_dict_only ( [
( ' allow_message_editing ' , check_bool ) ,
( ' message_content_edit_limit_seconds ' , check_int ) ,
2017-12-03 00:50:48 +01:00
( ' allow_community_topic_editing ' , check_bool ) ,
2017-04-20 17:31:41 +02:00
] ) ) ,
2016-06-21 21:34:41 +02:00
] )
2016-07-08 02:25:55 +02:00
# Test every transition among the four possibilities {T,F} x {0, non-0}
for ( allow_message_editing , message_content_edit_limit_seconds ) in \
2017-10-07 00:29:18 +02:00
( ( True , 0 ) , ( False , 0 ) , ( False , 1234 ) ,
( True , 600 ) , ( False , 0 ) , ( True , 1234 ) ) :
2017-03-21 18:08:40 +01:00
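            # The trailing False presumably holds allow_community_topic_editing
            # fixed (it matches the checker's third data key) while the other
            # two settings vary.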
            events = self.do_test(
                lambda: do_set_realm_message_editing(self.user_profile.realm,
                                                     allow_message_editing,
                                                     message_content_edit_limit_seconds,
                                                     False))
            schema_checker('events[0]', events[0])

    def test_change_realm_notifications_stream(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('notifications_stream_id')),
            ('value', check_int),
        ])
        stream = get_stream("Rome", self.user_profile.realm)
        for notifications_stream, notifications_stream_id in ((stream, stream.id), (None, -1)):
            events = self.do_test(
                lambda: do_set_realm_notifications_stream(self.user_profile.realm,
                                                          notifications_stream,
                                                          notifications_stream_id))
            schema_checker('events[0]', events[0])

    def test_change_realm_signup_notifications_stream(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('signup_notifications_stream_id')),
            ('value', check_int),
        ])
        stream = get_stream("Rome", self.user_profile.realm)
        for signup_notifications_stream, signup_notifications_stream_id in ((stream, stream.id), (None, -1)):
            events = self.do_test(
                lambda: do_set_realm_signup_notifications_stream(self.user_profile.realm,
                                                                 signup_notifications_stream,
                                                                 signup_notifications_stream_id))
            schema_checker('events[0]', events[0])

    def test_change_is_admin(self) -> None:
        reset_emails_in_zulip_realm()
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('role', check_int_in(UserProfile.ROLE_TYPES)),
                ('user_id', check_int),
            ])),
        ])

        do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER)
        for role in [UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_MEMBER]:
            events = self.do_test(lambda: do_change_user_role(self.user_profile, role))
            schema_checker('events[0]', events[0])

    def test_change_is_owner(self) -> None:
        reset_emails_in_zulip_realm()
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('role', check_int_in(UserProfile.ROLE_TYPES)),
                ('user_id', check_int),
            ])),
        ])
        do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER)
        for role in [UserProfile.ROLE_REALM_OWNER, UserProfile.ROLE_MEMBER]:
            events = self.do_test(lambda: do_change_user_role(self.user_profile, role))
            schema_checker('events[0]', events[0])

    def test_change_is_guest(self) -> None:
        reset_emails_in_zulip_realm()
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('role', check_int_in(UserProfile.ROLE_TYPES)),
                ('user_id', check_int),
            ])),
        ])
        do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER)
        for role in [UserProfile.ROLE_GUEST, UserProfile.ROLE_MEMBER]:
            events = self.do_test(lambda: do_change_user_role(self.user_profile, role))
            schema_checker('events[0]', events[0])

    def do_set_user_display_settings_test(self, setting_name: str) -> None:
        """Test updating each setting in UserProfile.property_types dict."""

        test_changes: Dict[str, Any] = dict(
            emojiset=['twitter'],
            default_language=['es', 'de', 'en'],
            timezone=['US/Mountain', 'US/Samoa', 'Pacific/Galapogos', ''],
            demote_inactive_streams=[2, 3, 1],
            color_scheme=[2, 3, 1],
        )
        property_type = UserProfile.property_types[setting_name]
        if property_type is bool:
            validator: Validator[object] = check_bool
        elif property_type is str:
            validator = check_string
        elif property_type is int:
            validator = check_int
        else:
            raise AssertionError(f"Unexpected property type {property_type}")

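        # A timezone change produces a second event: a realm_user update
        # broadcasting the new timezone to other users (checked further below).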
        num_events = 1
        if setting_name == "timezone":
            num_events = 2
        values = test_changes.get(setting_name)
        if property_type is bool:
            if getattr(self.user_profile, setting_name) is False:
                values = [True, False, True]
            else:
                values = [False, True, False]
        if values is None:
            raise AssertionError(f'No test created for {setting_name}')

        for value in values:
            events = self.do_test(lambda: do_set_user_display_setting(
                self.user_profile, setting_name, value), num_events=num_events)

            schema_checker = self.check_events_dict([
                ('type', equals('update_display_settings')),
                ('setting_name', equals(setting_name)),
                ('user', check_string),
                ('setting', validator),
            ])
            language_schema_checker = self.check_events_dict([
                ('type', equals('update_display_settings')),
                ('language_name', check_string),
                ('setting_name', equals(setting_name)),
                ('user', check_string),
                ('setting', validator),
            ])
            if setting_name == "default_language":
                language_schema_checker('events[0]', events[0])
            else:
                schema_checker('events[0]', events[0])

            timezone_schema_checker = self.check_events_dict([
                ('type', equals('realm_user')),
                ('op', equals('update')),
                ('person', check_dict_only([
                    ('email', check_string),
                    ('user_id', check_int),
                    ('timezone', check_string),
                ])),
            ])
            if setting_name == "timezone":
                timezone_schema_checker('events[1]', events[1])

@slow ( " Actually runs several full-stack fetching tests " )
2017-11-05 10:51:25 +01:00
def test_set_user_display_settings ( self ) - > None :
2017-06-18 03:48:52 +02:00
for prop in UserProfile . property_types :
self . do_set_user_display_settings_test ( prop )
2017-03-02 08:30:53 +01:00
2017-10-07 00:29:18 +02:00
@slow ( " Actually runs several full-stack fetching tests " )
2017-11-05 10:51:25 +01:00
def test_change_notification_settings ( self ) - > None :
2017-05-23 03:19:21 +02:00
for notification_setting , v in self . user_profile . notification_setting_types . items ( ) :
2019-06-29 22:00:44 +02:00
if notification_setting in [ " notification_sound " , " desktop_icon_count_display " ] :
# These settings are tested in their own tests.
2018-01-11 21:36:11 +01:00
continue
2017-05-23 03:19:21 +02:00
schema_checker = self . check_events_dict ( [
( ' type ' , equals ( ' update_global_notifications ' ) ) ,
( ' notification_name ' , equals ( notification_setting ) ) ,
( ' user ' , check_string ) ,
( ' setting ' , check_bool ) ,
] )
do_change_notification_settings ( self . user_profile , notification_setting , False )
2018-01-11 21:36:11 +01:00
2017-05-23 03:19:21 +02:00
for setting_value in [ True , False ] :
events = self . do_test ( lambda : do_change_notification_settings (
self . user_profile , notification_setting , setting_value , log = False ) )
2020-06-21 02:36:20 +02:00
schema_checker ( ' events[0] ' , events [ 0 ] )
2016-12-08 21:06:23 +01:00
2019-02-13 10:22:16 +01:00
# Also test with notification_settings_null=True
events = self . do_test (
lambda : do_change_notification_settings (
self . user_profile , notification_setting , setting_value , log = False ) ,
notification_settings_null = True ,
state_change_expected = False )
2020-06-21 02:36:20 +02:00
schema_checker ( ' events[0] ' , events [ 0 ] )
2019-02-13 10:22:16 +01:00
2018-01-11 21:36:11 +01:00
    def test_change_notification_sound(self) -> None:
        notification_setting = "notification_sound"
        schema_checker = self.check_events_dict([
            ('type', equals('update_global_notifications')),
            ('notification_name', equals(notification_setting)),
            ('user', check_string),
            ('setting', equals("ding")),
        ])
        events = self.do_test(lambda: do_change_notification_settings(
            self.user_profile, notification_setting, 'ding', log=False))
        schema_checker('events[0]', events[0])

    def test_change_desktop_icon_count_display(self) -> None:
        notification_setting = "desktop_icon_count_display"
        schema_checker = self.check_events_dict([
            ('type', equals('update_global_notifications')),
            ('notification_name', equals(notification_setting)),
            ('user', check_string),
            ('setting', equals(2)),
        ])
        events = self.do_test(lambda: do_change_notification_settings(
            self.user_profile, notification_setting, 2, log=False))
        schema_checker('events[0]', events[0])

        schema_checker = self.check_events_dict([
            ('type', equals('update_global_notifications')),
            ('notification_name', equals(notification_setting)),
            ('user', check_string),
            ('setting', equals(1)),
        ])
        events = self.do_test(lambda: do_change_notification_settings(
            self.user_profile, notification_setting, 1, log=False))
        schema_checker('events[0]', events[0])

    def test_realm_update_plan_type(self) -> None:
        realm = self.user_profile.realm

        state_data = fetch_initial_state_data(self.user_profile, None, "", False, False)
        self.assertEqual(state_data['realm_plan_type'], Realm.SELF_HOSTED)
        self.assertEqual(state_data['zulip_plan_is_not_limited'], True)

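        # Moving to LIMITED also recomputes the realm's upload quota, which
        # is delivered to clients via the event's extra_data.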
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('plan_type')),
            ('value', equals(Realm.LIMITED)),
            ('extra_data', check_dict_only([
                ('upload_quota', check_int),
            ])),
        ])
        events = self.do_test(lambda: do_change_plan_type(realm, Realm.LIMITED))
        schema_checker('events[0]', events[0])

        state_data = fetch_initial_state_data(self.user_profile, None, "", False, False)
        self.assertEqual(state_data['realm_plan_type'], Realm.LIMITED)
        self.assertEqual(state_data['zulip_plan_is_not_limited'], False)

    def test_realm_emoji_events(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('realm_emoji')),
            ('op', equals('update')),
            ('realm_emoji', check_dict([])),
        ])
        author = self.example_user('iago')
        with get_test_image_file('img.png') as img_file:
            events = self.do_test(lambda: check_add_realm_emoji(self.user_profile.realm,
                                                                "my_emoji",
                                                                author,
                                                                img_file))
        schema_checker('events[0]', events[0])

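        # Removing an emoji reuses the same 'update' event shape; the event
        # appears to carry the realm's full emoji dict rather than a delta,
        # hence the loose check_dict([]) above.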
events = self . do_test ( lambda : do_remove_realm_emoji ( self . user_profile . realm , " my_emoji " ) )
2020-06-21 02:36:20 +02:00
schema_checker ( ' events[0] ' , events [ 0 ] )
2014-01-31 23:23:39 +01:00
2017-11-05 10:51:25 +01:00
    def test_realm_filter_events(self) -> None:
        regex = "#(?P<id>[123])"
        url = "https://realm.com/my_realm_filter/%(id)s"
        schema_checker = self.check_events_dict([
            ('type', equals('realm_filters')),
            ('realm_filters', check_list(check_tuple([
                check_string,
                check_string,
                check_int,
            ]))),
        ])
        events = self.do_test(lambda: do_add_realm_filter(self.user_profile.realm, regex, url))
        schema_checker('events[0]', events[0])

        events = self.do_test(lambda: do_remove_realm_filter(self.user_profile.realm, "#(?P<id>[123])"))
        schema_checker('events[0]', events[0])

    def test_realm_domain_events(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('add')),
            ('realm_domain', check_dict_only([
                ('domain', check_string),
                ('allow_subdomains', check_bool),
            ])),
        ])
        events = self.do_test(lambda: do_add_realm_domain(
            self.user_profile.realm, 'zulip.org', False))
        schema_checker('events[0]', events[0])

        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('change')),
            ('realm_domain', check_dict_only([
                ('domain', equals('zulip.org')),
                ('allow_subdomains', equals(True)),
            ])),
        ])
        test_domain = RealmDomain.objects.get(realm=self.user_profile.realm,
                                              domain='zulip.org')
        events = self.do_test(lambda: do_change_realm_domain(test_domain, True))
        schema_checker('events[0]', events[0])

        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('remove')),
            ('domain', equals('zulip.org')),
        ])
        events = self.do_test(lambda: do_remove_realm_domain(test_domain))
        schema_checker('events[0]', events[0])

    def test_create_bot(self) -> None:

        def get_bot_created_checker(bot_type: str) -> Validator[object]:
            if bot_type == "GENERIC_BOT":
                # Generic bots don't really understand the concept of
                # "services", so we just enforce that we get an empty list.
                check_services: Validator[List[object]] = equals([])
            elif bot_type == "OUTGOING_WEBHOOK_BOT":
                check_services = check_list(check_dict_only([
                    ('base_url', check_url),
                    ('interface', check_int),
                    ('token', check_string),
                ]), length=1)
            elif bot_type == "EMBEDDED_BOT":
                check_services = check_list(check_dict_only([
                    ('service_name', check_string),
                    ('config_data', check_dict(value_validator=check_string)),
                ]), length=1)
            return self.check_events_dict([
                ('type', equals('realm_bot')),
                ('op', equals('add')),
                ('bot', check_dict_only([
                    ('email', check_string),
                    ('user_id', check_int),
                    ('bot_type', check_int),
                    ('full_name', check_string),
                    ('is_active', check_bool),
                    ('api_key', check_string),
                    ('default_sending_stream', check_none_or(check_string)),
                    ('default_events_register_stream', check_none_or(check_string)),
                    ('default_all_public_streams', check_bool),
                    ('avatar_url', check_string),
                    ('owner_id', check_int),
                    ('services', check_services),
                ])),
            ])

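        # Creating a bot generates two events; events[0] appears to be the
        # realm_user add for the bot's underlying account, while events[1] is
        # the realm_bot add checked here.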
        action = lambda: self.create_bot('test')
        events = self.do_test(action, num_events=2)
        get_bot_created_checker(bot_type="GENERIC_BOT")('events[1]', events[1])

        action = lambda: self.create_bot('test_outgoing_webhook',
                                         full_name='Outgoing Webhook Bot',
                                         payload_url=ujson.dumps('https://foo.bar.com'),
                                         interface_type=Service.GENERIC,
                                         bot_type=UserProfile.OUTGOING_WEBHOOK_BOT)
        events = self.do_test(action, num_events=2)
        # The second event is the realm_bot add event, which contains the
        # additional services data for the outgoing webhook bot.
        get_bot_created_checker(bot_type="OUTGOING_WEBHOOK_BOT")('events[1]', events[1])

        action = lambda: self.create_bot('test_embedded',
                                         full_name='Embedded Bot',
                                         service_name='helloworld',
                                         config_data=ujson.dumps({'foo': 'bar'}),
                                         bot_type=UserProfile.EMBEDDED_BOT)
        events = self.do_test(action, num_events=2)
        get_bot_created_checker(bot_type="EMBEDDED_BOT")('events[1]', events[1])

    def test_change_bot_full_name(self) -> None:
        bot = self.create_bot('test')
        action = lambda: do_change_full_name(bot, 'New Bot Name', self.user_profile)
        events = self.do_test(action, num_events=2)
        self.realm_bot_schema('full_name', check_string)('events[1]', events[1])

    def test_regenerate_bot_api_key(self) -> None:
        bot = self.create_bot('test')
        action = lambda: do_regenerate_api_key(bot, self.user_profile)
        events = self.do_test(action)
        self.realm_bot_schema('api_key', check_string)('events[0]', events[0])

    def test_change_bot_avatar_source(self) -> None:
        bot = self.create_bot('test')
        action = lambda: do_change_avatar_fields(bot, bot.AVATAR_FROM_USER)
        events = self.do_test(action, num_events=2)
        self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
        self.assertEqual(events[1]['type'], 'realm_user')

    def test_change_realm_icon_source(self) -> None:
        action = lambda: do_change_icon_source(self.user_profile.realm, Realm.ICON_UPLOADED)
        events = self.do_test(action, state_change_expected=True)
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('icon')),
            ('data', check_dict_only([
                ('icon_url', check_string),
                ('icon_source', check_string),
            ])),
        ])
        schema_checker('events[0]', events[0])

    def test_change_realm_day_mode_logo_source(self) -> None:
        action = lambda: do_change_logo_source(self.user_profile.realm, Realm.LOGO_UPLOADED, False)
        events = self.do_test(action, state_change_expected=True)
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('logo')),
            ('data', check_dict_only([
                ('logo_url', check_string),
                ('logo_source', check_string),
            ])),
        ])
        schema_checker('events[0]', events[0])

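    # The trailing boolean to do_change_logo_source selects the night-mode
    # logo: True updates the 'night_logo' property instead of 'logo'.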
    def test_change_realm_night_mode_logo_source(self) -> None:
        action = lambda: do_change_logo_source(self.user_profile.realm, Realm.LOGO_UPLOADED, True)
        events = self.do_test(action, state_change_expected=True)
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('night_logo')),
            ('data', check_dict_only([
                ('night_logo_url', check_string),
                ('night_logo_source', check_string),
            ])),
        ])
        schema_checker('events[0]', events[0])

    def test_change_bot_default_all_public_streams(self) -> None:
        bot = self.create_bot('test')
        action = lambda: do_change_default_all_public_streams(bot, True)
        events = self.do_test(action)
        self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])

    def test_change_bot_default_sending_stream(self) -> None:
        bot = self.create_bot('test')
        stream = get_stream("Rome", bot.realm)

        action = lambda: do_change_default_sending_stream(bot, stream)
        events = self.do_test(action)
        self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])

        action = lambda: do_change_default_sending_stream(bot, None)
        events = self.do_test(action)
        self.realm_bot_schema('default_sending_stream', equals(None))('events[0]', events[0])

    def test_change_bot_default_events_register_stream(self) -> None:
        bot = self.create_bot('test')
        stream = get_stream("Rome", bot.realm)

        action = lambda: do_change_default_events_register_stream(bot, stream)
        events = self.do_test(action)
        self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])

        action = lambda: do_change_default_events_register_stream(bot, None)
        events = self.do_test(action)
        self.realm_bot_schema('default_events_register_stream', equals(None))('events[0]', events[0])

    def test_change_bot_owner(self) -> None:
        change_bot_owner_checker_user = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('bot_owner_id', check_int),
            ])),
        ])
        change_bot_owner_checker_bot = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('user_id', check_int),
                ('owner_id', check_int),
            ])),
        ])
        self.user_profile = self.example_user('iago')
        owner = self.example_user('hamlet')
        bot = self.create_bot('test')
        action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
        events = self.do_test(action, num_events=2)
        change_bot_owner_checker_bot('events[0]', events[0])
        change_bot_owner_checker_user('events[1]', events[1])

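        # When the acting user is not an administrator and gives the bot
        # away, their client loses access to it, so it receives a realm_bot
        # delete event rather than an update.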
        change_bot_owner_checker_bot = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('delete')),
            ('bot', check_dict_only([
                ('user_id', check_int),
            ])),
        ])
        self.user_profile = self.example_user('aaron')
        owner = self.example_user('hamlet')
        bot = self.create_bot('test1', full_name='Test1 Testerson')
        action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
        events = self.do_test(action, num_events=2)
        change_bot_owner_checker_bot('events[0]', events[0])
        change_bot_owner_checker_user('events[1]', events[1])

        change_bot_owner_checker_bot = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('bot_type', check_int),
                ('full_name', check_string),
                ('is_active', check_bool),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
                ('owner_id', check_int),
                ('services', equals([])),
            ])),
        ])
        previous_owner = self.example_user('aaron')
        self.user_profile = self.example_user('hamlet')
        bot = self.create_test_bot('test2', previous_owner, full_name='Test2 Testerson')
        action = lambda: do_change_bot_owner(bot, self.user_profile, previous_owner)
        events = self.do_test(action, num_events=2)
        change_bot_owner_checker_bot('events[0]', events[0])
        change_bot_owner_checker_user('events[1]', events[1])
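
    # do_update_outgoing_webhook_service only modifies the bot's service
    # configuration, so the realm_bot 'update' payload is narrow: just the
    # user_id plus the replacement services list.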
    def test_do_update_outgoing_webhook_service(self) -> None:
        update_outgoing_webhook_service_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('user_id', check_int),
                ('services', check_list(check_dict_only([
                    ('base_url', check_url),
                    ('interface', check_int),
                    ('token', check_string),
                ]))),
            ])),
        ])
        self.user_profile = self.example_user('iago')
        bot = self.create_test_bot('test', self.user_profile,
                                   full_name='Test Bot',
                                   bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                                   payload_url=ujson.dumps('http://hostname.domain2.com'),
                                   interface_type=Service.GENERIC,
                                   )
        action = lambda: do_update_outgoing_webhook_service(bot, 2, 'http://hostname.domain2.com')
        events = self.do_test(action)
        update_outgoing_webhook_service_checker('events[0]', events[0])

    def test_do_deactivate_user(self) -> None:
        bot_deactivate_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('remove')),
            ('bot', check_dict_only([
                ('full_name', check_string),
                ('user_id', check_int),
            ])),
        ])
        bot = self.create_bot('test')
        action = lambda: do_deactivate_user(bot)
        events = self.do_test(action, num_events=2)
        bot_deactivate_checker('events[1]', events[1])
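
    # Reactivation replays a full realm_bot 'add' event so that clients can
    # rebuild their record of the bot from scratch; note that owner_id is
    # allowed to be None here, unlike in the owner-change flow above.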
    def test_do_reactivate_user(self) -> None:
        bot_reactivate_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('bot_type', check_int),
                ('full_name', check_string),
                ('is_active', check_bool),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
                ('owner_id', check_none_or(check_int)),
                ('services', check_list(check_dict_only([
                    ('base_url', check_url),
                    ('interface', check_int),
                ]))),
            ])),
        ])
        bot = self.create_bot('test')
        do_deactivate_user(bot)
        action = lambda: do_reactivate_user(bot)
        events = self.do_test(action, num_events=2)
        bot_reactivate_checker('events[1]', events[1])

    def test_do_mark_hotspot_as_read(self) -> None:
        self.user_profile.tutorial_status = UserProfile.TUTORIAL_WAITING
        self.user_profile.save(update_fields=['tutorial_status'])
        schema_checker = self.check_events_dict([
            ('type', equals('hotspots')),
            ('hotspots', check_list(check_dict_only([
                ('name', check_string),
                ('title', check_string),
                ('description', check_string),
                ('delay', check_float),
            ]))),
        ])
        events = self.do_test(lambda: do_mark_hotspot_as_read(self.user_profile, 'intro_reply'))
        schema_checker('events[0]', events[0])
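
    # Renaming a stream fans out three events: an update to the stream's
    # email_address (it changes along with the name), the 'name' update
    # itself, and a notification-bot message announcing the rename.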
    def test_rename_stream(self) -> None:
        stream = self.make_stream('old_name')
        new_name = 'stream with a brand new name'
        self.subscribe(self.user_profile, stream.name)
        notification = '<p><span class="user-mention silent" data-user-id="{user_id}">King Hamlet</span> renamed stream <strong>old_name</strong> to <strong>stream with a brand new name</strong>.</p>'
        notification = notification.format(user_id=self.user_profile.id)
        action = lambda: do_rename_stream(stream, new_name, self.user_profile)
        events = self.do_test(action, num_events=3)

        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('email_address')),
            ('value', check_string),
            ('stream_id', check_int),
            ('name', equals('old_name')),
        ])
        schema_checker('events[0]', events[0])

        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('name')),
            ('value', equals(new_name)),
            ('name', equals('old_name')),
            ('stream_id', check_int),
        ])
        schema_checker('events[1]', events[1])

        schema_checker = check_dict([
            ('flags', check_list(check_string)),
            ('type', equals('message')),
            ('message', check_dict([
                ('timestamp', check_int),
                ('content', equals(notification)),
                ('content_type', equals('text/html')),
                ('sender_email', equals('notification-bot@zulip.com')),
                ('sender_id', check_int),
                ('sender_short_name', equals('notification-bot')),
                ('display_recipient', equals(new_name)),
                ('id', check_int),
                ('stream_id', check_int),
                ('sender_realm_str', check_string),
                ('sender_full_name', equals('Notification Bot')),
                ('is_me_message', equals(False)),
                ('type', equals('stream')),
                ('submessages', check_list(check_string)),
                (TOPIC_LINKS, check_list(check_url)),
                ('avatar_url', check_none_or(check_url)),
                ('reactions', check_list(None)),
                ('client', equals('Internal')),
                (TOPIC_NAME, equals('stream events')),
                ('recipient_id', check_int),
            ])),
            ('id', check_int),
        ])
        schema_checker('events[2]', events[2])

    def test_deactivate_stream_neversubscribed(self) -> None:
        stream = self.make_stream('old_name')
        action = lambda: do_deactivate_stream(stream)
        events = self.do_test(action)
        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('delete')),
            ('streams', check_list(check_dict([]))),
        ])
        schema_checker('events[0]', events[0])
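
    # Subscribing another user to a brand-new stream produces two events for
    # the observer; the 'peer_add' notification is the second one, which is
    # all this test pins down.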
    def test_subscribe_other_user_never_subscribed(self) -> None:
        action = lambda: self.subscribe(self.example_user("othello"), "test_stream")
        events = self.do_test(action, num_events=2)
        peer_add_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_add')),
            ('user_id', check_int),
            ('stream_id', check_int),
        ])
        peer_add_schema_checker('events[1]', events[1])

    @slow("Actually several tests combined together")
    def test_subscribe_events(self) -> None:
        self.do_test_subscribe_events(include_subscribers=True)

    @slow("Actually several tests combined together")
    def test_subscribe_events_no_include_subscribers(self) -> None:
        self.do_test_subscribe_events(include_subscribers=False)
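
    # Shared body for the two tests above. include_subscribers toggles whether
    # per-stream subscriber lists are part of the fetched state, which is why
    # peer_add/peer_remove actions below only change the state when it is True.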
    def do_test_subscribe_events(self, include_subscribers: bool) -> None:
        subscription_fields = [
            ('color', check_string),
            ('description', check_string),
            ('rendered_description', check_string),
            ('email_address', check_string),
            ('invite_only', check_bool),
            ('is_web_public', check_bool),
            ('is_announcement_only', check_bool),
            ('stream_post_policy', check_int_in(Stream.STREAM_POST_POLICY_TYPES)),
            ('message_retention_days', check_none_or(check_int)),
            ('is_muted', check_bool),
            ('in_home_view', check_bool),
            ('name', check_string),
            ('audible_notifications', check_none_or(check_bool)),
            ('email_notifications', check_none_or(check_bool)),
            ('desktop_notifications', check_none_or(check_bool)),
            ('push_notifications', check_none_or(check_bool)),
            ('stream_id', check_int),
            ('first_message_id', check_none_or(check_int)),
            ('history_public_to_subscribers', check_bool),
            ('pin_to_top', check_bool),
            ('stream_weekly_traffic', check_none_or(check_int)),
            ('wildcard_mentions_notify', check_none_or(check_bool)),
        ]
        if include_subscribers:
            subscription_fields.append(('subscribers', check_list(check_int)))
        subscription_schema_checker = check_list(
            check_dict_only(subscription_fields),
        )
        stream_create_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('create')),
            ('streams', check_list(check_dict([
                ('name', check_string),
                ('stream_id', check_int),
                ('invite_only', check_bool),
                ('description', check_string),
                ('rendered_description', check_string),
            ]))),
        ])
        add_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('add')),
            ('subscriptions', subscription_schema_checker),
        ])
        remove_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('remove')),
            ('subscriptions', check_list(
                check_dict_only([
                    ('name', equals('test_stream')),
                    ('stream_id', check_int),
                ]),
            )),
        ])
        peer_add_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_add')),
            ('user_id', check_int),
            ('stream_id', check_int),
        ])
        peer_remove_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_remove')),
            ('user_id', check_int),
            ('stream_id', check_int),
        ])
        stream_update_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('description')),
            ('value', check_string),
            ('rendered_description', check_string),
            ('stream_id', check_int),
            ('name', check_string),
        ])
        stream_update_invite_only_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('invite_only')),
            ('stream_id', check_int),
            ('name', check_string),
            ('value', check_bool),
            ('history_public_to_subscribers', check_bool),
        ])
        stream_update_stream_post_policy_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('stream_post_policy')),
            ('stream_id', check_int),
            ('name', check_string),
            ('value', check_int_in(Stream.STREAM_POST_POLICY_TYPES)),
        ])
        stream_update_message_retention_days_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('message_retention_days')),
            ('stream_id', check_int),
            ('name', check_string),
            ('value', check_none_or(check_int)),
        ])

        # Subscribe to a totally new stream, so it's just Hamlet on it
        action: Callable[[], object] = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
        events = self.do_test(action, event_types=["subscription", "realm_user"],
                              include_subscribers=include_subscribers)
        add_schema_checker('events[0]', events[0])

        # Add another user to that totally new stream
        action = lambda: self.subscribe(self.example_user("othello"), "test_stream")
        events = self.do_test(action,
                              include_subscribers=include_subscribers,
                              state_change_expected=include_subscribers,
                              )
        peer_add_schema_checker('events[0]', events[0])

        stream = get_stream("test_stream", self.user_profile.realm)

        # Now remove the first user, to test the normal unsubscribe flow
        action = lambda: bulk_remove_subscriptions(
            [self.example_user('othello')],
            [stream],
            get_client("website"))
        events = self.do_test(action,
                              include_subscribers=include_subscribers,
                              state_change_expected=include_subscribers,
                              )
        peer_remove_schema_checker('events[0]', events[0])

        # Now remove the second user, to test the 'vacate' event flow
        action = lambda: bulk_remove_subscriptions(
            [self.example_user('hamlet')],
            [stream],
            get_client("website"))
        events = self.do_test(action,
                              include_subscribers=include_subscribers,
                              num_events=3)
        remove_schema_checker('events[0]', events[0])

        # Now resubscribe a user, to make sure that works on a vacated stream
        action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
        events = self.do_test(action,
                              include_subscribers=include_subscribers,
                              num_events=2)
        add_schema_checker('events[1]', events[1])

        action = lambda: do_change_stream_description(stream, 'new description')
        events = self.do_test(action,
                              include_subscribers=include_subscribers)
        stream_update_schema_checker('events[0]', events[0])

        # Update stream privacy
        action = lambda: do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
        events = self.do_test(action,
                              include_subscribers=include_subscribers)
        stream_update_invite_only_schema_checker('events[0]', events[0])

        # Update stream stream_post_policy property
        action = lambda: do_change_stream_post_policy(stream, Stream.STREAM_POST_POLICY_ADMINS)
        events = self.do_test(action,
                              include_subscribers=include_subscribers, num_events=2)
        stream_update_stream_post_policy_schema_checker('events[0]', events[0])

        action = lambda: do_change_stream_message_retention_days(stream, -1)
        events = self.do_test(action,
                              include_subscribers=include_subscribers, num_events=1)
        stream_update_message_retention_days_schema_checker('events[0]', events[0])

        # Subscribe to a totally new invite-only stream, so it's just Hamlet on it
        stream = self.make_stream("private", self.user_profile.realm, invite_only=True)
        user_profile = self.example_user('hamlet')
        action = lambda: bulk_add_subscriptions([stream], [user_profile])
        events = self.do_test(action, include_subscribers=include_subscribers,
                              num_events=2)
        stream_create_schema_checker('events[0]', events[0])
        add_schema_checker('events[1]', events[1])
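
    # Bulk deletion sends a single delete_message event carrying all of the
    # deleted message ids; the legacy tests below exercise the older
    # one-event-per-message behavior (bulk_message_deletion=False).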
    def test_do_delete_message_stream(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('delete_message')),
            ('message_ids', check_list(check_int, 2)),
            ('message_type', equals("stream")),
            ('stream_id', check_int),
            ('topic', check_string),
        ])
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, "Verona")
        msg_id_2 = self.send_stream_message(hamlet, "Verona")
        messages = [
            Message.objects.get(id=msg_id),
            Message.objects.get(id=msg_id_2),
        ]
        events = self.do_test(
            lambda: do_delete_messages(self.user_profile.realm, messages),
            state_change_expected=True,
        )
        schema_checker('events[0]', events[0])

    def test_do_delete_message_stream_legacy(self) -> None:
        """
        Test for the legacy method of deleting messages, which
        sends one event per deleted message to the client.
        """
        schema_checker = self.check_events_dict([
            ('type', equals('delete_message')),
            ('message_id', check_int),
            ('message_type', equals("stream")),
            ('stream_id', check_int),
            ('topic', check_string),
        ])
        hamlet = self.example_user('hamlet')
        msg_id = self.send_stream_message(hamlet, "Verona")
        msg_id_2 = self.send_stream_message(hamlet, "Verona")
        messages = [
            Message.objects.get(id=msg_id),
            Message.objects.get(id=msg_id_2),
        ]
        events = self.do_test(
            lambda: do_delete_messages(self.user_profile.realm, messages),
            state_change_expected=True, bulk_message_deletion=False,
            num_events=2,
        )
        schema_checker('events[0]', events[0])

    def test_do_delete_message_personal(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('delete_message')),
            ('message_ids', check_list(check_int, 1)),
            ('sender_id', check_int),
            ('message_type', equals("private")),
            ('recipient_id', check_int),
        ])
        msg_id = self.send_personal_message(
            self.example_user("cordelia"),
            self.user_profile,
            "hello",
        )
        message = Message.objects.get(id=msg_id)
        events = self.do_test(
            lambda: do_delete_messages(self.user_profile.realm, [message]),
            state_change_expected=True,
        )
        schema_checker('events[0]', events[0])

    def test_do_delete_message_personal_legacy(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('delete_message')),
            ('message_id', check_int),
            ('sender_id', check_int),
            ('message_type', equals("private")),
            ('recipient_id', check_int),
        ])
        msg_id = self.send_personal_message(
            self.example_user("cordelia"),
            self.user_profile,
            "hello",
        )
        message = Message.objects.get(id=msg_id)
        events = self.do_test(
            lambda: do_delete_messages(self.user_profile.realm, [message]),
            state_change_expected=True, bulk_message_deletion=False,
        )
        schema_checker('events[0]', events[0])
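
    # Once all of a user's messages are deleted, the initial state payload
    # should report max_message_id as -1 rather than a stale id.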
    def test_do_delete_message_no_max_id(self) -> None:
        # Delete all historical messages for this user
        user_profile = self.example_user('hamlet')
        UserMessage.objects.filter(user_profile=user_profile).delete()
        msg_id = self.send_stream_message(user_profile, "Verona")
        message = Message.objects.get(id=msg_id)
        self.do_test(
            lambda: do_delete_messages(self.user_profile.realm, [message]),
            state_change_expected=True,
        )
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        self.assertEqual(result['max_message_id'], -1)
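
    # Attachment lifecycle: an upload emits an 'add' event while the file is
    # still unclaimed, referencing it from a message emits an 'update' event,
    # and deleting it emits a 'remove' event.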
    def test_add_attachment(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('add')),
            ('attachment', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('size', check_int),
                ('path_id', check_string),
                ('create_time', check_float),
                ('messages', check_list(check_dict_only([
                    ('id', check_int),
                    ('name', check_float),
                ]))),
            ])),
            ('upload_space_used', equals(6)),
        ])
        self.login('hamlet')
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        data = {'uri': None}

        def do_upload() -> None:
            result = self.client_post("/json/user_uploads", {'file': fp})
            self.assert_json_success(result)
            self.assertIn("uri", result.json())
            uri = result.json()["uri"]
            base = '/user_uploads/'
            self.assertEqual(base, uri[:len(base)])
            data['uri'] = uri

        events = self.do_test(
            lambda: do_upload(),
            num_events=1, state_change_expected=False)
        schema_checker('events[0]', events[0])

        # Verify that the DB has the attachment marked as unclaimed
        entry = Attachment.objects.get(file_name='zulip.txt')
        self.assertEqual(entry.is_claimed(), False)

        # Now we send an actual message using this attachment.
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('update')),
            ('attachment', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('size', check_int),
                ('path_id', check_string),
                ('create_time', check_float),
                ('messages', check_list(check_dict_only([
                    ('id', check_int),
                    ('name', check_float),
                ]))),
            ])),
            ('upload_space_used', equals(6)),
        ])
        hamlet = self.example_user("hamlet")
        self.subscribe(hamlet, "Denmark")
        body = f"First message ...[zulip.txt](http://{hamlet.realm.host}" + data['uri'] + ")"
        events = self.do_test(
            lambda: self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test"),
            num_events=2)
        schema_checker('events[0]', events[0])

        # Now remove the attachment
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('remove')),
            ('attachment', check_dict_only([
                ('id', check_int),
            ])),
            ('upload_space_used', equals(0)),
        ])
        events = self.do_test(
            lambda: self.client_delete(f"/json/attachments/{entry.id}"),
            num_events=1, state_change_expected=False)
        schema_checker('events[0]', events[0])
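
    # A successful realm export produces three events: a realm_export event
    # with export_url=None while the export is pending, a notification-bot
    # message, and a final realm_export event carrying the export_url.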
    def test_notify_realm_export(self) -> None:
        pending_schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', equals(None)),
                ('deleted_timestamp', equals(None)),
                ('failed_timestamp', equals(None)),
                ('pending', check_bool),
            ]))),
        ])
        schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', check_string),
                ('deleted_timestamp', equals(None)),
                ('failed_timestamp', equals(None)),
                ('pending', check_bool),
            ]))),
        ])
        do_change_user_role(self.user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login_user(self.user_profile)
        with mock.patch('zerver.lib.export.do_export_realm',
                        return_value=create_dummy_file('test-export.tar.gz')):
            with stdout_suppressed():
                events = self.do_test(
                    lambda: self.client_post('/json/export/realm'),
                    state_change_expected=True, num_events=3)

        # We are first notified when an export is initiated,
        pending_schema_checker('events[0]', events[0])
        # events[1] is a message from notification-bot; the final event
        # carries the completed export.
        schema_checker('events[2]', events[2])

        # Now we check the deletion of the export.
        deletion_schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', equals(None)),
                ('deleted_timestamp', check_float),
                ('failed_timestamp', equals(None)),
                ('pending', check_bool),
            ]))),
        ])
        audit_log_entry = RealmAuditLog.objects.filter(
            event_type=RealmAuditLog.REALM_EXPORTED).first()
        events = self.do_test(
            lambda: self.client_delete(f'/json/export/realm/{audit_log_entry.id}'),
            state_change_expected=False, num_events=1)
        deletion_schema_checker('events[0]', events[0])

    def test_notify_realm_export_on_failure(self) -> None:
        pending_schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', equals(None)),
                ('deleted_timestamp', equals(None)),
                ('failed_timestamp', equals(None)),
                ('pending', check_bool),
            ]))),
        ])
        failed_schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', equals(None)),
                ('deleted_timestamp', equals(None)),
                ('failed_timestamp', check_float),
                ('pending', check_bool),
            ]))),
        ])
        do_change_user_role(self.user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login_user(self.user_profile)
        with mock.patch('zerver.lib.export.do_export_realm',
                        side_effect=Exception("test")):
            with stdout_suppressed():
                events = self.do_test(
                    lambda: self.client_post('/json/export/realm'),
                    state_change_expected=False, num_events=2)
        pending_schema_checker('events[0]', events[0])
        failed_schema_checker('events[1]', events[1])

    def test_has_zoom_token(self) -> None:
        schema_checker = self.check_events_dict([
            ('type', equals('has_zoom_token')),
            ('value', equals(True)),
        ])
        events = self.do_test(
            lambda: do_set_zoom_token(self.user_profile, {'access_token': 'token'}),
        )
        schema_checker('events[0]', events[0])

        schema_checker = self.check_events_dict([
            ('type', equals('has_zoom_token')),
            ('value', equals(False)),
        ])
        events = self.do_test(lambda: do_set_zoom_token(self.user_profile, None))
        schema_checker('events[0]', events[0])

class FetchInitialStateDataTest(ZulipTestCase):
    # Non-admin users don't have access to all bots
    def test_realm_bots_non_admin(self) -> None:
        user_profile = self.example_user('cordelia')
        self.assertFalse(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        self.assert_length(result['realm_bots'], 0)

        # Additionally, the API key for a random bot is not present in the data
        api_key = get_api_key(self.notification_bot())
        self.assertNotIn(api_key, str(result))

    # Admin users have access to all bots in the realm_bots field
    def test_realm_bots_admin(self) -> None:
        user_profile = self.example_user('hamlet')
        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.assertTrue(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        self.assertTrue(len(result['realm_bots']) > 2)

    def test_max_message_id_with_no_history(self) -> None:
        user_profile = self.example_user('aaron')
        # Delete all historical messages for this user
        UserMessage.objects.filter(user_profile=user_profile).delete()
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        self.assertEqual(result['max_message_id'], -1)
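
    # delivery_email should only be exposed when the realm's
    # email_address_visibility policy grants the fetching user access:
    # non-admins never see it, and admins see it only under the
    # EMAIL_ADDRESS_VISIBILITY_ADMINS policy.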
    def test_delivery_email_presence_for_non_admins(self) -> None:
        user_profile = self.example_user('aaron')
        self.assertFalse(user_profile.is_realm_admin)

        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)

        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)

    def test_delivery_email_presence_for_admins(self) -> None:
        user_profile = self.example_user('iago')
        self.assertTrue(user_profile.is_realm_admin)

        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)

        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False, user_avatar_url_field_optional=False)
        for key, value in result['raw_users'].items():
            self.assertIn('delivery_email', value)
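
    # With user_avatar_url_field_optional=True, the server may omit
    # avatar_url for long-term-idle users so that clients compute it on
    # demand.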
    def test_user_avatar_url_field_optional(self) -> None:
        hamlet = self.example_user('hamlet')
        users = [
            self.example_user('iago'),
            self.example_user('cordelia'),
            self.example_user('ZOE'),
            self.example_user('othello'),
        ]
        for user in users:
            user.long_term_idle = True
            user.save()
        long_term_idle_users_ids = [user.id for user in users]

        result = fetch_initial_state_data(user_profile=hamlet,
                                          event_types=None,
                                          queue_id='',
                                          client_gravatar=False,
                                          user_avatar_url_field_optional=True)
        raw_users = result['raw_users']
        for user_dict in raw_users.values():
            if user_dict['user_id'] in long_term_idle_users_ids:
                self.assertFalse('avatar_url' in user_dict)
            else:
                self.assertIsNotNone(user_dict['avatar_url'])

        gravatar_users_id = [user_dict['user_id'] for user_dict in raw_users.values()
                             if 'avatar_url' in user_dict and 'gravatar.com' in user_dict['avatar_url']]

        # Test again with client_gravatar = True
        result = fetch_initial_state_data(user_profile=hamlet,
                                          event_types=None,
                                          queue_id='',
                                          client_gravatar=True,
                                          user_avatar_url_field_optional=True)
        raw_users = result['raw_users']
        for user_dict in raw_users.values():
            if user_dict['user_id'] in gravatar_users_id:
                self.assertIsNone(user_dict['avatar_url'])
            else:
                self.assertFalse('avatar_url' in user_dict)

class GetUnreadMsgsTest(ZulipTestCase):
    def mute_stream(self, user_profile: UserProfile, stream: Stream) -> None:
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscription = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient,
        )
        subscription.is_muted = True
        subscription.save()

    def mute_topic(self, user_profile: UserProfile, stream_name: str,
                   topic_name: str) -> None:
        realm = user_profile.realm
        stream = get_stream(stream_name, realm)
        recipient = stream.recipient
        add_topic_mute(
            user_profile=user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=topic_name,
        )
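
    # Muted streams and muted topics still appear in stream_dict, but their
    # message ids are excluded from unmuted_stream_msgs.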
    def test_raw_unread_stream(self) -> None:
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        for stream_name in ['social', 'devel', 'test here']:
            self.subscribe(hamlet, stream_name)
            self.subscribe(cordelia, stream_name)
        all_message_ids: Set[int] = set()
        message_ids = dict()

        tups = [
            ('social', 'lunch'),
            ('test here', 'bla'),
            ('devel', 'python'),
            ('devel', 'ruby'),
        ]

        for stream_name, topic_name in tups:
            message_ids[topic_name] = [
                self.send_stream_message(
                    sender=cordelia,
                    stream_name=stream_name,
                    topic_name=topic_name,
                ) for i in range(3)
            ]
            all_message_ids |= set(message_ids[topic_name])

        self.assertEqual(len(all_message_ids), 12)  # sanity check on test setup

        self.mute_stream(
            user_profile=hamlet,
            stream=get_stream('test here', realm),
        )

        self.mute_topic(
            user_profile=hamlet,
            stream_name='devel',
            topic_name='ruby',
        )

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        stream_dict = raw_unread_data['stream_dict']

        self.assertEqual(
            set(stream_dict.keys()),
            all_message_ids,
        )

        self.assertEqual(
            raw_unread_data['unmuted_stream_msgs'],
            set(message_ids['python']) | set(message_ids['lunch']),
        )

        self.assertEqual(
            stream_dict[message_ids['lunch'][0]],
            dict(
                sender_id=cordelia.id,
                stream_id=get_stream('social', realm).id,
                topic='lunch',
            ),
        )

    def test_raw_unread_huddle(self) -> None:
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        prospero = self.example_user('prospero')

        huddle1_message_ids = [
            self.send_huddle_message(
                cordelia,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
[ hamlet , othello ] ,
2017-11-10 16:24:31 +01:00
)
for i in range ( 3 )
]
huddle2_message_ids = [
self . send_huddle_message (
2020-03-07 11:43:05 +01:00
cordelia ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
[ hamlet , prospero ] ,
2017-11-10 16:24:31 +01:00
)
for i in range ( 3 )
]
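
        # Hamlet participates in both huddles, so all six messages should
        # appear in his raw unread data.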
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        huddle_dict = raw_unread_data['huddle_dict']

        self.assertEqual(
            set(huddle_dict.keys()),
            set(huddle1_message_ids) | set(huddle2_message_ids),
        )
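
        # Each huddle is keyed by a comma-separated string of the
        # participants' user IDs, sorted in ascending order.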
        huddle_string = ','.join(
            str(uid)
            for uid in sorted([cordelia.id, hamlet.id, othello.id])
        )

        self.assertEqual(
            huddle_dict[huddle1_message_ids[0]],
            dict(user_ids_string=huddle_string),
        )

    def test_raw_unread_personal(self) -> None:
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')

        cordelia_pm_message_ids = [
            self.send_personal_message(cordelia, hamlet)
            for i in range(3)
        ]

        othello_pm_message_ids = [
            self.send_personal_message(othello, hamlet)
            for i in range(3)
        ]
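
        # pm_dict is keyed by message ID; for each unread private message
        # it records the other party in the conversation.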
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(
            set(pm_dict.keys()),
            set(cordelia_pm_message_ids) | set(othello_pm_message_ids),
        )

        self.assertEqual(
            pm_dict[cordelia_pm_message_ids[0]],
            dict(sender_id=cordelia.id),
        )

    def test_raw_unread_personal_from_self(self) -> None:
        hamlet = self.example_user('hamlet')

        def send_unread_pm(other_user: UserProfile) -> Message:
            # It is rare to send a message from Hamlet to Othello
            # (or any other user) and have it be unread for
            # Hamlet himself, but that is actually normal
            # behavior for most API clients.
            message_id = self.send_personal_message(
                from_user=hamlet,
                to_user=other_user,
                sending_client_name='some_api_program',
            )

            # Check that our test setup is correct--the message should
            # not have looked like it was sent by a human.
            message = Message.objects.get(id=message_id)
            self.assertFalse(message.sent_by_human())

            # And since it was not sent by a human, it should not
            # be read, not even by the sender (Hamlet).
            um = UserMessage.objects.get(
                user_profile_id=hamlet.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)

            return message

        othello = self.example_user('othello')
        othello_msg = send_unread_pm(other_user=othello)

        # And now check the unread data structure...
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(set(pm_dict.keys()), {othello_msg.id})

        # For legacy reasons we call the field `sender_id` here,
        # but it really refers to the other user's ID in the
        # conversation, which is Othello.
        self.assertEqual(
            pm_dict[othello_msg.id],
            dict(sender_id=othello.id),
        )

        cordelia = self.example_user('cordelia')
        cordelia_msg = send_unread_pm(other_user=cordelia)
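
        # apply_unread_message_event updates raw_unread_data in place,
        # mirroring what happens when a live `message` event arrives.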
        apply_unread_message_event(
            user_profile=hamlet,
            state=raw_unread_data,
            message=MessageDict.wide_dict(cordelia_msg),
            flags=[],
        )

        self.assertEqual(
            set(pm_dict.keys()),
            {othello_msg.id, cordelia_msg.id},
        )

        # Again, `sender_id` is misnamed here.
        self.assertEqual(
            pm_dict[cordelia_msg.id],
            dict(sender_id=cordelia.id),
        )

        # Send a message to ourself.
        hamlet_msg = send_unread_pm(other_user=hamlet)
        apply_unread_message_event(
            user_profile=hamlet,
            state=raw_unread_data,
            message=MessageDict.wide_dict(hamlet_msg),
            flags=[],
        )

        self.assertEqual(
            set(pm_dict.keys()),
            {othello_msg.id, cordelia_msg.id, hamlet_msg.id},
        )

        # Again, `sender_id` is misnamed here.
        self.assertEqual(
            pm_dict[hamlet_msg.id],
            dict(sender_id=hamlet.id),
        )

        # Call get_raw_unread_data again.
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )
        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(
            set(pm_dict.keys()),
            {othello_msg.id, cordelia_msg.id, hamlet_msg.id},
        )

        # Again, `sender_id` is misnamed here.
        self.assertEqual(
            pm_dict[hamlet_msg.id],
            dict(sender_id=hamlet.id),
        )

    def test_unread_msgs(self) -> None:
        sender = self.example_user('cordelia')
        sender_id = sender.id
        user_profile = self.example_user('hamlet')
        othello = self.example_user('othello')

        pm1_message_id = self.send_personal_message(sender, user_profile, "hello1")
        pm2_message_id = self.send_personal_message(sender, user_profile, "hello2")

        muted_stream = self.subscribe(user_profile, 'Muted Stream')
        self.mute_stream(user_profile, muted_stream)
        self.mute_topic(user_profile, 'Denmark', 'muted-topic')

        stream_message_id = self.send_stream_message(sender, "Denmark", "hello")
        muted_stream_message_id = self.send_stream_message(sender, "Muted Stream", "hello")
        muted_topic_message_id = self.send_stream_message(
            sender,
            "Denmark",
            topic_name="muted-topic",
            content="hello",
        )

        huddle_message_id = self.send_huddle_message(
            sender,
            [user_profile, othello],
            'hello3',
        )
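
        # Hamlet now has six freshly sent unread messages: two PMs, a
        # normal stream message, one in a muted stream, one in a muted
        # topic, and one huddle message.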

        def get_unread_data() -> UnreadMessagesResult:
            raw_unread_data = get_raw_unread_data(user_profile)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data

        result = get_unread_data()

        # The count here reflects the count of unread messages that we will
        # report to users in the bankruptcy dialog, and for now it excludes
        # unread messages from muted streams, but it doesn't exclude unread
        # messages from muted topics yet.
        self.assertEqual(result['count'], 4)

        unread_pm = result['pms'][0]
        self.assertEqual(unread_pm['sender_id'], sender_id)
        self.assertEqual(unread_pm['unread_message_ids'], [pm1_message_id, pm2_message_id])
        self.assertTrue('sender_ids' not in unread_pm)

        unread_stream = result['streams'][0]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'muted-topic')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_topic_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][1]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][2]
        self.assertEqual(unread_stream['stream_id'], get_stream('Muted Stream', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        huddle_string = ','.join(str(uid) for uid in sorted([sender_id, user_profile.id, othello.id]))

        unread_huddle = result['huddles'][0]
        self.assertEqual(unread_huddle['user_ids_string'], huddle_string)
        self.assertEqual(unread_huddle['unread_message_ids'], [huddle_message_id])
        self.assertTrue('sender_ids' not in unread_huddle)

        self.assertEqual(result['mentions'], [])
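
        # Now exercise how UserMessage flags feed into result['mentions']:
        # personal mentions always count, while (as the asserts below show)
        # wildcard mentions are dropped in muted streams and muted topics.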
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=stream_message_id,
        )
        um.flags |= UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        # TODO: This should change when we make alert words work better.
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        # Test with a muted stream
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_stream_message_id,
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        # Test with a muted topic
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_topic_message_id,
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_topic_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

class ClientDescriptorsTest(ZulipTestCase):
    def test_get_client_info_for_all_public_streams(self) -> None:
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        queue_data = dict(
            all_public_streams=True,
            apply_markdown=True,
            client_gravatar=True,
            client_type_name='website',
            event_types=['message'],
            last_connection_time=time.time(),
            queue_timeout=0,
            realm_id=realm.id,
            user_profile_id=hamlet.id,
        )

        client = allocate_client_descriptor(queue_data)

        message_event = dict(
            realm_id=realm.id,
            stream_name='whatever',
        )

        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )

        self.assertEqual(len(client_info), 1)

        dct = client_info[client.event_queue.id]
        self.assertEqual(dct['client'].apply_markdown, True)
        self.assertEqual(dct['client'].client_gravatar, True)
        self.assertEqual(dct['client'].user_profile_id, hamlet.id)
        self.assertEqual(dct['flags'], [])
        self.assertEqual(dct['is_sender'], False)
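
        # When the event names this queue as the sender's own queue, the
        # client info should mark it as belonging to the sender.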
        message_event = dict(
            realm_id=realm.id,
            stream_name='whatever',
            sender_queue_id=client.event_queue.id,
        )

        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )
        dct = client_info[client.event_queue.id]
        self.assertEqual(dct['is_sender'], True)

    def test_get_client_info_for_normal_users(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        realm = hamlet.realm

        def test_get_info(apply_markdown: bool, client_gravatar: bool) -> None:
            clear_client_event_queues_for_testing()

            queue_data = dict(
                all_public_streams=False,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
                client_type_name='website',
                event_types=['message'],
                last_connection_time=time.time(),
                queue_timeout=0,
                realm_id=realm.id,
                user_profile_id=hamlet.id,
            )

            client = allocate_client_descriptor(queue_data)
            message_event = dict(
                realm_id=realm.id,
                stream_name='whatever',
            )

            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                ],
            )

            self.assertEqual(len(client_info), 0)
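
            # The queue above belongs to Hamlet, so an event addressed only
            # to Cordelia matches no queues; once Hamlet appears in the
            # users list, his queue (and flags) should be picked up.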
            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                    dict(id=hamlet.id, flags=['mentioned']),
                ],
            )
            self.assertEqual(len(client_info), 1)

            dct = client_info[client.event_queue.id]
            self.assertEqual(dct['client'].apply_markdown, apply_markdown)
            self.assertEqual(dct['client'].client_gravatar, client_gravatar)
            self.assertEqual(dct['client'].user_profile_id, hamlet.id)
            self.assertEqual(dct['flags'], ['mentioned'])
            self.assertEqual(dct['is_sender'], False)

        test_get_info(apply_markdown=False, client_gravatar=False)
        test_get_info(apply_markdown=True, client_gravatar=False)

        test_get_info(apply_markdown=False, client_gravatar=True)
        test_get_info(apply_markdown=True, client_gravatar=True)

    def test_process_message_event_with_mocked_client_info(self) -> None:
        hamlet = self.example_user("hamlet")
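
        # MockClient stands in for a ClientDescriptor, implementing just
        # the interface that process_message_event relies on.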
        class MockClient:
            def __init__(self, user_profile_id: int,
                         apply_markdown: bool,
                         client_gravatar: bool) -> None:
                self.user_profile_id = user_profile_id
                self.apply_markdown = apply_markdown
                self.client_gravatar = client_gravatar
                self.client_type_name = 'whatever'
                self.events: List[Dict[str, Any]] = []

            def accepts_messages(self) -> bool:
                return True

            def accepts_event(self, event: Dict[str, Any]) -> bool:
                assert event['type'] == 'message'
                return True

            def add_event(self, event: Dict[str, Any]) -> None:
                self.events.append(event)

        client1 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=False,
        )

        client2 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=False,
        )

        client3 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=True,
        )

        client4 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=True,
        )

        client_info = {
            'client:1': dict(
                client=client1,
                flags=['starred'],
            ),
            'client:2': dict(
                client=client2,
                flags=['has_alert_word'],
            ),
            'client:3': dict(
                client=client3,
                flags=[],
            ),
            'client:4': dict(
                client=client4,
                flags=[],
            ),
        }

        sender = hamlet

        message_event = dict(
            message_dict=dict(
                id=999,
                content='**hello**',
                rendered_content='<b>hello</b>',
                sender_id=sender.id,
                type='stream',
                client='website',

                # NOTE: Some of these fields are clutter, but some
                # will be useful when we let clients specify
                # that they can compute their own gravatar URLs.
                sender_email=sender.email,
                sender_delivery_email=sender.delivery_email,
                sender_realm_id=sender.realm_id,
                sender_avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                sender_avatar_version=1,
                sender_is_mirror_dummy=None,
                recipient_type=None,
                recipient_type_id=None,
            ),
        )

        # Setting users to `[]` bypasses code we don't care about
        # for this test--we assume client_info is correct in our mocks,
        # and we are interested in how messages are put on event queues.
        users: List[Dict[str, Any]] = []

        with mock.patch('zerver.tornado.event_queue.get_client_info_for_message_event',
                        return_value=client_info):
            process_message_event(message_event, users)

        # We are not closely examining avatar_url at this point, so
        # just sanity check them and then delete the keys so that
        # upcoming comparisons work.
        for client in [client1, client2]:
            message = client.events[0]['message']
            self.assertIn('gravatar.com', message['avatar_url'])
            message.pop('avatar_url')

        self.assertEqual(client1.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=['starred'],
            ),
        ])

        self.assertEqual(client2.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=['has_alert_word'],
            ),
        ])
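
        # Clients 3 and 4 set client_gravatar=True, so the server sends
        # avatar_url=None and leaves computing the gravatar URL to the
        # client.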
        self.assertEqual(client3.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=[],
            ),
        ])

        self.assertEqual(client4.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=[],
            ),
        ])

class FetchQueriesTest(ZulipTestCase):
    def test_queries(self) -> None:
        user = self.example_user("hamlet")

        self.login_user(user)

        flush_per_request_caches()
        with queries_captured() as queries:
            with mock.patch('zerver.lib.events.always_want') as want_mock:
                fetch_initial_state_data(
                    user_profile=user,
                    event_types=None,
                    queue_id='x',
                    client_gravatar=False,
                    user_avatar_url_field_optional=False,
                )

        self.assert_length(queries, 30)
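
        # expected_counts maps each event type to the number of database
        # queries we expect its fetch to issue.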
        expected_counts = dict(
            alert_words=1,
            custom_profile_fields=1,
            default_streams=1,
            default_stream_groups=1,
            hotspots=0,
            message=1,
            muted_topics=1,
            pointer=0,
            presence=1,
            realm=0,
            realm_bot=1,
            realm_domains=1,
            realm_embedded_bots=0,
            realm_incoming_webhook_bots=0,
            realm_emoji=1,
            realm_filters=1,
            realm_user=3,
            realm_user_groups=2,
            recent_private_conversations=1,
            starred_messages=1,
            stream=2,
            stop_words=0,
            subscription=5,
            update_display_settings=0,
            update_global_notifications=0,
            update_message_flags=5,
            user_status=1,
            video_calls=0,
        )

        wanted_event_types = {
            item[0][0] for item
            in want_mock.call_args_list
        }

        self.assertEqual(wanted_event_types, set(expected_counts))
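
        # Verify the query count for each event type in isolation; the
        # update_message_flags data is only fetched alongside 'message',
        # so we register both event types in that case.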
        for event_type in sorted(wanted_event_types):
            count = expected_counts[event_type]
            flush_per_request_caches()
            with queries_captured() as queries:
                if event_type == 'update_message_flags':
                    event_types = ['update_message_flags', 'message']
                else:
                    event_types = [event_type]

                fetch_initial_state_data(
                    user_profile=user,
                    event_types=event_types,
                    queue_id='x',
                    client_gravatar=False,
                    user_avatar_url_field_optional=False,
                )
            self.assert_length(queries, count)
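
# _default_all_public_streams prefers an explicitly passed value and falls
# back to the user's default_all_public_streams setting only when the
# caller passes None.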
class TestEventsRegisterAllPublicStreamsDefaults(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email

    def test_use_passed_all_public_true_default_false(self) -> None:
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_true_default(self) -> None:
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_false_default_false(self) -> None:
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_passed_all_public_false_default_true(self) -> None:
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_true_default_for_none(self) -> None:
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertTrue(result)

    def test_use_false_default_for_none(self) -> None:
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertFalse(result)
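
# _default_narrow behaves analogously: a non-empty narrow passed by the
# caller wins, and the user's default_events_register_stream is used only
# when the passed narrow is empty.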
class TestEventsRegisterNarrowDefaults(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email
        self.stream = get_stream('Verona', self.user_profile.realm)

    def test_use_passed_narrow_no_default(self) -> None:
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [['stream', 'my_stream']])
        self.assertEqual(result, [['stream', 'my_stream']])

    def test_use_passed_narrow_with_default(self) -> None:
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [['stream', 'my_stream']])
        self.assertEqual(result, [['stream', 'my_stream']])

    def test_use_default_if_narrow_is_empty(self) -> None:
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [['stream', 'Verona']])

    def test_use_narrow_if_default_is_none(self) -> None:
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [])
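
# The "zulipinternal" realm hosts the system bots, so raw user data fetched
# there should include every cross-realm bot.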
class TestGetRawUserDataSystemBotRealm(ZulipTestCase):
    def test_get_raw_user_data_on_system_bot_realm(self) -> None:
        result = get_raw_user_data(get_realm("zulipinternal"), self.example_user('hamlet'),
                                   client_gravatar=True, user_avatar_url_field_optional=True)

        for bot_email in settings.CROSS_REALM_BOT_EMAILS:
            bot_profile = get_system_bot(bot_email)
            self.assertTrue(bot_profile.id in result)
            self.assertTrue(result[bot_profile.id]['is_cross_realm_bot'])