import datetime
import os
import re
import ujson

from django.http import HttpResponse
from django.test import override_settings
from mock import MagicMock, patch
import urllib
from typing import Any, Dict, List

from zerver.lib.actions import do_create_user
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    HostRequestMock, queries_captured, get_user_messages
)
from zerver.lib.soft_deactivation import do_soft_deactivate_users
from zerver.lib.test_runner import slow
from zerver.models import (
    get_realm, get_stream, get_user, UserProfile, UserMessage, Recipient,
    flush_per_request_caches, DefaultStream, Realm,
)
from zerver.views.home import home, sent_time_in_epoch_seconds

class HomeTest(ZulipTestCase):
    def test_home(self) -> None:
        # Keep this list sorted!!!
        html_bits = [
            'Compose your message here...',
            'Exclude messages with topic',
            'Keyboard shortcuts',
            'Loading...',
            'Manage streams',
            'Narrow by topic',
            'Next message',
            'Search streams',
            'Welcome to Zulip',
            # Verify that the app styles get included
            'app-styles-stubentry.js',
            'var page_params',
        ]

        # Keep this list sorted!!!
        expected_keys = [
            "alert_words",
            "avatar_source",
            "avatar_url",
            "avatar_url_medium",
            "bot_types",
            "can_create_streams",
            "cross_realm_bots",
            "custom_profile_field_types",
            "custom_profile_fields",
            "debug_mode",
            "default_language",
            "default_language_name",
            "development_environment",
            "email",
            "emojiset",
            "emojiset_choices",
            "enable_desktop_notifications",
            "enable_digest_emails",
            "enable_offline_email_notifications",
            "enable_offline_push_notifications",
            "enable_online_push_notifications",
            "enable_sounds",
            "enable_stream_desktop_notifications",
            "enable_stream_email_notifications",
            "enable_stream_push_notifications",
            "enable_stream_sounds",
            "enter_sends",
            "first_in_realm",
            "full_name",
            "furthest_read_time",
            "has_mobile_devices",
            "have_initial_messages",
            "high_contrast_mode",
            "hotspots",
            "initial_servertime",
            "is_admin",
            "jitsi_server_url",
            "language_list",
            "language_list_dbl_col",
            "last_event_id",
            "left_side_userlist",
            "login_page",
            "max_avatar_file_size",
            "max_icon_file_size",
            "max_message_id",
            "maxfilesize",
            "message_content_in_email_notifications",
            "muted_topics",
            "narrow",
            "narrow_stream",
            "needs_tutorial",
            "never_subscribed",
            "night_mode",
"password_min_guesses",
|
2017-07-06 22:32:29 +02:00
|
|
|
"password_min_length",
|
2017-03-08 11:57:55 +01:00
|
|
|
"pm_content_in_desktop_notifications",
|
2017-04-24 21:33:48 +02:00
|
|
|
"pointer",
|
2017-03-08 11:57:55 +01:00
|
|
|
"poll_timeout",
|
2017-04-24 21:23:50 +02:00
|
|
|
"presences",
|
2017-03-08 11:57:55 +01:00
|
|
|
"prompt_for_invites",
|
2017-04-24 21:40:16 +02:00
|
|
|
"queue_id",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_add_emoji_by_admins_only",
|
2017-12-03 00:50:48 +01:00
|
|
|
"realm_allow_community_topic_editing",
|
2017-07-16 11:00:44 +02:00
|
|
|
"realm_allow_edit_history",
|
2017-11-08 13:40:46 +01:00
|
|
|
"realm_allow_message_deleting",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_allow_message_editing",
|
|
|
|
"realm_authentication_methods",
|
2018-04-23 14:51:30 +02:00
|
|
|
"realm_available_video_chat_providers",
|
2018-01-29 16:10:54 +01:00
|
|
|
"realm_bot_creation_policy",
|
2017-03-05 04:17:12 +01:00
|
|
|
"realm_bot_domain",
|
2017-04-21 08:24:30 +02:00
|
|
|
"realm_bots",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_create_stream_by_admins_only",
|
|
|
|
"realm_default_language",
|
2017-11-01 18:20:34 +01:00
|
|
|
"realm_default_stream_groups",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_default_streams",
|
2018-03-30 22:38:16 +02:00
|
|
|
"realm_default_twenty_four_hour_time",
|
2017-03-18 20:19:44 +01:00
|
|
|
"realm_description",
|
2018-03-05 20:19:07 +01:00
|
|
|
"realm_disallow_disposable_email_addresses",
|
2017-04-20 07:30:51 +02:00
|
|
|
"realm_domains",
|
2017-10-24 20:59:11 +02:00
|
|
|
"realm_email_auth_enabled",
|
2017-03-13 18:41:27 +01:00
|
|
|
"realm_email_changes_disabled",
|
2017-10-19 16:25:06 +02:00
|
|
|
"realm_embedded_bots",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_emoji",
|
|
|
|
"realm_filters",
|
2018-04-23 14:51:30 +02:00
|
|
|
"realm_google_hangouts_domain",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_icon_source",
|
|
|
|
"realm_icon_url",
|
2017-03-13 14:42:03 +01:00
|
|
|
"realm_inline_image_preview",
|
|
|
|
"realm_inline_url_embed_preview",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_invite_by_admins_only",
|
|
|
|
"realm_invite_required",
|
2017-04-20 08:03:44 +02:00
|
|
|
"realm_is_zephyr_mirror_realm",
|
2017-04-20 07:50:34 +02:00
|
|
|
"realm_mandatory_topics",
|
2017-11-26 09:12:10 +01:00
|
|
|
"realm_message_content_delete_limit_seconds",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_message_content_edit_limit_seconds",
|
2016-11-30 10:42:58 +01:00
|
|
|
"realm_message_retention_days",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_name",
|
2017-03-13 18:33:49 +01:00
|
|
|
"realm_name_changes_disabled",
|
2018-01-06 23:30:43 +01:00
|
|
|
"realm_name_in_notifications",
|
2017-10-21 18:36:09 +02:00
|
|
|
"realm_non_active_users",
|
2017-05-17 03:48:47 +02:00
|
|
|
"realm_notifications_stream_id",
|
2017-04-20 08:21:31 +02:00
|
|
|
"realm_password_auth_enabled",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_presence_disabled",
|
2018-05-08 20:45:13 +02:00
|
|
|
"realm_push_notifications_enabled",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_restricted_to_domain",
|
2018-02-18 09:34:54 +01:00
|
|
|
"realm_send_welcome_emails",
|
2017-04-20 07:59:03 +02:00
|
|
|
"realm_show_digest_email",
|
2017-10-20 16:55:04 +02:00
|
|
|
"realm_signup_notifications_stream_id",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_uri",
|
2017-11-07 07:56:26 +01:00
|
|
|
"realm_user_groups",
|
2017-04-24 21:59:07 +02:00
|
|
|
"realm_users",
|
2018-04-23 14:51:30 +02:00
|
|
|
"realm_video_chat_provider",
|
2017-03-08 11:57:55 +01:00
|
|
|
"realm_waiting_period_threshold",
|
2017-08-28 23:01:18 +02:00
|
|
|
"root_domain_uri",
|
2017-03-08 11:57:55 +01:00
|
|
|
"save_stacktraces",
|
|
|
|
"server_generation",
|
2017-03-13 14:42:03 +01:00
|
|
|
"server_inline_image_preview",
|
|
|
|
"server_inline_url_embed_preview",
|
2018-04-30 11:48:00 +02:00
|
|
|
"stream_description_max_length",
|
|
|
|
"stream_name_max_length",
|
2017-04-21 07:43:51 +02:00
|
|
|
"subscriptions",
|
2017-03-08 11:57:55 +01:00
|
|
|
"test_suite",
|
2017-03-14 10:53:09 +01:00
|
|
|
"timezone",
|
2018-01-15 19:36:32 +01:00
|
|
|
"translate_emoticons",
|
2017-03-08 11:57:55 +01:00
|
|
|
"twenty_four_hour_time",
|
2017-05-23 03:02:01 +02:00
|
|
|
"unread_msgs",
|
2017-04-21 07:49:41 +02:00
|
|
|
"unsubscribed",
|
2017-03-08 11:57:55 +01:00
|
|
|
"use_websockets",
|
|
|
|
"user_id",
|
2018-01-26 22:09:38 +01:00
|
|
|
"warn_no_email",
|
2017-03-08 11:57:55 +01:00
|
|
|
"zulip_version",
|
|
|
|
]
|
|
|
|
|
2017-05-25 01:40:26 +02:00
|
|
|

        email = self.example_email("hamlet")

        # Verify fails if logged-out
        result = self.client_get('/')
        self.assertEqual(result.status_code, 302)

        self.login(email)

        # Create bot for realm_bots testing. Must be done before fetching home_page.
        bot_info = {
            'full_name': 'The Bot of Hamlet',
            'short_name': 'hambot',
        }
        self.client_post("/json/bots", bot_info)

        # Verify succeeds once logged-in
        flush_per_request_caches()
        with queries_captured() as queries:
            with patch('zerver.lib.cache.cache_set') as cache_mock:
                result = self._get_home_page(stream='Denmark')

        self.assert_length(queries, 41)
        self.assert_length(cache_mock.call_args_list, 7)

        html = result.content.decode('utf-8')

        for html_bit in html_bits:
            if html_bit not in html:
                raise AssertionError('%s not in result' % (html_bit,))

        page_params = self._get_page_params(result)

        actual_keys = sorted([str(k) for k in page_params.keys()])

        self.assertEqual(actual_keys, expected_keys)

        # TODO: Inspect the page_params data further.
        # print(ujson.dumps(page_params, indent=2))
        realm_bots_expected_keys = [
            'api_key',
            'avatar_url',
            'bot_type',
            'default_all_public_streams',
            'default_events_register_stream',
            'default_sending_stream',
            'email',
            'full_name',
            'is_active',
            'owner',
            'services',
            'user_id',
        ]

        realm_bots_actual_keys = sorted([str(key) for key in page_params['realm_bots'][0].keys()])
        self.assertEqual(realm_bots_actual_keys, realm_bots_expected_keys)

    def test_num_queries_for_realm_admin(self) -> None:
        # Verify number of queries for Realm admin isn't much higher than for normal users.
        self.login(self.example_email("iago"))
        flush_per_request_caches()
        with queries_captured() as queries:
            with patch('zerver.lib.cache.cache_set') as cache_mock:
                result = self._get_home_page()
                self.assertEqual(result.status_code, 200)
                self.assert_length(cache_mock.call_args_list, 6)
        self.assert_length(queries, 37)
@slow("Creates and subscribes 10 users in a loop. Should use bulk queries.")
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_num_queries_with_streams(self) -> None:
|
2017-09-19 23:34:07 +02:00
|
|
|
main_user = self.example_user('hamlet')
|
|
|
|
other_user = self.example_user('cordelia')
|
|
|
|
|
|
|
|
realm_id = main_user.realm_id
|
|
|
|
|
|
|
|
self.login(main_user.email)
|
|
|
|
|
|
|
|
# Try to make page-load do extra work for various subscribed
|
|
|
|
# streams.
|
|
|
|
for i in range(10):
|
|
|
|
stream_name = 'test_stream_' + str(i)
|
|
|
|
stream = self.make_stream(stream_name)
|
|
|
|
DefaultStream.objects.create(
|
|
|
|
realm_id=realm_id,
|
|
|
|
stream_id=stream.id
|
|
|
|
)
|
|
|
|
for user in [main_user, other_user]:
|
|
|
|
self.subscribe(user, stream_name)
|
|
|
|
|
|
|
|
# Simulate hitting the page the first time to avoid some noise
|
|
|
|
# related to initial logins.
|
|
|
|
self._get_home_page()
|
|
|
|
|
|
|
|
# Then for the second page load, measure the number of queries.
|
|
|
|
flush_per_request_caches()
|
|
|
|
with queries_captured() as queries2:
|
|
|
|
result = self._get_home_page()
|
|
|
|
|
2018-05-05 00:27:25 +02:00
|
|
|
self.assert_length(queries2, 35)
|
2017-09-19 23:34:07 +02:00
|
|
|
|
|
|
|
# Do a sanity check that our new streams were in the payload.
|
|
|
|
html = result.content.decode('utf-8')
|
|
|
|
self.assertIn('test_stream_7', html)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
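
    # Helper for loading the home page in tests: the calls to
    # request_event_queue and get_user_events are patched out, so the view
    # renders without needing a real event queue backend.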
def _get_home_page(self, **kwargs: Any) -> HttpResponse:
|
2017-03-08 11:57:55 +01:00
|
|
|
with \
|
|
|
|
patch('zerver.lib.events.request_event_queue', return_value=42), \
|
|
|
|
patch('zerver.lib.events.get_user_events', return_value=[]):
|
|
|
|
result = self.client_get('/', dict(**kwargs))
|
|
|
|
return result
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
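
    # Helper that pulls the `var page_params = {...};` blob out of the
    # rendered home page HTML and parses it as JSON.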
    def _get_page_params(self, result: HttpResponse) -> Dict[str, Any]:
        html = result.content.decode('utf-8')
        lines = html.split('\n')
        page_params_line = [l for l in lines if re.match(r'^\s*var page_params', l)][0]
        page_params_json = page_params_line.split(' = ')[1].rstrip(';')
        page_params = ujson.loads(page_params_json)
        return page_params

    def _sanity_check(self, result: HttpResponse) -> None:
        '''
        Use this for tests that are geared toward specific edge cases, but
        which still want the home page to load properly.
        '''
        html = result.content.decode('utf-8')
        if 'Compose your message' not in html:
            raise AssertionError('Home page probably did not load.')

    def test_terms_of_service(self) -> None:
        user = self.example_user('hamlet')
        email = user.email
        self.login(email)

        for user_tos_version in [None, '1.1', '2.0.3.4']:
            user.tos_version = user_tos_version
            user.save()

            with \
                    self.settings(TERMS_OF_SERVICE='whatever'), \
                    self.settings(TOS_VERSION='99.99'):

                result = self.client_get('/', dict(stream='Denmark'))

            html = result.content.decode('utf-8')
            self.assertIn('There are new Terms of Service', html)

    def test_terms_of_service_first_time_template(self) -> None:
        user = self.example_user('hamlet')
        email = user.email
        self.login(email)

        user.tos_version = None
        user.save()

        with \
                self.settings(FIRST_TIME_TOS_TEMPLATE='hello.html'), \
                self.settings(TOS_VERSION='99.99'):
            result = self.client_post('/accounts/accept_terms/')
            self.assertEqual(result.status_code, 200)
            self.assert_in_response("I agree to the", result)
            self.assert_in_response("most productive group chat", result)

    def test_accept_terms_of_service(self) -> None:
        email = self.example_email("hamlet")
        self.login(email)

        result = self.client_post('/accounts/accept_terms/')
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("I agree to the", result)

        result = self.client_post('/accounts/accept_terms/', {'terms': True})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result['Location'], '/')

    def test_bad_narrow(self) -> None:
        email = self.example_email("hamlet")
        self.login(email)
        with patch('logging.exception') as mock:
            result = self._get_home_page(stream='Invalid Stream')
        mock.assert_called_once_with('Narrow parsing')
        self._sanity_check(result)

    def test_bad_pointer(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        user_profile.pointer = 999999
        user_profile.save()

        self.login(email)
        with patch('logging.warning') as mock:
            result = self._get_home_page()
        mock.assert_called_once_with('hamlet@zulip.com has invalid pointer 999999')
        self._sanity_check(result)

    def test_topic_narrow(self) -> None:
        email = self.example_email("hamlet")
        self.login(email)
        result = self._get_home_page(stream='Denmark', topic='lunch')
        self._sanity_check(result)
        html = result.content.decode('utf-8')
        self.assertIn('lunch', html)

    def test_notifications_stream(self) -> None:
        email = self.example_email("hamlet")
        realm = get_realm('zulip')
        realm.notifications_stream_id = get_stream('Denmark', realm).id
        realm.save()
        self.login(email)
        result = self._get_home_page()
        page_params = self._get_page_params(result)
        self.assertEqual(page_params['realm_notifications_stream_id'], get_stream('Denmark', realm).id)
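
    # Helper used by test_people below: creates a default bot owned by
    # `owner`, so that it shows up in the realm_bots payload.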
    def create_bot(self, owner: UserProfile, bot_email: str, bot_name: str) -> UserProfile:
        user = do_create_user(
            email=bot_email,
            password='123',
            realm=owner.realm,
            full_name=bot_name,
            short_name=bot_name,
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=owner
        )
        return user

    def create_non_active_user(self, realm: Realm, email: str, name: str) -> UserProfile:
        user = do_create_user(
            email=email,
            password='123',
            realm=realm,
            full_name=name,
            short_name=name,
        )

        # Doing a full-stack deactivation would be expensive here,
        # and we really only need to flip the flag to get a valid
        # test.
        user.is_active = False
        user.save()
        return user

    def test_signup_notifications_stream(self) -> None:
        email = self.example_email("hamlet")
        realm = get_realm('zulip')
        realm.signup_notifications_stream = get_stream('Denmark', realm)
        realm.save()
        self.login(email)
        result = self._get_home_page()
        page_params = self._get_page_params(result)
        self.assertEqual(page_params['realm_signup_notifications_stream_id'], get_stream('Denmark', realm).id)

    @slow('creating users and loading home page')
    def test_people(self) -> None:
        hamlet = self.example_user('hamlet')
        realm = get_realm('zulip')
        self.login(hamlet.email)

        for i in range(3):
            self.create_bot(
                owner=hamlet,
                bot_email='bot-%d@zulip.com' % (i,),
                bot_name='Bot %d' % (i,),
            )

        for i in range(3):
            self.create_non_active_user(
                realm=realm,
                email='defunct-%d@zulip.com' % (i,),
                name='Defunct User %d' % (i,),
            )

        result = self._get_home_page()
        page_params = self._get_page_params(result)

        '''
        We send three lists of users. The first two below are disjoint
        lists of users, and the records we send for them have identical
        structure.

        The realm_bots bucket is somewhat redundant, since all bots will
        be in one of the first two buckets. They do include fields, however,
        that normal users don't care about, such as default_sending_stream.
        '''

        buckets = [
            'realm_users',
            'realm_non_active_users',
            'realm_bots',
        ]

        for field in buckets:
            users = page_params[field]
            self.assertTrue(len(users) >= 3, field)
            for rec in users:
                self.assertEqual(rec['user_id'],
                                 get_user(rec['email'], realm).id)
                if field == 'realm_bots':
                    self.assertNotIn('is_bot', rec)
                    self.assertIn('is_active', rec)
                    self.assertIn('owner', rec)
                else:
                    self.assertIn('is_bot', rec)
                    self.assertNotIn('is_active', rec)

        active_emails = {p['email'] for p in page_params['realm_users']}
        non_active_emails = {p['email'] for p in page_params['realm_non_active_users']}
        bot_emails = {p['email'] for p in page_params['realm_bots']}

        self.assertIn(hamlet.email, active_emails)
        self.assertIn('defunct-1@zulip.com', non_active_emails)

        # Bots can show up in multiple buckets.
        self.assertIn('bot-2@zulip.com', bot_emails)
        self.assertIn('bot-2@zulip.com', active_emails)

        # Make sure nobody got mis-bucketed.
        self.assertNotIn(hamlet.email, non_active_emails)
        self.assertNotIn('defunct-1@zulip.com', active_emails)

        cross_bots = page_params['cross_realm_bots']
        self.assertEqual(len(cross_bots), 5)
        cross_bots.sort(key=lambda d: d['email'])

        notification_bot = self.notification_bot()

        by_email = lambda d: d['email']

        self.assertEqual(sorted(cross_bots, key=by_email), sorted([
            dict(
                user_id=get_user('new-user-bot@zulip.com', get_realm('zulip')).id,
                is_admin=False,
                email='new-user-bot@zulip.com',
                full_name='Zulip New User Bot',
                is_bot=True
            ),
            dict(
                user_id=get_user('emailgateway@zulip.com', get_realm('zulip')).id,
                is_admin=False,
                email='emailgateway@zulip.com',
                full_name='Email Gateway',
                is_bot=True
            ),
            dict(
                user_id=get_user('feedback@zulip.com', get_realm('zulip')).id,
                is_admin=False,
                email='feedback@zulip.com',
                full_name='Zulip Feedback Bot',
                is_bot=True
            ),
            dict(
                user_id=notification_bot.id,
                is_admin=False,
                email=notification_bot.email,
                full_name='Notification Bot',
                is_bot=True
            ),
            dict(
                user_id=get_user('welcome-bot@zulip.com', get_realm('zulip')).id,
                is_admin=False,
                email='welcome-bot@zulip.com',
                full_name='Welcome Bot',
                is_bot=True
            ),
        ], key=by_email))

    def test_new_stream(self) -> None:
        user_profile = self.example_user("hamlet")
        stream_name = 'New stream'
        self.subscribe(user_profile, stream_name)
        self.login(user_profile.email)
        result = self._get_home_page(stream=stream_name)
        page_params = self._get_page_params(result)
        self.assertEqual(page_params['narrow_stream'], stream_name)
        self.assertEqual(page_params['narrow'], [dict(operator='stream', operand=stream_name)])
        self.assertEqual(page_params['pointer'], -1)
        self.assertEqual(page_params['max_message_id'], -1)
        self.assertEqual(page_params['have_initial_messages'], False)

    def test_invites_by_admins_only(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email

        realm = user_profile.realm
        realm.invite_by_admins_only = True
        realm.save()

        self.login(email)
        self.assertFalse(user_profile.is_realm_admin)
        result = self._get_home_page()
        html = result.content.decode('utf-8')
        self.assertNotIn('Invite more users', html)

        user_profile.is_realm_admin = True
        user_profile.save()
        result = self._get_home_page()
        html = result.content.decode('utf-8')
        self.assertIn('Invite more users', html)

    def test_desktop_home(self) -> None:
        email = self.example_email("hamlet")
        self.login(email)
        result = self.client_get("/desktop_home")
        self.assertEqual(result.status_code, 301)
        self.assertTrue(result["Location"].endswith("/desktop_home/"))
        result = self.client_get("/desktop_home/")
        self.assertEqual(result.status_code, 302)
        path = urllib.parse.urlparse(result['Location']).path
        self.assertEqual(path, "/")

    def test_apps_view(self) -> None:
        result = self.client_get('/apps')
        self.assertEqual(result.status_code, 301)
        self.assertTrue(result['Location'].endswith('/apps/'))

        with self.settings(ZILENCER_ENABLED=False):
            result = self.client_get('/apps/')
            self.assertEqual(result.status_code, 301)
            self.assertTrue(result['Location'] == 'https://zulipchat.com/apps/')

        with self.settings(ZILENCER_ENABLED=True):
            result = self.client_get('/apps/')
            self.assertEqual(result.status_code, 200)
            html = result.content.decode('utf-8')
            self.assertIn('Apps for every platform.', html)

    def test_generate_204(self) -> None:
        email = self.example_email("hamlet")
        self.login(email)
        result = self.client_get("/api/v1/generate_204")
        self.assertEqual(result.status_code, 204)

    def test_message_sent_time(self) -> None:
        epoch_seconds = 1490472096
        pub_date = datetime.datetime.fromtimestamp(epoch_seconds)
        user_message = MagicMock()
        user_message.message.pub_date = pub_date
        self.assertEqual(sent_time_in_epoch_seconds(user_message), epoch_seconds)

    def test_handlebars_compile_error(self) -> None:
        request = HostRequestMock()
        with self.settings(DEVELOPMENT=True, TEST_SUITE=False):
            with patch('os.path.exists', return_value=True):
                result = home(request)
        self.assertEqual(result.status_code, 500)
        self.assert_in_response('Error compiling handlebars templates.', result)

    def test_subdomain_homepage(self) -> None:
        email = self.example_email("hamlet")
        self.login(email)
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            with patch('zerver.views.home.get_subdomain', return_value=""):
                result = self._get_home_page()
                self.assertEqual(result.status_code, 200)
                self.assert_in_response('most productive group chat', result)

            with patch('zerver.views.home.get_subdomain', return_value="subdomain"):
                result = self._get_home_page()
                self._sanity_check(result)
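
    # Thin wrapper around send_stream_message; by default the message goes
    # from iago to the Denmark stream under the topic 'foo'.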
    def send_test_message(self, content: str, sender_name: str='iago',
                          stream_name: str='Denmark', topic_name: str='foo') -> None:
        sender = self.example_email(sender_name)
        self.send_stream_message(sender, stream_name,
                                 content=content, topic_name=topic_name)
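
    # Fetching a narrowed home page brings a soft-deactivated user back (the
    # tests below assert that long_term_idle gets cleared); return the unread
    # message count reported in page_params.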
    def soft_activate_and_get_unread_count(self, stream: str='Denmark', topic: str='foo') -> int:
        stream_narrow = self._get_home_page(stream=stream, topic=topic)
        page_params = self._get_page_params(stream_narrow)
        return page_params['unread_msgs']['count']

    def test_unread_count_user_soft_deactivation(self) -> None:
        # In this test we make sure that if a soft-deactivated user had
        # unread messages before deactivation, they remain unread after
        # reactivation.
        long_term_idle_user = self.example_user('hamlet')
        self.login(long_term_idle_user.email)
        message = 'Test Message 1'
        self.send_test_message(message)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 1)
        query_count = len(queries)
        user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(user_msg_list[-1].content, message)
        self.logout()

        do_soft_deactivate_users([long_term_idle_user])

        self.login(long_term_idle_user.email)
        message = 'Test Message 2'
        self.send_test_message(message)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertNotEqual(idle_user_msg_list[-1].content, message)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 2)
        # Test here for query count to be at least 5 greater than previous count.
        # This indirectly ensures that add_missing_messages() was called.
        self.assertGreaterEqual(len(queries) - query_count, 5)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(idle_user_msg_list[-1].content, message)
@slow("Loads home page data several times testing different cases")
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_user_soft_deactivations(self) -> None:
|
2017-07-16 09:41:38 +02:00
|
|
|
long_term_idle_user = self.example_user('hamlet')
|
2017-08-18 10:09:54 +02:00
|
|
|
# We are sending this message to ensure that long_term_idle_user has
|
|
|
|
# at least one UserMessage row.
|
2017-10-27 17:57:23 +02:00
|
|
|
self.send_test_message('Testing', sender_name='hamlet')
|
2017-07-16 09:41:38 +02:00
|
|
|
do_soft_deactivate_users([long_term_idle_user])
|
|
|
|
|
|
|
|
message = 'Test Message 1'
|
2017-10-27 17:57:23 +02:00
|
|
|
self.send_test_message(message)
|
2017-07-16 09:41:38 +02:00
|
|
|
self.login(long_term_idle_user.email)
|
|
|
|
with queries_captured() as queries:
|
2017-08-18 10:09:54 +02:00
|
|
|
self.assertEqual(self.soft_activate_and_get_unread_count(), 2)
|
2017-07-16 09:41:38 +02:00
|
|
|
query_count = len(queries)
|
|
|
|
long_term_idle_user.refresh_from_db()
|
|
|
|
self.assertFalse(long_term_idle_user.long_term_idle)
|
|
|
|
idle_user_msg_list = get_user_messages(long_term_idle_user)
|
|
|
|
self.assertEqual(idle_user_msg_list[-1].content, message)
|
|
|
|
|
|
|
|
message = 'Test Message 2'
|
2017-10-27 17:57:23 +02:00
|
|
|
self.send_test_message(message)
|
2017-07-16 09:41:38 +02:00
|
|
|
with queries_captured() as queries:
|
2017-08-18 10:09:54 +02:00
|
|
|
self.assertEqual(self.soft_activate_and_get_unread_count(), 3)
|
2017-07-16 09:41:38 +02:00
|
|
|
# Test here for query count to be at least 5 less than previous count.
|
|
|
|
# This will assure add_missing_messages() isn't repeatedly called.
|
|
|
|
self.assertGreaterEqual(query_count - len(queries), 5)
|
|
|
|
idle_user_msg_list = get_user_messages(long_term_idle_user)
|
|
|
|
self.assertEqual(idle_user_msg_list[-1].content, message)
|
|
|
|
self.logout()
|
|
|
|
|
|
|
|
do_soft_deactivate_users([long_term_idle_user])
|
|
|
|
|
|
|
|
message = 'Test Message 3'
|
2017-10-27 17:57:23 +02:00
|
|
|
self.send_test_message(message)
|
2017-07-16 09:41:38 +02:00
|
|
|
self.login(long_term_idle_user.email)
|
|
|
|
with queries_captured() as queries:
|
2017-08-18 10:09:54 +02:00
|
|
|
self.assertEqual(self.soft_activate_and_get_unread_count(), 4)
|
2017-07-16 09:41:38 +02:00
|
|
|
query_count = len(queries)
|
|
|
|
long_term_idle_user.refresh_from_db()
|
|
|
|
self.assertFalse(long_term_idle_user.long_term_idle)
|
|
|
|
idle_user_msg_list = get_user_messages(long_term_idle_user)
|
|
|
|
self.assertEqual(idle_user_msg_list[-1].content, message)
|
|
|
|
|
|
|
|
message = 'Test Message 4'
|
2017-10-27 17:57:23 +02:00
|
|
|
self.send_test_message(message)
|
2017-07-16 09:41:38 +02:00
|
|
|
with queries_captured() as queries:
|
2017-08-18 10:09:54 +02:00
|
|
|
self.assertEqual(self.soft_activate_and_get_unread_count(), 5)
|
2017-07-16 09:41:38 +02:00
|
|
|
self.assertGreaterEqual(query_count - len(queries), 5)
|
|
|
|
idle_user_msg_list = get_user_messages(long_term_idle_user)
|
|
|
|
self.assertEqual(idle_user_msg_list[-1].content, message)
|
|
|
|
self.logout()
|