import calendar
from datetime import timedelta, timezone
from typing import TYPE_CHECKING, Any, Dict
from unittest.mock import patch
from urllib.parse import urlsplit

import orjson
import time_machine
from django.conf import settings
from django.test import override_settings
from django.utils.timezone import now as timezone_now

from corporate.models import Customer, CustomerPlan
from zerver.actions.create_user import do_create_user
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
from zerver.actions.users import change_user_is_active
from zerver.lib.compatibility import LAST_SERVER_UPGRADE_TIME, is_outdated_server
from zerver.lib.home import (
    get_billing_info,
    get_furthest_read_time,
    promote_sponsoring_zulip_in_realm,
)
from zerver.lib.soft_deactivation import do_soft_deactivate_users
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import get_user_messages, queries_captured
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.models import DefaultStream, Draft, Realm, UserActivity, UserProfile
from zerver.models.realms import get_realm
from zerver.models.streams import get_stream
from zerver.models.users import get_system_bot, get_user
from zerver.worker.queue_processors import UserActivityWorker

if TYPE_CHECKING:
    from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse

logger_string = "zulip.soft_deactivation"


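# Tests for the logged-in home ("/") view: what gets rendered, which keys are
# exposed in page_params/state_data, database query counts, and how billing
# and authentication options are gated by settings and plan type.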
class HomeTest(ZulipTestCase):
    # Keep this list sorted!!!
    expected_page_params_keys = [
        "apps_page_url",
        "bot_types",
        "corporate_enabled",
        "development_environment",
        "furthest_read_time",
        "insecure_desktop_app",
        "is_spectator",
        "language_list",
        "login_page",
        "narrow",
        "narrow_stream",
        "needs_tutorial",
        "no_event_queue",
        "page_type",
        "promote_sponsoring_zulip",
        "request_language",
        "server_sentry_dsn",
        "show_billing",
        "show_plans",
        "show_remote_billing",
        "show_webathena",
        "sponsorship_pending",
        "state_data",
        "test_suite",
        "translation_data",
        "two_fa_enabled",
        "two_fa_enabled_user",
        "warn_no_email",
    ]

    expected_state_data_keys = [
        "alert_words",
        "avatar_source",
        "avatar_url",
        "avatar_url_medium",
        "can_create_private_streams",
        "can_create_public_streams",
        "can_create_streams",
        "can_create_web_public_streams",
        "can_invite_others_to_realm",
        "can_subscribe_other_users",
        "cross_realm_bots",
        "custom_profile_field_types",
        "custom_profile_fields",
        "delivery_email",
        "development_environment",
        "drafts",
        "email",
        "event_queue_longpoll_timeout_seconds",
        "full_name",
        "giphy_api_key",
        "giphy_rating_options",
        "has_zoom_token",
        "is_admin",
        "is_billing_admin",
        "is_guest",
        "is_moderator",
        "is_owner",
        "jitsi_server_url",
        "last_event_id",
        "max_avatar_file_size_mib",
        "max_file_upload_size_mib",
        "max_icon_file_size_mib",
        "max_logo_file_size_mib",
        "max_message_id",
        "max_message_length",
        "max_stream_description_length",
        "max_stream_name_length",
        "max_topic_length",
        "muted_topics",
        "muted_users",
        "never_subscribed",
        "onboarding_steps",
        "password_min_guesses",
        "password_min_length",
        "presences",
        "queue_id",
        "realm_add_custom_emoji_policy",
        "realm_allow_edit_history",
        "realm_allow_message_editing",
        "realm_authentication_methods",
        "realm_available_video_chat_providers",
        "realm_avatar_changes_disabled",
        "realm_bot_creation_policy",
        "realm_bot_domain",
        "realm_bots",
        "realm_can_access_all_users_group",
        "realm_create_multiuse_invite_group",
        "realm_create_private_stream_policy",
        "realm_create_public_stream_policy",
        "realm_create_web_public_stream_policy",
        "realm_date_created",
        "realm_default_code_block_language",
        "realm_default_external_accounts",
        "realm_default_language",
        "realm_default_stream_groups",
        "realm_default_streams",
        "realm_delete_own_message_policy",
        "realm_description",
        "realm_digest_emails_enabled",
        "realm_digest_weekday",
        "realm_disallow_disposable_email_addresses",
        "realm_domains",
        "realm_edit_topic_policy",
        "realm_email_auth_enabled",
        "realm_email_changes_disabled",
        "realm_emails_restricted_to_domains",
        "realm_embedded_bots",
        "realm_emoji",
        "realm_enable_guest_user_indicator",
        "realm_enable_read_receipts",
        "realm_enable_spectator_access",
        "realm_filters",
        "realm_giphy_rating",
        "realm_icon_source",
        "realm_icon_url",
        "realm_incoming_webhook_bots",
        "realm_inline_image_preview",
        "realm_inline_url_embed_preview",
        "realm_invite_required",
        "realm_invite_to_realm_policy",
        "realm_invite_to_stream_policy",
        "realm_is_zephyr_mirror_realm",
        "realm_jitsi_server_url",
        "realm_linkifiers",
        "realm_logo_source",
        "realm_logo_url",
        "realm_mandatory_topics",
        "realm_message_content_allowed_in_email_notifications",
        "realm_message_content_delete_limit_seconds",
        "realm_message_content_edit_limit_seconds",
        "realm_message_retention_days",
        "realm_move_messages_between_streams_limit_seconds",
        "realm_move_messages_between_streams_policy",
        "realm_move_messages_within_stream_limit_seconds",
        "realm_name",
        "realm_name_changes_disabled",
        "realm_new_stream_announcements_stream_id",
        "realm_night_logo_source",
        "realm_night_logo_url",
        "realm_non_active_users",
        "realm_org_type",
        "realm_password_auth_enabled",
        "realm_plan_type",
        "realm_playgrounds",
        "realm_presence_disabled",
        "realm_private_message_policy",
        "realm_push_notifications_enabled",
        "realm_push_notifications_enabled_end_timestamp",
        "realm_send_welcome_emails",
        "realm_signup_announcements_stream_id",
        "realm_upload_quota_mib",
        "realm_uri",
        "realm_user_group_edit_policy",
        "realm_user_groups",
        "realm_user_settings_defaults",
        "realm_users",
        "realm_video_chat_provider",
        "realm_waiting_period_threshold",
        "realm_want_advertise_in_communities_directory",
        "realm_wildcard_mention_policy",
        "realm_zulip_update_announcements_stream_id",
        "recent_private_conversations",
        "scheduled_messages",
        "server_avatar_changes_disabled",
        "server_emoji_data_url",
        "server_generation",
        "server_inline_image_preview",
        "server_inline_url_embed_preview",
        "server_jitsi_server_url",
        "server_name_changes_disabled",
        "server_needs_upgrade",
        "server_presence_offline_threshold_seconds",
        "server_presence_ping_interval_seconds",
        "server_supported_permission_settings",
        "server_timestamp",
        "server_typing_started_expiry_period_milliseconds",
        "server_typing_started_wait_period_milliseconds",
        "server_typing_stopped_wait_period_milliseconds",
        "server_web_public_streams_enabled",
        "settings_send_digest_emails",
        "starred_messages",
        "stop_words",
        "subscriptions",
        "unread_msgs",
        "unsubscribed",
        "upgrade_text_for_wide_organization_logo",
        "user_id",
        "user_settings",
        "user_status",
        "user_topics",
        "zulip_feature_level",
        "zulip_merge_base",
        "zulip_plan_is_not_limited",
        "zulip_version",
    ]

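    # Both lists above are compared against the rendered page_params payload
    # with assertCountEqual in the tests below; "state_data" is the nested
    # object carrying the initial application state used by the web app.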
    def test_home(self) -> None:
        # Keep this list sorted!!!
        html_bits = [
            "message_feed_errors_container",
            "app-loading-logo",
            # Verify that the app styles get included
            "app-stubentry.js",
            "data-params",
        ]

        self.login("hamlet")

        # Create bot for realm_bots testing. Must be done before fetching home_page.
        bot_info = {
            "full_name": "The Bot of Hamlet",
            "short_name": "hambot",
        }
        self.client_post("/json/bots", bot_info)

        # Verify succeeds once logged-in
        with self.assert_database_query_count(51):
            with patch("zerver.lib.cache.cache_set") as cache_mock:
                result = self._get_home_page(stream="Denmark")
                self.check_rendered_logged_in_app(result)
        self.assertEqual(
            set(result["Cache-Control"].split(", ")), {"must-revalidate", "no-store", "no-cache"}
        )

        self.assert_length(cache_mock.call_args_list, 6)

        html = result.content.decode()

        for html_bit in html_bits:
            if html_bit not in html:
                raise AssertionError(f"{html_bit} not in result")

        page_params = self._get_page_params(result)

        self.assertCountEqual(page_params, self.expected_page_params_keys)
        self.assertCountEqual(page_params["state_data"], self.expected_state_data_keys)

        # TODO: Inspect the page_params data further.
        # print(orjson.dumps(page_params, option=orjson.OPT_INDENT_2).decode())
        realm_bots_expected_keys = [
            "api_key",
            "avatar_url",
            "bot_type",
            "default_all_public_streams",
            "default_events_register_stream",
            "default_sending_stream",
            "email",
            "full_name",
            "is_active",
            "owner_id",
            "services",
            "user_id",
        ]

        self.assertCountEqual(page_params["state_data"]["realm_bots"][0], realm_bots_expected_keys)

    def test_home_demo_organization(self) -> None:
        realm = get_realm("zulip")

        # We construct a scheduled deletion date that's definitely in
        # the future, regardless of how long ago the Zulip realm was
        # created.
        realm.demo_organization_scheduled_deletion_date = timezone_now() + timedelta(days=1)
        realm.save()
        self.login("hamlet")

        # Verify succeeds once logged-in
        with queries_captured():
            with patch("zerver.lib.cache.cache_set"):
                result = self._get_home_page(stream="Denmark")
                self.check_rendered_logged_in_app(result)

        page_params = self._get_page_params(result)
        self.assertCountEqual(page_params, self.expected_page_params_keys)
        expected_state_data_keys = [
            *self.expected_state_data_keys,
            "demo_organization_scheduled_deletion_date",
        ]
        self.assertCountEqual(page_params["state_data"], expected_state_data_keys)

    def test_logged_out_home(self) -> None:
        realm = get_realm("zulip")
        do_set_realm_property(realm, "enable_spectator_access", False, acting_user=None)

        # Redirect to login if spectator access is disabled.
        result = self.client_get("/")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        # Load web app directly if spectator access is enabled.
        do_set_realm_property(realm, "enable_spectator_access", True, acting_user=None)
        result = self.client_get("/")
        self.assertEqual(result.status_code, 200)

        # Check no unnecessary params are passed to spectators.
        page_params = self._get_page_params(result)
        self.assertEqual(page_params["is_spectator"], True)
        expected_keys = [
            "apps_page_url",
            "bot_types",
            "corporate_enabled",
            "development_environment",
            "furthest_read_time",
            "insecure_desktop_app",
            "is_spectator",
            "language_cookie_name",
            "language_list",
            "login_page",
            "needs_tutorial",
            "no_event_queue",
            "page_type",
            "promote_sponsoring_zulip",
            "realm_rendered_description",
            "request_language",
            "server_sentry_dsn",
            "show_billing",
            "show_plans",
            "show_remote_billing",
            "show_webathena",
            "sponsorship_pending",
            "state_data",
            "test_suite",
            "translation_data",
            "two_fa_enabled",
            "two_fa_enabled_user",
            "warn_no_email",
        ]
        self.assertCountEqual(page_params, expected_keys)
        self.assertIsNone(page_params["state_data"])

    def test_realm_authentication_methods(self) -> None:
        realm = get_realm("zulip")
        self.login("desdemona")

        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.SAMLAuthBackend",
                "zproject.backends.AzureADAuthBackend",
            )
        ):
            result = self._get_home_page()
            state_data = self._get_page_params(result)["state_data"]

            self.assertEqual(
                state_data["realm_authentication_methods"],
                {
                    "Email": {"enabled": True, "available": True},
                    "AzureAD": {
                        "enabled": True,
                        "available": False,
                        "unavailable_reason": "You need to upgrade to the Zulip Cloud Standard plan to use this authentication method.",
                    },
                    "SAML": {
                        "enabled": True,
                        "available": False,
                        "unavailable_reason": "You need to upgrade to the Zulip Cloud Plus plan to use this authentication method.",
                    },
                },
            )

            # Now try with BILLING_ENABLED=False. This simulates a self-hosted deployment
            # instead of Zulip Cloud. In this case, all authentication methods should be available.
            with self.settings(BILLING_ENABLED=False):
                result = self._get_home_page()
                state_data = self._get_page_params(result)["state_data"]

                self.assertEqual(
                    state_data["realm_authentication_methods"],
                    {
                        "Email": {"enabled": True, "available": True},
                        "AzureAD": {
                            "enabled": True,
                            "available": True,
                        },
                        "SAML": {
                            "enabled": True,
                            "available": True,
                        },
                    },
                )

        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.SAMLAuthBackend",
            )
        ):
            result = self._get_home_page()
            state_data = self._get_page_params(result)["state_data"]

            self.assertEqual(
                state_data["realm_authentication_methods"],
                {
                    "Email": {"enabled": True, "available": True},
                    "SAML": {
                        "enabled": True,
                        "available": False,
                        "unavailable_reason": "You need to upgrade to the Zulip Cloud Plus plan to use this authentication method.",
                    },
                },
            )

        # Changing the plan_type to Standard grants access to AzureAD, but not SAML:
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_STANDARD, acting_user=None)

        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.SAMLAuthBackend",
                "zproject.backends.AzureADAuthBackend",
            )
        ):
            result = self._get_home_page()
            state_data = self._get_page_params(result)["state_data"]

            self.assertEqual(
                state_data["realm_authentication_methods"],
                {
                    "Email": {"enabled": True, "available": True},
                    "AzureAD": {
                        "enabled": True,
                        "available": True,
                    },
                    "SAML": {
                        "enabled": True,
                        "available": False,
                        "unavailable_reason": "You need to upgrade to the Zulip Cloud Plus plan to use this authentication method.",
                    },
                },
            )

            # Now upgrade to Plus and verify that both SAML and AzureAD are available.
            do_change_realm_plan_type(realm, Realm.PLAN_TYPE_PLUS, acting_user=None)
            result = self._get_home_page()
            state_data = self._get_page_params(result)["state_data"]

            self.assertEqual(
                state_data["realm_authentication_methods"],
                {
                    "Email": {"enabled": True, "available": True},
                    "AzureAD": {
                        "enabled": True,
                        "available": True,
                    },
                    "SAML": {
                        "enabled": True,
                        "available": True,
                    },
                },
            )

    def test_sentry_keys(self) -> None:
        def home_params() -> Dict[str, Any]:
            result = self._get_home_page()
            self.assertEqual(result.status_code, 200)
            return self._get_page_params(result)

        self.login("hamlet")
        page_params = home_params()
        self.assertEqual(page_params["server_sentry_dsn"], None)
        self.assertEqual(
            [], [key for key in page_params if key != "server_sentry_dsn" and "sentry" in key]
        )

        with self.settings(SENTRY_FRONTEND_DSN="https://aaa@bbb.ingest.sentry.io/1234"):
            page_params = home_params()
            self.assertEqual(
                page_params["server_sentry_dsn"], "https://aaa@bbb.ingest.sentry.io/1234"
            )
            self.assertEqual(page_params["realm_sentry_key"], "zulip")
            self.assertEqual(page_params["server_sentry_environment"], "development")
            self.assertEqual(page_params["server_sentry_sample_rate"], 1.0)
            self.assertEqual(page_params["server_sentry_trace_rate"], 0.1)

        # Make sure these still exist for logged-out users as well
        realm = get_realm("zulip")
        do_set_realm_property(realm, "enable_spectator_access", True, acting_user=None)
        self.logout()
        page_params = home_params()
        self.assertEqual(page_params["server_sentry_dsn"], None)
        self.assertEqual(
            [], [key for key in page_params if key != "server_sentry_dsn" and "sentry" in key]
        )

        with self.settings(SENTRY_FRONTEND_DSN="https://aaa@bbb.ingest.sentry.io/1234"):
            page_params = home_params()
            self.assertEqual(
                page_params["server_sentry_dsn"], "https://aaa@bbb.ingest.sentry.io/1234"
            )
            self.assertEqual(page_params["realm_sentry_key"], "zulip")
            self.assertEqual(page_params["server_sentry_environment"], "development")
            self.assertEqual(page_params["server_sentry_sample_rate"], 1.0)
            self.assertEqual(page_params["server_sentry_trace_rate"], 0.1)

    def test_home_under_2fa_without_otp_device(self) -> None:
        with self.settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True):
            self.login("iago")
            result = self._get_home_page()
            # Should be successful because otp device is not configured.
            self.check_rendered_logged_in_app(result)

    def test_home_under_2fa_with_otp_device(self) -> None:
        with self.settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True):
            user_profile = self.example_user("iago")
            self.create_default_device(user_profile)
            self.login_user(user_profile)
            result = self._get_home_page()
            # User should not log in because otp device is configured but
            # 2fa login function was not called.
            self.assertEqual(result.status_code, 302)

            self.login_2fa(user_profile)
            result = self._get_home_page()
            # Should be successful after calling 2fa login function.
            self.check_rendered_logged_in_app(result)

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_num_queries_for_realm_admin(self) -> None:
        # Verify number of queries for Realm admin isn't much higher than for normal users.
        self.login("iago")
        with self.assert_database_query_count(51):
            with patch("zerver.lib.cache.cache_set") as cache_mock:
                result = self._get_home_page()
                self.check_rendered_logged_in_app(result)
        self.assert_length(cache_mock.call_args_list, 7)

    def test_num_queries_with_streams(self) -> None:
        main_user = self.example_user("hamlet")
        other_user = self.example_user("cordelia")

        realm_id = main_user.realm_id

        self.login_user(main_user)

        # Try to make page-load do extra work for various subscribed
        # streams.
        for i in range(10):
            stream_name = "test_stream_" + str(i)
            stream = self.make_stream(stream_name)
            DefaultStream.objects.create(
                realm_id=realm_id,
                stream_id=stream.id,
            )
            for user in [main_user, other_user]:
                self.subscribe(user, stream_name)

        # Simulate hitting the page the first time to avoid some noise
        # related to initial logins.
        self._get_home_page()

        # Then for the second page load, measure the number of queries.
        with self.assert_database_query_count(46):
            result = self._get_home_page()

        # Do a sanity check that our new streams were in the payload.
        html = result.content.decode()
        self.assertIn("test_stream_7", html)

    def _get_home_page(self, **kwargs: Any) -> "TestHttpResponse":
        with patch("zerver.lib.events.request_event_queue", return_value=42), patch(
            "zerver.lib.events.get_user_events", return_value=[]
        ):
            result = self.client_get("/", dict(**kwargs))
        return result

    def _sanity_check(self, result: "TestHttpResponse") -> None:
        """
        Use this for tests that are geared toward specific edge cases, but
        which still want the home page to load properly.
        """
        html = result.content.decode()
        if "message_feed_errors_container" not in html:
            raise AssertionError("Home page probably did not load.")

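    # A minimal sketch of how the helpers above are typically combined in the
    # tests below (illustrative only):
    #
    #     self.login("hamlet")
    #     result = self._get_home_page(stream="Denmark")
    #     self._sanity_check(result)
    #     page_params = self._get_page_params(result)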
    def test_terms_of_service(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        for user_tos_version in [None, "-1", "1.1", "2.0.3.4"]:
            user.tos_version = user_tos_version
            user.save()

            with self.settings(TERMS_OF_SERVICE_VERSION="99.99"):
                result = self.client_get("/", dict(stream="Denmark"))

            html = result.content.decode()
            self.assertIn("Accept the Terms of Service", html)

    def test_banned_desktop_app_versions(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        result = self.client_get("/", HTTP_USER_AGENT="ZulipElectron/2.3.82")
        html = result.content.decode()
        self.assertIn("You are using old version of the Zulip desktop", html)

    def test_unsupported_browser(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        # currently we don't support IE, so some of IE's user agents are added.
        unsupported_user_agents = [
            "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2)",
            "Mozilla/5.0 (Windows NT 10.0; Trident/7.0; rv:11.0) like Gecko",
            "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)",
        ]
        for user_agent in unsupported_user_agents:
            result = self.client_get("/", HTTP_USER_AGENT=user_agent)
            html = result.content.decode()
            self.assertIn("Internet Explorer is not supported by Zulip.", html)

    def test_terms_of_service_first_time_template(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        user.tos_version = UserProfile.TOS_VERSION_BEFORE_FIRST_LOGIN
        user.save()

        with self.settings(
            FIRST_TIME_TERMS_OF_SERVICE_TEMPLATE="corporate/hello.html"
        ), self.settings(TERMS_OF_SERVICE_VERSION="99.99"):
            result = self.client_post("/accounts/accept_terms/")
            self.assertEqual(result.status_code, 200)
            self.assert_in_response("I agree to the", result)
            self.assert_in_response("your mission-critical communications with Zulip", result)

    def test_accept_terms_of_service(self) -> None:
        self.login("hamlet")

        result = self.client_post("/accounts/accept_terms/")
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("I agree to the", result)

        result = self.client_post("/accounts/accept_terms/", {"terms": True})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/")

        user = self.example_user("hamlet")
        user.tos_version = "-1"
        user.save()

        result = self.client_post("/accounts/accept_terms/")
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("I agree to the", result)
        self.assert_in_response(
            "Administrators of this Zulip organization will be able to see this email address.",
            result,
        )

        result = self.client_post(
            "/accounts/accept_terms/",
            {
                "terms": True,
                "email_address_visibility": UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
            },
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/")

        user = self.example_user("hamlet")
        self.assertEqual(
            user.email_address_visibility, UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS
        )

    def test_set_email_address_visibility_without_terms_of_service(self) -> None:
        self.login("hamlet")
        user = self.example_user("hamlet")
        user.tos_version = "-1"
        user.save()

        with self.settings(TERMS_OF_SERVICE_VERSION=None):
            result = self.client_get("/", dict(stream="Denmark"))
            self.assertEqual(result.status_code, 200)
            self.assert_in_response(
                "Administrators of this Zulip organization will be able to see this email address.",
                result,
            )

            result = self.client_post(
                "/accounts/accept_terms/",
                {
                    "email_address_visibility": UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
                },
            )
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "/")

            user = self.example_user("hamlet")
            self.assertEqual(
                user.email_address_visibility, UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS
            )

    def test_bad_narrow(self) -> None:
        self.login("hamlet")
        with self.assertLogs(level="WARNING") as m:
            result = self._get_home_page(stream="Invalid Stream")
            self.assertEqual(m.output, ["WARNING:root:Invalid narrow requested, ignoring"])
        self._sanity_check(result)

    def test_topic_narrow(self) -> None:
        self.login("hamlet")
        result = self._get_home_page(stream="Denmark", topic="lunch")
        self._sanity_check(result)
        html = result.content.decode()
        self.assertIn("lunch", html)
        self.assertEqual(
            set(result["Cache-Control"].split(", ")), {"must-revalidate", "no-store", "no-cache"}
        )

    def test_new_stream_announcements_stream(self) -> None:
        realm = get_realm("zulip")
        realm.new_stream_announcements_stream_id = get_stream("Denmark", realm).id
        realm.save()
        self.login("hamlet")
        result = self._get_home_page()
        page_params = self._get_page_params(result)
        self.assertEqual(
            page_params["state_data"]["realm_new_stream_announcements_stream_id"],
            get_stream("Denmark", realm).id,
        )

    def create_bot(self, owner: UserProfile, bot_email: str, bot_name: str) -> UserProfile:
        user = do_create_user(
            email=bot_email,
            password="123",
            realm=owner.realm,
            full_name=bot_name,
            bot_type=UserProfile.DEFAULT_BOT,
            bot_owner=owner,
            acting_user=None,
        )
        return user

    def create_non_active_user(self, realm: Realm, email: str, name: str) -> UserProfile:
        user = do_create_user(
            email=email, password="123", realm=realm, full_name=name, acting_user=None
        )

        # Doing a full-stack deactivation would be expensive here,
        # and we really only need to flip the flag to get a valid
        # test.
        change_user_is_active(user, False)
        return user

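    # The two factory helpers above are used by test_people below to populate
    # the realm_bots and realm_non_active_users buckets.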
    def test_signup_announcements_stream(self) -> None:
        realm = get_realm("zulip")
        realm.signup_announcements_stream = get_stream("Denmark", realm)
        realm.save()
        self.login("hamlet")
        result = self._get_home_page()
        page_params = self._get_page_params(result)
        self.assertEqual(
            page_params["state_data"]["realm_signup_announcements_stream_id"],
            get_stream("Denmark", realm).id,
        )

    def test_zulip_update_announcements_stream(self) -> None:
        realm = get_realm("zulip")
        realm.zulip_update_announcements_stream = get_stream("Denmark", realm)
        realm.save()
        self.login("hamlet")
        result = self._get_home_page()
        page_params = self._get_page_params(result)
        self.assertEqual(
            page_params["state_data"]["realm_zulip_update_announcements_stream_id"],
            get_stream("Denmark", realm).id,
        )

    def test_people(self) -> None:
        hamlet = self.example_user("hamlet")
        realm = get_realm("zulip")
        self.login_user(hamlet)

        bots = {}
        for i in range(3):
            bots[i] = self.create_bot(
                owner=hamlet,
                bot_email=f"bot-{i}@zulip.com",
                bot_name=f"Bot {i}",
            )

        for i in range(3):
            defunct_user = self.create_non_active_user(
                realm=realm,
                email=f"defunct-{i}@zulip.com",
                name=f"Defunct User {i}",
            )

        result = self._get_home_page()
        page_params = self._get_page_params(result)

        """
        We send three lists of users. The first two below are disjoint
        lists of users, and the records we send for them have identical
        structure.

        The realm_bots bucket is somewhat redundant, since all bots will
        be in one of the first two buckets. They do include fields, however,
        that normal users don't care about, such as default_sending_stream.
        """

        buckets = [
            "realm_users",
            "realm_non_active_users",
            "realm_bots",
        ]

        for field in buckets:
            users = page_params["state_data"][field]
            self.assertGreaterEqual(len(users), 3, field)
            for rec in users:
                self.assertEqual(rec["user_id"], get_user(rec["email"], realm).id)
                if field == "realm_bots":
                    self.assertNotIn("is_bot", rec)
                    self.assertIn("is_active", rec)
                    self.assertIn("owner_id", rec)
                else:
                    self.assertIn("is_bot", rec)
                    self.assertNotIn("is_active", rec)

        active_ids = {p["user_id"] for p in page_params["state_data"]["realm_users"]}
        non_active_ids = {p["user_id"] for p in page_params["state_data"]["realm_non_active_users"]}
        bot_ids = {p["user_id"] for p in page_params["state_data"]["realm_bots"]}

        self.assertIn(hamlet.id, active_ids)
        self.assertIn(defunct_user.id, non_active_ids)

        # Bots can show up in multiple buckets.
        self.assertIn(bots[2].id, bot_ids)
        self.assertIn(bots[2].id, active_ids)

        # Make sure nobody got misbucketed.
        self.assertNotIn(hamlet.id, non_active_ids)
        self.assertNotIn(defunct_user.id, active_ids)

        cross_bots = page_params["state_data"]["cross_realm_bots"]
        self.assert_length(cross_bots, 3)
        cross_bots.sort(key=lambda d: d["email"])
        for cross_bot in cross_bots:
            # These are either nondeterministic or boring
            del cross_bot["timezone"]
            del cross_bot["avatar_url"]
            del cross_bot["date_joined"]

        admin_realm = get_realm(settings.SYSTEM_BOT_REALM)
        cross_realm_notification_bot = self.notification_bot(admin_realm)
        cross_realm_email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT, admin_realm.id)
        cross_realm_welcome_bot = get_system_bot(settings.WELCOME_BOT, admin_realm.id)

        by_email = lambda d: d["email"]

        self.assertEqual(
            sorted(cross_bots, key=by_email),
            sorted(
                [
                    dict(
                        avatar_version=cross_realm_email_gateway_bot.avatar_version,
                        bot_owner_id=None,
                        bot_type=1,
                        delivery_email=cross_realm_email_gateway_bot.delivery_email,
                        email=cross_realm_email_gateway_bot.email,
                        user_id=cross_realm_email_gateway_bot.id,
                        full_name=cross_realm_email_gateway_bot.full_name,
                        is_active=True,
                        is_bot=True,
                        is_admin=False,
                        is_owner=False,
                        is_billing_admin=False,
                        role=cross_realm_email_gateway_bot.role,
                        is_system_bot=True,
                        is_guest=False,
                    ),
                    dict(
                        avatar_version=cross_realm_notification_bot.avatar_version,
                        bot_owner_id=None,
                        bot_type=1,
                        delivery_email=cross_realm_notification_bot.delivery_email,
                        email=cross_realm_notification_bot.email,
                        user_id=cross_realm_notification_bot.id,
                        full_name=cross_realm_notification_bot.full_name,
                        is_active=True,
                        is_bot=True,
                        is_admin=False,
                        is_owner=False,
                        is_billing_admin=False,
                        role=cross_realm_notification_bot.role,
                        is_system_bot=True,
                        is_guest=False,
                    ),
                    dict(
                        avatar_version=cross_realm_welcome_bot.avatar_version,
                        bot_owner_id=None,
                        bot_type=1,
                        delivery_email=cross_realm_welcome_bot.delivery_email,
                        email=cross_realm_welcome_bot.email,
                        user_id=cross_realm_welcome_bot.id,
                        full_name=cross_realm_welcome_bot.full_name,
                        is_active=True,
                        is_bot=True,
                        is_admin=False,
                        is_owner=False,
                        is_billing_admin=False,
                        role=cross_realm_welcome_bot.role,
                        is_system_bot=True,
                        is_guest=False,
                    ),
                ],
                key=by_email,
            ),
        )

    def test_new_stream(self) -> None:
        user_profile = self.example_user("hamlet")
        stream_name = "New stream"
        self.subscribe(user_profile, stream_name)
        self.login_user(user_profile)
        result = self._get_home_page(stream=stream_name)
        page_params = self._get_page_params(result)
        self.assertEqual(page_params["narrow_stream"], stream_name)
        self.assertEqual(page_params["narrow"], [dict(operator="stream", operand=stream_name)])
        self.assertEqual(page_params["state_data"]["max_message_id"], -1)

@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
|
2021-06-14 12:38:43 +02:00
|
|
|
def test_get_billing_info(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("desdemona")
|
2020-07-14 14:40:39 +02:00
|
|
|
user.role = UserProfile.ROLE_REALM_OWNER
|
|
|
|
user.save(update_fields=["role"])
|
2021-06-14 12:38:43 +02:00
|
|
|
# realm owner, but no CustomerPlan and realm plan_type SELF_HOSTED -> neither billing link or plans
|
|
|
|
with self.settings(CORPORATE_ENABLED=True):
|
|
|
|
billing_info = get_billing_info(user)
|
|
|
|
self.assertFalse(billing_info.show_billing)
|
|
|
|
self.assertFalse(billing_info.show_plans)
|
2023-11-04 14:24:04 +01:00
|
|
|
self.assertFalse(billing_info.sponsorship_pending)
|
2023-11-15 22:44:24 +01:00
|
|
|
self.assertTrue(billing_info.show_remote_billing)
|
2018-08-15 18:49:25 +02:00
|
|
|
|
2021-06-14 12:38:43 +02:00
|
|
|
# realm owner, with inactive CustomerPlan and realm plan_type SELF_HOSTED -> show only billing link
|
|
|
|
customer = Customer.objects.create(realm=get_realm("zulip"), stripe_customer_id="cus_id")
|
2021-02-12 08:19:30 +01:00
|
|
|
CustomerPlan.objects.create(
|
|
|
|
customer=customer,
|
|
|
|
billing_cycle_anchor=timezone_now(),
|
2023-11-30 07:55:53 +01:00
|
|
|
billing_schedule=CustomerPlan.BILLING_SCHEDULE_ANNUAL,
|
2021-02-12 08:19:30 +01:00
|
|
|
next_invoice_date=timezone_now(),
|
2023-11-30 07:43:06 +01:00
|
|
|
tier=CustomerPlan.TIER_CLOUD_STANDARD,
|
2021-02-12 08:19:30 +01:00
|
|
|
status=CustomerPlan.ENDED,
|
|
|
|
)
|
2021-06-14 12:38:43 +02:00
|
|
|
with self.settings(CORPORATE_ENABLED=True):
|
|
|
|
billing_info = get_billing_info(user)
|
|
|
|
self.assertTrue(billing_info.show_billing)
|
|
|
|
self.assertFalse(billing_info.show_plans)
|
2023-11-04 14:24:04 +01:00
|
|
|
self.assertFalse(billing_info.sponsorship_pending)
|
2023-11-15 22:44:24 +01:00
|
|
|
self.assertTrue(billing_info.show_remote_billing)
|
2021-06-14 12:38:43 +02:00
|
|
|
|
|
|
|
        # realm owner, with inactive CustomerPlan and realm plan_type LIMITED -> show billing link and plans
        do_change_realm_plan_type(user.realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertTrue(billing_info.show_billing)
        self.assertTrue(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertTrue(billing_info.show_remote_billing)

        # Always false without CORPORATE_ENABLED
        with self.settings(CORPORATE_ENABLED=False):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        # show_remote_billing is independent of CORPORATE_ENABLED
        self.assertTrue(billing_info.show_remote_billing)

        # Always false without a UserProfile
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(None)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertFalse(billing_info.show_remote_billing)

        # realm admin, with CustomerPlan and realm plan_type LIMITED -> don't show any links
        # Only the billing admin and realm owner have access to billing.
        user.role = UserProfile.ROLE_REALM_ADMINISTRATOR
        user.save(update_fields=["role"])
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertFalse(billing_info.show_remote_billing)

        # billing admin, with CustomerPlan and realm plan_type STANDARD -> show only billing link
        user.role = UserProfile.ROLE_MEMBER
        user.is_billing_admin = True
        do_change_realm_plan_type(user.realm, Realm.PLAN_TYPE_STANDARD, acting_user=None)
        user.save(update_fields=["role", "is_billing_admin"])
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertTrue(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertTrue(billing_info.show_remote_billing)

        # billing admin, with CustomerPlan and realm plan_type PLUS -> show only billing link
        do_change_realm_plan_type(user.realm, Realm.PLAN_TYPE_PLUS, acting_user=None)
        user.save(update_fields=["role", "is_billing_admin"])
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertTrue(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertTrue(billing_info.show_remote_billing)

        # member, with CustomerPlan and realm plan_type STANDARD -> neither billing link nor plans
        do_change_realm_plan_type(user.realm, Realm.PLAN_TYPE_STANDARD, acting_user=None)
        user.is_billing_admin = False
        user.save(update_fields=["is_billing_admin"])
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertFalse(billing_info.show_remote_billing)

        # guest, with CustomerPlan and realm plan_type SELF_HOSTED -> neither billing link nor plans
        user.role = UserProfile.ROLE_GUEST
        user.save(update_fields=["role"])
        do_change_realm_plan_type(user.realm, Realm.PLAN_TYPE_SELF_HOSTED, acting_user=None)
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertFalse(billing_info.show_remote_billing)

        # billing admin, but no CustomerPlan and realm plan_type SELF_HOSTED -> neither billing link nor plans
        user.role = UserProfile.ROLE_MEMBER
        user.is_billing_admin = True
        user.save(update_fields=["role", "is_billing_admin"])
        CustomerPlan.objects.all().delete()
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertTrue(billing_info.show_remote_billing)

        # billing admin, with sponsorship pending and realm plan_type SELF_HOSTED -> show only sponsorship pending link
        customer.sponsorship_pending = True
        customer.save(update_fields=["sponsorship_pending"])
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertTrue(billing_info.sponsorship_pending)
        self.assertTrue(billing_info.show_remote_billing)

        # billing admin, no customer object and realm plan_type SELF_HOSTED -> no links
        customer.delete()
        with self.settings(CORPORATE_ENABLED=True):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_billing)
        self.assertFalse(billing_info.show_plans)
        self.assertFalse(billing_info.sponsorship_pending)
        self.assertTrue(billing_info.show_remote_billing)

        # If the server doesn't have the push bouncer configured,
        # don't show remote billing.
        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=None):
            billing_info = get_billing_info(user)
        self.assertFalse(billing_info.show_remote_billing)

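    # promote_sponsoring_zulip_in_realm only returns True for realms on
    # PLAN_TYPE_STANDARD_FREE, and can be switched off globally with the
    # PROMOTE_SPONSORING_ZULIP setting.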
    def test_promote_sponsoring_zulip_in_realm(self) -> None:
        realm = get_realm("zulip")

        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=None)
        promote_zulip = promote_sponsoring_zulip_in_realm(realm)
        self.assertTrue(promote_zulip)

        with self.settings(PROMOTE_SPONSORING_ZULIP=False):
            promote_zulip = promote_sponsoring_zulip_in_realm(realm)
            self.assertFalse(promote_zulip)

        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=None)
        promote_zulip = promote_sponsoring_zulip_in_realm(realm)
        self.assertTrue(promote_zulip)

        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
        promote_zulip = promote_sponsoring_zulip_in_realm(realm)
        self.assertFalse(promote_zulip)

        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_STANDARD, acting_user=None)
        promote_zulip = promote_sponsoring_zulip_in_realm(realm)
        self.assertFalse(promote_zulip)

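    # /desktop_home should redirect to its canonical trailing-slash form and
    # from there to the web app at "/".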
    def test_desktop_home(self) -> None:
        self.login("hamlet")
        result = self.client_get("/desktop_home")
        self.assertEqual(result.status_code, 301)
        self.assertTrue(result["Location"].endswith("/desktop_home/"))
        result = self.client_get("/desktop_home/")
        self.assertEqual(result.status_code, 302)
        path = urlsplit(result["Location"]).path
        self.assertEqual(path, "/")

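    # is_outdated_server nags based on how long ago the server was last
    # upgraded; organization administrators (iago) hit the deadline before
    # regular members (hamlet), as the 380-day case below shows.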
    @override_settings(SERVER_UPGRADE_NAG_DEADLINE_DAYS=365)
    def test_is_outdated_server(self) -> None:
        # Check when server_upgrade_nag_deadline > last_server_upgrade_time
        hamlet = self.example_user("hamlet")
        iago = self.example_user("iago")
        now = LAST_SERVER_UPGRADE_TIME.replace(tzinfo=timezone.utc)
        with patch("os.path.getmtime", return_value=now.timestamp()):
            with time_machine.travel((now + timedelta(days=10)), tick=False):
                self.assertEqual(is_outdated_server(iago), False)
                self.assertEqual(is_outdated_server(hamlet), False)
                self.assertEqual(is_outdated_server(None), False)

            with time_machine.travel((now + timedelta(days=397)), tick=False):
                self.assertEqual(is_outdated_server(iago), True)
                self.assertEqual(is_outdated_server(hamlet), True)
                self.assertEqual(is_outdated_server(None), True)

            with time_machine.travel((now + timedelta(days=380)), tick=False):
                self.assertEqual(is_outdated_server(iago), True)
                self.assertEqual(is_outdated_server(hamlet), False)
                self.assertEqual(is_outdated_server(None), False)

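    # get_furthest_read_time is derived from the user's most recent
    # update_message_flags UserActivity entry, regardless of which client
    # recorded it.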
    def test_furthest_read_time(self) -> None:
        msg_id = self.send_test_message("hello!", sender_name="iago")

        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps([msg_id]).decode(), "op": "add", "flag": "read"},
        )

        # Manually process the UserActivity
        now = timezone_now()
        activity_time = calendar.timegm(now.timetuple())
        user_activity_event = {
            "user_profile_id": hamlet.id,
            "client_id": 1,
            "query": "update_message_flags",
            "time": activity_time,
        }

        yesterday = now - timedelta(days=1)
        activity_time_2 = calendar.timegm(yesterday.timetuple())
        user_activity_event_2 = {
            "user_profile_id": hamlet.id,
            "client_id": 2,
            "query": "update_message_flags",
            "time": activity_time_2,
        }
        UserActivityWorker().consume_batch([user_activity_event, user_activity_event_2])

        # Verify that furthest_read_time is the last activity time, irrespective of client.
        furthest_read_time = get_furthest_read_time(hamlet)
        assert furthest_read_time is not None
        self.assertGreaterEqual(furthest_read_time, activity_time)

        # Check when the user has no activity.
        UserActivity.objects.filter(user_profile=hamlet).delete()
        furthest_read_time = get_furthest_read_time(hamlet)
        self.assertIsNone(furthest_read_time)

        # Check handling when there is no user profile.
        furthest_read_time = get_furthest_read_time(None)
        self.assertIsNotNone(furthest_read_time)

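    # With ROOT_DOMAIN_LANDING_PAGE enabled, the root domain serves the
    # marketing landing page while realm subdomains still get the web app;
    # the "auth" and "selfhosting" subdomains redirect elsewhere.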
    def test_subdomain_homepage(self) -> None:
        self.login("hamlet")
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            with patch("zerver.views.home.get_subdomain", return_value=""):
                result = self._get_home_page()
                self.assertEqual(result.status_code, 200)
                self.assert_in_response("your mission-critical communications with Zulip", result)

            with patch("zerver.views.home.get_subdomain", return_value="subdomain"):
                result = self._get_home_page()
                self._sanity_check(result)

    def test_special_subdomains_homepage(self) -> None:
        self.login("hamlet")
        with patch("zerver.views.home.get_subdomain", return_value="auth"):
            result = self._get_home_page()
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "http://testserver")

        with patch("zerver.views.home.get_subdomain", return_value="selfhosting"):
            result = self._get_home_page()
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "/serverlogin/")

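    # Shared helpers: send_test_message posts a stream message as the named
    # sender; soft_activate_and_get_unread_count loads the home page (which
    # reactivates a soft-deactivated user) and returns the unread count from
    # state_data.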
    def send_test_message(
        self,
        content: str,
        sender_name: str = "iago",
        stream_name: str = "Denmark",
        topic_name: str = "foo",
    ) -> int:
        sender = self.example_user(sender_name)
        return self.send_stream_message(sender, stream_name, content=content, topic_name=topic_name)

    def soft_activate_and_get_unread_count(
        self, stream: str = "Denmark", topic_name: str = "foo"
    ) -> int:
        stream_narrow = self._get_home_page(stream=stream, topic=topic_name)
        page_params = self._get_page_params(stream_narrow)
        return page_params["state_data"]["unread_msgs"]["count"]

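    # Soft-deactivated users stop receiving UserMessage rows for new stream
    # messages; visiting the home page reactivates them and backfills the
    # missing rows via add_missing_messages(), which the query-count
    # assertions below probe indirectly.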
    def test_unread_count_user_soft_deactivation(self) -> None:
        # In this test we make sure that if a soft-deactivated user had unread
        # messages before deactivation, they remain that way after reactivation.
        long_term_idle_user = self.example_user("hamlet")
        self.login_user(long_term_idle_user)
        message = "Test message 1"
        self.send_test_message(message)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 1)
        query_count = len(queries)
        user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(user_msg_list[-1].content, message)
        self.logout()

        with self.assertLogs(logger_string, level="INFO") as info_log:
            do_soft_deactivate_users([long_term_idle_user])
        self.assertEqual(
            info_log.output,
            [
                f"INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}",
                f"INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process",
            ],
        )

        self.login_user(long_term_idle_user)
        message = "Test message 2"
        self.send_test_message(message)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertNotEqual(idle_user_msg_list[-1].content, message)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 2)
        # Test here that the query count is at least 5 greater than the previous count.
        # This indirectly ensures that add_missing_messages() was called.
        self.assertGreaterEqual(len(queries) - query_count, 5)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(idle_user_msg_list[-1].content, message)

    def test_multiple_user_soft_deactivations(self) -> None:
        long_term_idle_user = self.example_user("hamlet")
        # We are sending this message to ensure that long_term_idle_user has
        # at least one UserMessage row.
        self.send_test_message("Testing", sender_name="hamlet")
        with self.assertLogs(logger_string, level="INFO") as info_log:
            do_soft_deactivate_users([long_term_idle_user])
        self.assertEqual(
            info_log.output,
            [
                f"INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}",
                f"INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process",
            ],
        )

        message = "Test message 1"
        self.send_test_message(message)
        self.login_user(long_term_idle_user)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 1)
        query_count = len(queries)
        long_term_idle_user.refresh_from_db()
        self.assertFalse(long_term_idle_user.long_term_idle)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(idle_user_msg_list[-1].content, message)

        message = "Test message 2"
        self.send_test_message(message)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 2)
        # Test here that the query count is at least 5 less than the previous count.
        # This ensures that add_missing_messages() isn't called repeatedly.
        self.assertGreaterEqual(query_count - len(queries), 5)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(idle_user_msg_list[-1].content, message)
        self.logout()

        with self.assertLogs(logger_string, level="INFO") as info_log:
            do_soft_deactivate_users([long_term_idle_user])
        self.assertEqual(
            info_log.output,
            [
                f"INFO:{logger_string}:Soft deactivated user {long_term_idle_user.id}",
                f"INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process",
            ],
        )

        message = "Test message 3"
        self.send_test_message(message)
        self.login_user(long_term_idle_user)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 3)
        query_count = len(queries)
        long_term_idle_user.refresh_from_db()
        self.assertFalse(long_term_idle_user.long_term_idle)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(idle_user_msg_list[-1].content, message)

        message = "Test message 4"
        self.send_test_message(message)
        with queries_captured() as queries:
            self.assertEqual(self.soft_activate_and_get_unread_count(), 4)
        self.assertGreaterEqual(query_count - len(queries), 5)
        idle_user_msg_list = get_user_messages(long_term_idle_user)
        self.assertEqual(idle_user_msg_list[-1].content, message)
        self.logout()

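    # The user's default_language ("es") should be reflected in
    # state_data.user_settings even when the page is requested under a
    # different language URL prefix such as /de/.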
    def test_url_language(self) -> None:
        user = self.example_user("hamlet")
        user.default_language = "es"
        user.save()
        self.login_user(user)
        result = self._get_home_page()
        self.check_rendered_logged_in_app(result)
        with patch("zerver.lib.events.request_event_queue", return_value=42), patch(
            "zerver.lib.events.get_user_events", return_value=[]
        ):
            result = self.client_get("/de/")
        page_params = self._get_page_params(result)
        self.assertEqual(page_params["state_data"]["user_settings"]["default_language"], "es")
        # TODO: Verify that the actual language we're using in the
        # translation data is German.

    def test_translation_data(self) -> None:
        user = self.example_user("hamlet")
        user.default_language = "es"
        user.save()
        self.login_user(user)
        result = self._get_home_page()
        self.check_rendered_logged_in_app(result)

        page_params = self._get_page_params(result)
        self.assertEqual(page_params["state_data"]["user_settings"]["default_language"], "es")

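    # The register response caps the drafts it returns at
    # MAX_DRAFTS_IN_REGISTER_RESPONSE (overridden to 5 here) and prefers the
    # most recently edited drafts.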
    # TODO: This test would likely be better written as a /register
    # API test with just the drafts event type, to avoid the
    # performance cost of fetching /.
    @override_settings(MAX_DRAFTS_IN_REGISTER_RESPONSE=5)
    def test_limit_drafts(self) -> None:
        hamlet = self.example_user("hamlet")
        base_time = timezone_now()
        initial_count = Draft.objects.count()

        step_value = timedelta(seconds=1)
        # Create MAX_DRAFTS_IN_REGISTER_RESPONSE + 1 drafts.
        # TODO: This would be better done as an API request.
        draft_objects = [
            Draft(
                user_profile=hamlet,
                recipient=None,
                topic="",
                content="sample draft",
                last_edit_time=base_time + i * step_value,
            )
            for i in range(settings.MAX_DRAFTS_IN_REGISTER_RESPONSE + 1)
        ]
        Draft.objects.bulk_create(draft_objects)

        # Now fetch the drafts part of the initial state and make sure
        # that we only got back settings.MAX_DRAFTS_IN_REGISTER_RESPONSE
        # of them, and that the drafts returned are the most recently
        # edited ones.
        self.login("hamlet")
        page_params = self._get_page_params(self._get_home_page())
        self.assertEqual(
            page_params["state_data"]["user_settings"]["enable_drafts_synchronization"], True
        )
        self.assert_length(
            page_params["state_data"]["drafts"], settings.MAX_DRAFTS_IN_REGISTER_RESPONSE
        )
        self.assertEqual(
            Draft.objects.count(), settings.MAX_DRAFTS_IN_REGISTER_RESPONSE + 1 + initial_count
        )
        # initial_count accounts for any drafts already in the test database.
        for draft in page_params["state_data"]["drafts"]:
            self.assertNotEqual(draft["timestamp"], base_time)

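    # When a realm has a scheduled cutoff for push notifications, its
    # push_notifications_enabled_end_timestamp is exposed to clients in
    # state_data as a UNIX timestamp.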
    def test_realm_push_notifications_enabled_end_timestamp(self) -> None:
        self.login("hamlet")
        realm = get_realm("zulip")
        end_timestamp = timezone_now() + timedelta(days=1)
        realm.push_notifications_enabled_end_timestamp = end_timestamp
        realm.save()

        result = self._get_home_page(stream="Denmark")
        page_params = self._get_page_params(result)
        self.assertEqual(
            page_params["state_data"]["realm_push_notifications_enabled_end_timestamp"],
            datetime_to_timestamp(end_timestamp),
        )