python: Remove locally dead code.

These changes are all independent of each other; I just didn’t feel
like making dozens of commits for them.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Author: Anders Kaseorg <anders@zulip.com>
Date: 2021-08-13 16:01:37 -07:00
Committed by: Tim Abbott
Parent: 9afe6d0829
Commit: 4206e5f00b

35 changed files with 13 additions and 338 deletions

@@ -352,15 +352,8 @@ def mock_stripe(
     return _mock_stripe
-# A Kandra is a fictional character that can become anything. Used as a
-# wildcard when testing for equality.
-class Kandra:  # nocoverage: TODO
-    def __eq__(self, other: Any) -> bool:
-        return True
 class StripeTestCase(ZulipTestCase):
-    def setUp(self, *mocks: Mock) -> None:
+    def setUp(self) -> None:
         super().setUp()
         realm = get_realm("zulip")
@@ -2381,7 +2374,7 @@ class StripeTest(StripeTestCase):
         invoice_plans_as_needed(self.next_year)
         mocked.assert_not_called()
-    def test_reupgrade_by_billing_admin_after_downgrade(self, *mocks: Mock) -> None:
+    def test_reupgrade_by_billing_admin_after_downgrade(self) -> None:
         user = self.example_user("hamlet")
         with patch("corporate.lib.stripe.timezone_now", return_value=self.now):
@@ -2557,7 +2550,7 @@ class StripeTest(StripeTestCase):
         for key, value in line_item_params.items():
             self.assertEqual(renewal_item.get(key), value)
-    def test_update_licenses_of_automatic_plan_from_billing_page(self, *mocks: Mock) -> None:
+    def test_update_licenses_of_automatic_plan_from_billing_page(self) -> None:
         user = self.example_user("hamlet")
         self.login_user(user)
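
For context only (not part of the commit): the removed `Kandra` class implemented the common wildcard-equality pattern used in mock call assertions. A minimal sketch of that pattern, with a hypothetical `AnyValue` name standing in for `Kandra`:

```python
from typing import Any
from unittest.mock import MagicMock


class AnyValue:
    """Hypothetical stand-in for the removed Kandra helper: compares equal to anything."""

    def __eq__(self, other: Any) -> bool:
        return True


mocked = MagicMock()
mocked(42, invoice_id="inv_123")
# The wildcard lets the assertion pin down only the arguments we care about.
mocked.assert_called_once_with(AnyValue(), invoice_id="inv_123")
```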

@@ -30,7 +30,6 @@ from scripts.lib.zulip_tools import (
     assert_running_as_root,
     get_config,
     get_config_file,
-    get_tornado_ports,
     parse_os_release,
     run_psql_as_postgres,
     su_to_zulip,
@@ -97,7 +96,6 @@ deploy_path = args.deploy_path
 os.chdir(deploy_path)
 config_file = get_config_file()
-tornado_processes = len(get_tornado_ports(config_file))
 IS_SERVER_UP = True

@@ -555,17 +555,6 @@ def get_config(
     return default_value
-def set_config(
-    config_file: configparser.RawConfigParser,
-    section: str,
-    key: str,
-    value: str,
-) -> None:
-    if not config_file.has_section(section):
-        config_file.add_section(section)
-    config_file.set(section, key, value)
 def get_config_file() -> configparser.RawConfigParser:
     config_file = configparser.RawConfigParser()
     config_file.read("/etc/zulip/zulip.conf")
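
As a side note (not part of the commit), the deleted `set_config` helper was a thin wrapper over the standard-library `configparser` API; roughly, assuming a parser loaded the way `get_config_file` does above, it amounted to:

```python
import configparser

config_file = configparser.RawConfigParser()
config_file.read("/etc/zulip/zulip.conf")

# Hypothetical section/key/value, purely for illustration.
section, key, value = "machine", "example_key", "example_value"
if not config_file.has_section(section):
    config_file.add_section(section)
config_file.set(section, key, value)
```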

@@ -8,7 +8,6 @@ from premailer import Premailer
 ZULIP_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
 EMAIL_TEMPLATES_PATH = os.path.join(ZULIP_PATH, "templates", "zerver", "emails")
-COMPILED_EMAIL_TEMPLATES_PATH = os.path.join(EMAIL_TEMPLATES_PATH, "compiled")
 CSS_SOURCE_PATH = os.path.join(EMAIL_TEMPLATES_PATH, "email.css")

@@ -320,7 +320,7 @@ class `HelloWorldHookTests`:
 class HelloWorldHookTests(WebhookTestCase):
     STREAM_NAME = 'test'
     URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}"
-    FIXTURE_DIR_NAME = 'helloworld'
+    WEBHOOK_DIR_NAME = 'helloworld'
     # Note: Include a test function per each distinct message condition your integration supports
     def test_hello_message(self) -> None:
@@ -333,7 +333,7 @@ class HelloWorldHookTests(WebhookTestCase):
 ```
-In the above example, `STREAM_NAME`, `URL_TEMPLATE`, and `FIXTURE_DIR_NAME` refer
+In the above example, `STREAM_NAME`, `URL_TEMPLATE`, and `WEBHOOK_DIR_NAME` refer
 to class attributes from the base class, `WebhookTestCase`. These are needed by
 the helper function `check_webhook` to determine how to execute
 your test. `STREAM_NAME` should be set to your default stream. If it doesn't exist,

@@ -34,7 +34,7 @@ EXCLUDED_FILES = [
 def check_our_files(modified_only: bool, all_dups: bool, fix: bool, targets: List[str]) -> None:
     by_lang = lister.list_files(
         targets=targets,
-        modified_only=args.modified,
+        modified_only=modified_only,
         ftypes=["hbs", "html"],
         group_by_ftype=True,
         exclude=EXCLUDED_FILES,
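
The hunk above also makes the function honor its own `modified_only` parameter instead of reaching for the module-level `args.modified`. A reduced, hypothetical illustration (names made up) of why that pattern is worth fixing:

```python
import argparse

# Hypothetical module-level CLI args shadowing a function parameter.
args = argparse.Namespace(modified=True)


def list_files_buggy(modified_only: bool) -> bool:
    return args.modified  # bug: the caller's argument is silently ignored


def list_files_fixed(modified_only: bool) -> bool:
    return modified_only  # fix: use the parameter that was passed in


print(list_files_buggy(False))  # True  -- caller's False is ignored
print(list_files_fixed(False))  # False
```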

@@ -2,7 +2,6 @@
 import html
 import os
-import pprint
 import sys
 from collections import defaultdict
 from typing import Any, Dict, List, Set
@@ -40,25 +39,6 @@ def clean_up_pattern(s: str) -> str:
     return result
-def encode_info(info: Any) -> str:
-    try:
-        result = ""
-        try:
-            info = orjson.loads(info)
-            result = "(stringified)\n"
-        except orjson.JSONDecodeError:
-            pass
-        result += html.escape(pprint.pformat(info, indent=4))
-        return "<pre>" + result + "</pre>"
-    except Exception:
-        pass
-    try:
-        return html.escape(str(info))
-    except Exception:
-        pass
-    return "NOT ENCODABLE"
 def fix_test_name(s: str) -> str:
     return s.replace("zerver.tests.", "")

@@ -5,11 +5,6 @@ from typing import Dict, List, Optional, Sequence, Set
 from .template_parser import FormattedException, Token, tokenize
-class HtmlBranchesException(Exception):
-    # TODO: Have callers pass in line numbers.
-    pass
 class HtmlTreeBranch:
     """
     For <p><div id='yo'>bla<span class='bar'></span></div></p>, store a

@@ -76,18 +76,6 @@ except OSError:
     )
     sys.exit(1)
-if platform.architecture()[0] == "64bit":
-    arch = "amd64"
-elif platform.architecture()[0] == "32bit":
-    arch = "i386"
-else:
-    logging.critical(
-        "Only x86 is supported; ask on chat.zulip.org if you want another architecture."
-    )
-    # Note: It's probably actually not hard to add additional
-    # architectures.
-    sys.exit(1)
 distro_info = parse_os_release()
 vendor = distro_info["ID"]
 os_version = distro_info["VERSION_ID"]

@@ -90,7 +90,7 @@ else:
 manage_args = [f"--settings={settings_module}"]
 os.environ["DJANGO_SETTINGS_MODULE"] = settings_module
-from scripts.lib.zulip_tools import CYAN, ENDC, FAIL
+from scripts.lib.zulip_tools import CYAN, ENDC
 proxy_port = base_port
 django_port = base_port + 1
@@ -310,15 +310,6 @@ class TornadoHandler(BaseHandler):
         target_port = tornado_port
-class ErrorHandler(BaseHandler):
-    @web.asynchronous
-    def prepare(self) -> None:
-        print(FAIL + "Unexpected request: " + ENDC, self.request.path)
-        self.set_status(500)
-        self.write("path not supported")
-        self.finish()
 class Application(web.Application):
     def __init__(self, enable_logging: bool = False) -> None:
         handlers = [
@@ -334,10 +325,6 @@ class Application(web.Application):
         super().log_request(handler)
-def on_shutdown() -> None:
-    IOLoop.instance().stop()
 def shutdown_handler(*args: Any, **kwargs: Any) -> None:
     io_loop = IOLoop.instance()
     if io_loop._callbacks:

@@ -265,7 +265,7 @@ def setup_emoji_farms(cache_path: str, emoji_data: List[Dict[str, Any]]) -> None
         if emoji_dict["has_img_" + alt_name]:
             ensure_emoji_image(emoji_dict, src_emoji_farm, target_emoji_farm)
             skin_variations = emoji_dict.get("skin_variations", {})
-            for skin_tone, img_info in skin_variations.items():
+            for img_info in skin_variations.values():
                 if img_info["has_img_" + alt_name]:
                     ensure_emoji_image(img_info, src_emoji_farm, target_emoji_farm)

@@ -15,7 +15,6 @@ from emoji_setup_utils import EMOJISETS, emoji_is_universal, get_emoji_code
 TOOLS_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 ZULIP_PATH = os.path.dirname(TOOLS_DIR)
-EMOJI_MAP_FILE = os.path.join(TOOLS_DIR, "setup", "emoji", "emoji_map.json")
 UNIFIED_REACTIONS_FILE = os.path.join(
     ZULIP_PATH, "zerver", "management", "data", "unified_reactions.json"
 )
@@ -27,8 +26,6 @@ with open(EMOJI_DATA_FILE, "rb") as fp:
     EMOJI_DATA = orjson.loads(fp.read())
 with open(UNIFIED_REACTIONS_FILE, "rb") as fp:
     UNIFIED_REACTIONS_MAP = orjson.loads(fp.read())
-with open(EMOJI_MAP_FILE, "rb") as fp:
-    EMOJI_MAP = orjson.loads(fp.read())
 EMOJI_IMAGE_TEMPLATE = """
 <img class="emoji" src="images-{emojiset}-64/{emoji_code}.png" title={emojiset}>

@@ -82,17 +82,6 @@ with test_server_running(
         site=site,
     )
-    # Prepare a generic bot client for curl testing
-    email = "default-bot@zulip.com"
-    realm = get_realm("zulip")
-    bot_user = get_user(email, realm)
-    api_key = get_api_key(bot_user)
-    bot_client = Client(
-        email=email,
-        api_key=api_key,
-        site=site,
-    )
     # Prepare the non-admin client
     email = "guest@zulip.com"  # guest is not an admin
     guest_user = do_create_user(

@@ -6,7 +6,6 @@ import subprocess
 from typing import NoReturn
 os.chdir(os.path.join(os.path.dirname(__file__), ".."))
-STATIC_PATH = "static/"
 def build_for_prod_or_puppeteer(quiet: bool) -> NoReturn:

@@ -192,42 +192,6 @@ def build_subscription(recipient_id: int, user_id: int, subscription_id: int) ->
     return subscription_dict
-def build_public_stream_subscriptions(
-    zerver_userprofile: List[ZerverFieldsT],
-    zerver_recipient: List[ZerverFieldsT],
-    zerver_stream: List[ZerverFieldsT],
-) -> List[ZerverFieldsT]:
-    """
-    This function was only used for HipChat, but it may apply to
-    future conversions. We often did't get full subscriber data in
-    the HipChat export, so this function just autosubscribes all
-    users to every public stream. This returns a list of Subscription
-    dicts.
-    """
-    subscriptions: List[ZerverFieldsT] = []
-    public_stream_ids = {stream["id"] for stream in zerver_stream if not stream["invite_only"]}
-    public_stream_recipient_ids = {
-        recipient["id"]
-        for recipient in zerver_recipient
-        if recipient["type"] == Recipient.STREAM and recipient["type_id"] in public_stream_ids
-    }
-    user_ids = [user["id"] for user in zerver_userprofile]
-    for recipient_id in public_stream_recipient_ids:
-        for user_id in user_ids:
-            subscription = build_subscription(
-                recipient_id=recipient_id,
-                user_id=user_id,
-                subscription_id=NEXT_ID("subscription"),
-            )
-            subscriptions.append(subscription)
-    return subscriptions
 class GetUsers(Protocol):
     def __call__(self, stream_id: int = ..., huddle_id: int = ...) -> Set[int]:
         ...
@@ -618,37 +582,6 @@ def process_avatars(
     return avatar_list + avatar_original_list
-def write_avatar_png(avatar_folder: str, realm_id: int, user_id: int, bits: bytes) -> ZerverFieldsT:
-    """
-    Use this function for conversions like HipChat where
-    the bits for the .png file come in something like
-    a users.json file, and where we don't have to
-    fetch avatar images externally.
-    """
-    avatar_hash = user_avatar_path_from_ids(
-        user_profile_id=user_id,
-        realm_id=realm_id,
-    )
-    image_fn = avatar_hash + ".original"
-    image_path = os.path.join(avatar_folder, image_fn)
-    with open(image_path, "wb") as image_file:
-        image_file.write(bits)
-    # Return metadata that eventually goes in records.json.
-    metadata = dict(
-        path=image_path,
-        s3_path=image_path,
-        realm_id=realm_id,
-        user_profile_id=user_id,
-        # We only write the .original file; ask the importer to do the thumbnailing.
-        importer_should_thumbnail=True,
-    )
-    return metadata
 ListJobData = TypeVar("ListJobData")

@@ -1,10 +1,10 @@
 import importlib
 import json
-import os
 from typing import Any, Callable, Dict, Optional
 from django.conf import settings
 from django.utils.translation import gettext as _
+from zulip_bots.lib import BotIdentity, RateLimit
 from zerver.lib.actions import (
     internal_send_huddle_message,
@@ -22,10 +22,6 @@ from zerver.lib.integrations import EMBEDDED_BOTS
 from zerver.lib.topic import get_topic_from_message_info
 from zerver.models import UserProfile, get_active_user
-our_dir = os.path.dirname(os.path.abspath(__file__))
-from zulip_bots.lib import BotIdentity, RateLimit
 def get_bot_handler(service_name: str) -> Any:

@@ -5,7 +5,6 @@ from typing import Any, Callable, Dict, List, Tuple
 from django.conf import settings
 from django.contrib.sessions.models import Session
-from django.db.models import Q
 from django.utils.timezone import now as timezone_now
 # This file needs to be different from cache.py because cache.py
@@ -17,45 +16,20 @@ from zerver.lib.cache import (
     get_remote_cache_requests,
     get_remote_cache_time,
     get_stream_cache_key,
-    to_dict_cache_key_id,
     user_profile_by_api_key_cache_key,
     user_profile_cache_key,
 )
-from zerver.lib.message import MessageDict
 from zerver.lib.sessions import session_engine
 from zerver.lib.users import get_all_api_keys
 from zerver.models import (
     Client,
     Huddle,
-    Message,
     Stream,
     UserProfile,
     get_client_cache_key,
     huddle_hash_cache_key,
 )
-MESSAGE_CACHE_SIZE = 75000
-def message_fetch_objects() -> List[Any]:
-    try:
-        max_id = Message.objects.only("id").order_by("-id")[0].id
-    except IndexError:
-        return []
-    return Message.objects.select_related().filter(
-        ~Q(sender__email="tabbott/extra@mit.edu"), id__gt=max_id - MESSAGE_CACHE_SIZE
-    )
-def message_cache_items(items_for_remote_cache: Dict[str, Tuple[bytes]], message: Message) -> None:
-    """
-    Note: this code is untested, and the caller has been
-    commented out for a while.
-    """
-    key = to_dict_cache_key_id(message.id)
-    value = MessageDict.to_dict_uncached([message])[message.id]
-    items_for_remote_cache[key] = (value,)
 def user_cache_items(
     items_for_remote_cache: Dict[str, Tuple[UserProfile]], user_profile: UserProfile
@@ -142,10 +116,6 @@ cache_fillers: Dict[
         10000,
     ),
     "stream": (get_streams, stream_cache_items, 3600 * 24 * 7, 10000),
-    # Message cache fetching disabled until we can fix the fact that it
-    # does a bunch of inefficient memcached queries as part of filling
-    # the display_recipient cache
-    # 'message': (message_fetch_objects, message_cache_items, 3600 * 24, 1000),
     "huddle": (
         lambda: Huddle.objects.select_related().all(),
         huddle_cache_items,

@@ -85,14 +85,6 @@ subscription_fields: Sequence[Tuple[str, object]] = [
 ]
-equals_add_or_remove = UnionType(
-    [
-        # force vertical
-        Equals("add"),
-        Equals("remove"),
-    ]
-)
 value_type = UnionType(
     [
         # force vertical formatting

@@ -77,13 +77,11 @@ FilterArgs = Dict[str, Any]
 IdSource = Tuple[TableName, Field]
 SourceFilter = Callable[[Record], bool]
-# These next two types are callbacks, which mypy does not
+# This next type is a callback, which mypy does not
 # support well, because PEP 484 says "using callbacks
 # with keyword arguments is not perceived as a common use case."
 # CustomFetch = Callable[[TableData, Config, Context], None]
-# PostProcessData = Callable[[TableData, Config, Context], None]
 CustomFetch = Any  # TODO: make more specific, see above
-PostProcessData = Any  # TODO: make more specific
 # The keys of our MessageOutput variables are normally
 # List[Record], but when we write partials, we can get

@@ -122,13 +122,6 @@ def verbose_compile(pattern: str) -> Pattern[str]:
     )
-def normal_compile(pattern: str) -> Pattern[str]:
-    return re.compile(
-        fr"^(.*?){pattern}(.*)$",
-        re.DOTALL | re.UNICODE,
-    )
 STREAM_LINK_REGEX = r"""
     (?<![^\s'"\(,:<])  # Start after whitespace or specified chars
     \#\*\*             # and after hash sign followed by double asterisks

@@ -53,8 +53,6 @@ from zerver.models import (
     query_for_ids,
 )
-RealmAlertWord = Dict[int, List[str]]
 class RawReactionRow(TypedDict):
     emoji_code: str

@@ -19,7 +19,6 @@ from zerver.lib.response import json_method_not_allowed
 from zerver.lib.types import ViewFuncT
 METHODS = ("GET", "HEAD", "POST", "PUT", "DELETE", "PATCH")
-FLAGS = "override_api_url_scheme"
 def default_never_cache_responses(view_func: ViewFuncT) -> ViewFuncT:

@@ -11,7 +11,6 @@ class ExtraConsoleOutputInTestException(Exception):
 class ExtraConsoleOutputFinder:
     def __init__(self) -> None:
-        self.latest_test_name = ""
         valid_line_patterns = [
             # Example: Running zerver.tests.test_attachments.AttachmentsTests.test_delete_unauthenticated
             "^Running ",

@@ -311,7 +311,6 @@ class HostRequestMock(HttpRequest):
             self.POST[key] = str(post_data[key])
             self.method = "POST"
-        self._log_data: Dict[str, Any] = {}
         if meta_data is None:
             self.META = {"PATH_INFO": "test"}
         else:

@@ -214,7 +214,7 @@ def generate_topic_history_from_db_rows(rows: List[Tuple[str, int]]) -> List[Dic
         canonical_topic_names[canonical_name] = (max_message_id, topic_name)
     history = []
-    for canonical_topic, (max_message_id, topic_name) in canonical_topic_names.items():
+    for max_message_id, topic_name in canonical_topic_names.values():
         history.append(
             dict(name=topic_name, max_id=max_message_id),
         )
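
A tiny illustration (with made-up data, not from the diff) of the `.items()` to `.values()` cleanup above: when the loop body never uses the dictionary key, iterating over `values()` says so directly and drops the unused variable.

```python
# Hypothetical topic data shaped like canonical_topic_names above.
canonical_topic_names = {"weekly sync": (7, "Weekly Sync"), "lunch": (12, "Lunch")}

history = []
for max_message_id, topic_name in canonical_topic_names.values():
    history.append(dict(name=topic_name, max_id=max_message_id))

print(history)  # [{'name': 'Weekly Sync', 'max_id': 7}, {'name': 'Lunch', 'max_id': 12}]
```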

@@ -4,8 +4,7 @@ import itertools
 import re
 import secrets
 from itertools import zip_longest
-from time import sleep
-from typing import Any, Callable, Iterator, List, Optional, Sequence, Set, Tuple, TypeVar
+from typing import Any, Callable, Iterator, List, Optional, Set, Tuple, TypeVar
 from django.conf import settings
@@ -58,33 +57,6 @@ class StatsDWrapper:
 statsd = StatsDWrapper()
-# Runs the callback with slices of all_list of a given batch_size
-def run_in_batches(
-    all_list: Sequence[T],
-    batch_size: int,
-    callback: Callable[[Sequence[T]], None],
-    sleep_time: int = 0,
-    logger: Optional[Callable[[str], None]] = None,
-) -> None:
-    if len(all_list) == 0:
-        return
-    limit = (len(all_list) // batch_size) + 1
-    for i in range(limit):
-        start = i * batch_size
-        end = (i + 1) * batch_size
-        if end >= len(all_list):
-            end = len(all_list)
-        batch = all_list[start:end]
-        if logger:
-            logger(f"Executing {end-start} in batch {i+1} of {limit}")
-        callback(batch)
-        if i != limit - 1:
-            sleep(sleep_time)
 def make_safe_digest(string: str, hash_func: Callable[[bytes], Any] = hashlib.sha1) -> str:
     """

@@ -1550,9 +1550,6 @@ class UserProfile(AbstractBaseUser, PermissionsMixin, UserBaseSettings):
     # API rate limits, formatted as a comma-separated list of range:max pairs
     rate_limits: str = models.CharField(default="", max_length=100)
-    # Hours to wait before sending another email to a user
-    EMAIL_REMINDER_WAITPERIOD = 24
     # Default streams for some deprecated/legacy classes of bot users.
     default_sending_stream: Optional["Stream"] = models.ForeignKey(
         "zerver.Stream",

@@ -16,11 +16,10 @@ import json
 import os
 import sys
 from functools import wraps
-from typing import Any, Callable, Dict, Iterable, List, Optional, Set, TypeVar, cast
+from typing import Any, Callable, Dict, List, Set, TypeVar, cast
 from zulip import Client
-from zerver.lib import mdiff
 from zerver.models import get_realm, get_user
 from zerver.openapi.openapi import validate_against_openapi_schema
@@ -1428,58 +1427,6 @@ def test_invalid_stream_error(client: Client) -> None:
 # SETUP METHODS FOLLOW
-def test_against_fixture(
-    result: Dict[str, Any],
-    fixture: Dict[str, Any],
-    check_if_equal: Optional[Iterable[str]] = None,
-    check_if_exists: Optional[Iterable[str]] = None,
-) -> None:
-    assertLength(result, fixture)
-    if check_if_equal is None and check_if_exists is None:
-        for key, value in fixture.items():
-            assertEqual(key, result, fixture)
-    if check_if_equal is not None:
-        for key in check_if_equal:
-            assertEqual(key, result, fixture)
-    if check_if_exists is not None:
-        for key in check_if_exists:
-            assertIn(key, result)
-def assertEqual(key: str, result: Dict[str, Any], fixture: Dict[str, Any]) -> None:
-    if result[key] != fixture[key]:
-        first = f"{key} = {result[key]}"
-        second = f"{key} = {fixture[key]}"
-        raise AssertionError(
-            "Actual and expected outputs do not match; showing diff:\n"
-            + mdiff.diff_strings(first, second)
-        )
-    else:
-        assert result[key] == fixture[key]
-def assertLength(result: Dict[str, Any], fixture: Dict[str, Any]) -> None:
-    if len(result) != len(fixture):
-        result_string = json.dumps(result, indent=4, sort_keys=True)
-        fixture_string = json.dumps(fixture, indent=4, sort_keys=True)
-        raise AssertionError(
-            "The lengths of the actual and expected outputs do not match; showing diff:\n"
-            + mdiff.diff_strings(result_string, fixture_string)
-        )
-    else:
-        assert len(result) == len(fixture)
-def assertIn(key: str, result: Dict[str, Any]) -> None:
-    if key not in result.keys():
-        raise AssertionError(
-            f"The actual output does not contain the the key `{key}`.",
-        )
-    else:
-        assert key in result
 def test_messages(client: Client, nonadmin_client: Client) -> None:

@@ -819,7 +819,6 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
     SIGNUP_URL: str
     AUTHORIZATION_URL: str
     AUTH_FINISH_URL: str
-    CONFIG_ERROR_URL: str
     ACCESS_TOKEN_URL: str
     USER_INFO_URL: str
     CLIENT_KEY_SETTING: str
@@ -1780,7 +1779,6 @@ class SAMLAuthBackendTest(SocialAuthBase):
     SIGNUP_URL = "/accounts/register/social/saml/test_idp"
     AUTHORIZATION_URL = "https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO"
     AUTH_FINISH_URL = "/complete/saml/"
-    CONFIG_ERROR_URL = "/config-error/saml"
     # We have to define our own social_auth_test as the flow of SAML authentication
     # is different from the other social backends.
@@ -2651,7 +2649,6 @@ class AppleAuthMixin:
     AUTHORIZATION_URL = "https://appleid.apple.com/auth/authorize"
     ACCESS_TOKEN_URL = "https://appleid.apple.com/auth/token"
     AUTH_FINISH_URL = "/complete/apple/"
-    CONFIG_ERROR_URL = "/config-error/apple"
     def generate_id_token(
         self, account_data_dict: Dict[str, str], audience: Optional[str] = None
@@ -3033,7 +3030,6 @@ class GenericOpenIdConnectTest(SocialAuthBase):
     JWKS_URL = f"{BASE_OIDC_URL}/jwks"
     USER_INFO_URL = f"{BASE_OIDC_URL}/userinfo"
     AUTH_FINISH_URL = "/complete/oidc/"
-    CONFIG_ERROR_URL = "/config-error/oidc"
     def social_auth_test(
         self,
@@ -3215,7 +3211,6 @@ class GitHubAuthBackendTest(SocialAuthBase):
     ACCESS_TOKEN_URL = "https://github.com/login/oauth/access_token"
     USER_INFO_URL = "https://api.github.com/user"
     AUTH_FINISH_URL = "/complete/github/"
-    CONFIG_ERROR_URL = "/config-error/github"
     email_data: List[Dict[str, Any]] = []
     def social_auth_test_finish(
@@ -3737,7 +3732,6 @@ class GitLabAuthBackendTest(SocialAuthBase):
     ACCESS_TOKEN_URL = "https://gitlab.com/oauth/token"
     USER_INFO_URL = "https://gitlab.com/api/v4/user"
     AUTH_FINISH_URL = "/complete/gitlab/"
-    CONFIG_ERROR_URL = "/config-error/gitlab"
     def test_gitlab_auth_enabled(self) -> None:
         with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.GitLabAuthBackend",)):
@@ -3759,7 +3753,6 @@ class GoogleAuthBackendTest(SocialAuthBase):
     ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
     USER_INFO_URL = "https://www.googleapis.com/oauth2/v3/userinfo"
     AUTH_FINISH_URL = "/complete/google/"
-    CONFIG_ERROR_URL = "/config-error/google"
     def get_account_data_dict(self, email: str, name: str) -> Dict[str, Any]:
         return dict(email=email, name=name, email_verified=True)

@@ -2,7 +2,6 @@ import asyncio
 import base64
 import datetime
 import itertools
-import os
 import re
 import uuid
 from contextlib import contextmanager
@@ -87,8 +86,6 @@ if settings.ZILENCER_ENABLED:
         RemoteZulipServer,
     )
-ZERVER_DIR = os.path.dirname(os.path.dirname(__file__))
 @skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
 class BouncerTestCase(ZulipTestCase):

@@ -182,10 +182,6 @@ class ClientDescriptor:
         ret.last_connection_time = d["last_connection_time"]
         return ret
-    def prepare_for_pickling(self) -> None:
-        self.current_handler_id = None
-        self._timeout_handle = None
     def add_event(self, event: Mapping[str, Any]) -> None:
         if self.current_handler_id is not None:
             handler = get_handler_by_id(self.current_handler_id)
@@ -709,11 +705,6 @@ def fetch_events(query: Mapping[str, Any]) -> Dict[str, Any]:
     return dict(type="async")
-# Send email notifications to idle users
-# after they are idle for 1 hour
-NOTIFY_AFTER_IDLE_HOURS = 1
 def build_offline_notification(user_profile_id: int, message_id: int) -> Dict[str, Any]:
     return {
         "user_profile_id": user_profile_id,

@@ -9,8 +9,6 @@ from zerver.lib.response import json_success
 from zerver.lib.webhooks.common import check_send_webhook_message
 from zerver.models import UserProfile
-CHECK_IS_REPLY = "in reply to"
 @webhook_view("Flock")
 @has_request_variables

@@ -4,7 +4,6 @@ from zerver.lib.test_classes import WebhookTestCase
 class SonarqubeHookTests(WebhookTestCase):
     STREAM_NAME = "SonarQube"
     URL_TEMPLATE = "/api/v1/external/sonarqube?api_key={api_key}&stream={stream}"
-    FIXTURE_DIR_NAME = "sonarqube"
     WEBHOOK_DIR_NAME = "sonarqube"
     def test_analysis_success(self) -> None:

@@ -13,10 +13,8 @@ class TransifexHookTests(WebhookTestCase):
     PROJECT = "project-title"
     LANGUAGE = "en"
     RESOURCE = "file"
-    REVIEWED = True
     def test_transifex_reviewed_message(self) -> None:
-        self.REVIEWED = True
         expected_topic = f"{self.PROJECT} in {self.LANGUAGE}"
         expected_message = f"Resource {self.RESOURCE} fully reviewed."
         self.url = self.build_webhook_url(
@@ -28,7 +26,6 @@ class TransifexHookTests(WebhookTestCase):
         self.check_webhook("", expected_topic, expected_message)
     def test_transifex_translated_message(self) -> None:
-        self.REVIEWED = False
         expected_topic = f"{self.PROJECT} in {self.LANGUAGE}"
         expected_message = f"Resource {self.RESOURCE} fully translated."
         self.url = self.build_webhook_url(

@@ -1,7 +0,0 @@
-from django import forms
-class EnterpriseToSForm(forms.Form):
-    full_name = forms.CharField(max_length=100)
-    company = forms.CharField(max_length=100)
-    terms = forms.BooleanField(required=True)