ruff: Fix SIM102 nested `if` statements.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
Anders Kaseorg 2023-01-17 20:59:37 -05:00 committed by Tim Abbott
parent 50cf9bc4b8
commit b0e569f07c
38 changed files with 333 additions and 327 deletions

View File

@ -227,14 +227,13 @@ class AnalyticsTestCase(ZulipTestCase):
kwargs[arg_keys[i]] = values[i]
for key, value in defaults.items():
kwargs[key] = kwargs.get(key, value)
if table is not InstallationCount:
if "realm" not in kwargs:
if "user" in kwargs:
kwargs["realm"] = kwargs["user"].realm
elif "stream" in kwargs:
kwargs["realm"] = kwargs["stream"].realm
else:
kwargs["realm"] = self.default_realm
if table is not InstallationCount and "realm" not in kwargs:
if "user" in kwargs:
kwargs["realm"] = kwargs["user"].realm
elif "stream" in kwargs:
kwargs["realm"] = kwargs["stream"].realm
else:
kwargs["realm"] = self.default_realm
self.assertEqual(table.objects.filter(**kwargs).count(), 1)
self.assert_length(arg_values, table.objects.count())

View File

@ -58,9 +58,8 @@ def get_filtered_commands() -> Dict[str, str]:
for command, app in all_commands.items():
if app not in documented_apps:
continue
if app in documented_command_subsets:
if command not in documented_command_subsets[app]:
continue
if app in documented_command_subsets and command not in documented_command_subsets[app]:
continue
documented_commands[command] = app
return documented_commands

View File

@ -47,18 +47,17 @@ if not options.nagios and not options.munin:
print("No output options specified! Please provide --munin or --nagios")
sys.exit(0)
if options.munin:
if options.config == "config":
print(
"""graph_title Send-Receive times
if options.munin and options.config == "config":
print(
"""graph_title Send-Receive times
graph_info The number of seconds it takes to send and receive a message from the server
graph_args -u 5 -l 0
graph_vlabel RTT (seconds)
sendreceive.label Send-receive round trip time
sendreceive.warning 3
sendreceive.critical 5"""
)
sys.exit(0)
)
sys.exit(0)
sys.path.append("/home/zulip/deployments/current")
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"

View File

@ -141,11 +141,13 @@ def from_openapi(node: Dict[str, Any]) -> Any:
return UnionType([from_openapi(n) for n in node["oneOf"]])
if node["type"] == "object":
if "additionalProperties" in node:
if (
"additionalProperties" in node
# this might be a glitch in our current spec? or
# maybe I just understand it yet
if isinstance(node["additionalProperties"], dict):
return StringDictType(from_openapi(node["additionalProperties"]))
and isinstance(node["additionalProperties"], dict)
):
return StringDictType(from_openapi(node["additionalProperties"]))
if "properties" not in node:
return dict

View File

@ -28,9 +28,8 @@ def clean_up_pattern(s: str) -> str:
paren_level += 1
if c == "<" and prior_char == "P":
in_braces = True
if in_braces or (paren_level == 0):
if c != "?":
result += c
if (in_braces or paren_level == 0) and c != "?":
result += c
if c == ")":
paren_level -= 1
if c == ">":

View File

@ -58,9 +58,12 @@ def adjust_block_indentation(tokens: List[Token], fn: str) -> None:
continue
if start_token and token.indent is not None:
if not start_token.indent_is_final and token.indent == start_token.orig_indent:
if token_allows_children_to_skip_indents(start_token):
start_token.child_indent = start_token.indent
if (
not start_token.indent_is_final
and token.indent == start_token.orig_indent
and token_allows_children_to_skip_indents(start_token)
):
start_token.child_indent = start_token.indent
start_token.indent_is_final = True
# Detect start token by its having a end token

View File

@ -465,16 +465,14 @@ def validate(fn: Optional[str] = None, text: Optional[str] = None) -> List[Token
tag = token.tag
if not state.foreign:
if kind == "html_start":
if tag in HTML_VOID_TAGS:
raise TemplateParserError(
f"Tag must be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
)
elif kind == "html_singleton":
if tag not in HTML_VOID_TAGS:
raise TemplateParserError(
f"Tag must not be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
)
if kind == "html_start" and tag in HTML_VOID_TAGS:
raise TemplateParserError(
f"Tag must be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
)
elif kind == "html_singleton" and tag not in HTML_VOID_TAGS:
raise TemplateParserError(
f"Tag must not be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
)
flavor = tag_flavor(token)
if flavor == "start":
@ -511,9 +509,8 @@ def ensure_matching_indentation(fn: str, tokens: List[Token], lines: List[str])
if end_line > start_line + 1:
if is_inline_tag:
end_row_text = lines[end_line - 1]
if end_row_text.lstrip().startswith(end_token.s):
if end_col != start_col:
return True
if end_row_text.lstrip().startswith(end_token.s) and end_col != start_col:
return True
else:
if end_col != start_col:
return True

View File

@ -66,19 +66,20 @@ if __name__ == "__main__":
migration_number = file[0:4]
counter = file_index.count(migration_number)
if counter > 1 and file[0:4] not in stack:
if (
counter > 1
and file[0:4] not in stack
# When we need to backport migrations to a previous
# release, we sometimes end up with multiple having
# the same ID number (which isn't a problem; the
# migrations graph structure, not the IDs, is what
# matters).
if migration_number not in MIGRATIONS_TO_SKIP:
conflicts += [
file_name
for file_name in files_list
if file_name.startswith(migration_number)
]
stack.append(migration_number)
and migration_number not in MIGRATIONS_TO_SKIP
):
conflicts += [
file_name for file_name in files_list if file_name.startswith(migration_number)
]
stack.append(migration_number)
if len(conflicts) > 0:
resolve_conflicts(conflicts, files_list)

View File

@ -668,55 +668,52 @@ def do_update_message(
# newly sent messages anyway) and having magical live-updates
# where possible.
users_to_be_notified = list(map(user_info, ums))
if stream_being_edited is not None:
if stream_being_edited.is_history_public_to_subscribers():
subscriptions = get_active_subscriptions_for_stream_id(
stream_id, include_deactivated_users=False
)
# We exclude long-term idle users, since they by
# definition have no active clients.
subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
# Remove duplicates by excluding the id of users already
# in users_to_be_notified list. This is the case where a
# user both has a UserMessage row and is a current
# Subscriber
if stream_being_edited is not None and stream_being_edited.is_history_public_to_subscribers():
subscriptions = get_active_subscriptions_for_stream_id(
stream_id, include_deactivated_users=False
)
# We exclude long-term idle users, since they by
# definition have no active clients.
subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
# Remove duplicates by excluding the id of users already
# in users_to_be_notified list. This is the case where a
# user both has a UserMessage row and is a current
# Subscriber
subscriptions = subscriptions.exclude(
user_profile_id__in=[um.user_profile_id for um in ums]
)
if new_stream is not None:
assert delete_event_notify_user_ids is not None
subscriptions = subscriptions.exclude(user_profile_id__in=delete_event_notify_user_ids)
# All users that are subscribed to the stream must be
# notified when a message is edited
subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
if new_stream is not None:
# TODO: Guest users don't see the new moved topic
# unless breadcrumb message for new stream is
# enabled. Excluding these users from receiving this
# event helps us avoid a error traceback for our
# clients. We should figure out a way to inform the
# guest users of this new topic if sending a 'message'
# event for these messages is not an option.
#
# Don't send this event to guest subs who are not
# subscribers of the old stream but are subscribed to
# the new stream; clients will be confused.
old_stream_unsubbed_guests = [
sub
for sub in subs_to_new_stream
if sub.user_profile.is_guest and sub.user_profile_id not in subscriber_ids
]
subscriptions = subscriptions.exclude(
user_profile_id__in=[um.user_profile_id for um in ums]
user_profile_id__in=[sub.user_profile_id for sub in old_stream_unsubbed_guests]
)
if new_stream is not None:
assert delete_event_notify_user_ids is not None
subscriptions = subscriptions.exclude(
user_profile_id__in=delete_event_notify_user_ids
)
# All users that are subscribed to the stream must be
# notified when a message is edited
subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
if new_stream is not None:
# TODO: Guest users don't see the new moved topic
# unless breadcrumb message for new stream is
# enabled. Excluding these users from receiving this
# event helps us avoid a error traceback for our
# clients. We should figure out a way to inform the
# guest users of this new topic if sending a 'message'
# event for these messages is not an option.
#
# Don't send this event to guest subs who are not
# subscribers of the old stream but are subscribed to
# the new stream; clients will be confused.
old_stream_unsubbed_guests = [
sub
for sub in subs_to_new_stream
if sub.user_profile.is_guest and sub.user_profile_id not in subscriber_ids
]
subscriptions = subscriptions.exclude(
user_profile_id__in=[sub.user_profile_id for sub in old_stream_unsubbed_guests]
)
subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
users_to_be_notified += list(map(subscriber_info, sorted(subscriber_ids)))
users_to_be_notified += list(map(subscriber_info, sorted(subscriber_ids)))
# UserTopic updates and the content of notifications depend on
# whether we've moved the entire topic, or just part of it. We
@ -927,10 +924,12 @@ def check_update_message(
validate_message_edit_payload(message, stream_id, topic_name, propagate_mode, content)
if content is not None:
if (
content is not None
# You cannot edit the content of message sent by someone else.
if message.sender_id != user_profile.id:
raise JsonableError(_("You don't have permission to edit this message"))
and message.sender_id != user_profile.id
):
raise JsonableError(_("You don't have permission to edit this message"))
is_no_topic_msg = message.topic_name() == "(no topic)"

View File

@ -1479,11 +1479,14 @@ def check_message(
limit_unread_user_ids=limit_unread_user_ids,
)
if stream is not None and message_send_dict.rendering_result.mentions_wildcard:
if not wildcard_mention_allowed(sender, stream):
raise JsonableError(
_("You do not have permission to use wildcard mentions in this stream.")
)
if (
stream is not None
and message_send_dict.rendering_result.mentions_wildcard
and not wildcard_mention_allowed(sender, stream)
):
raise JsonableError(
_("You do not have permission to use wildcard mentions in this stream.")
)
return message_send_dict

View File

@ -355,15 +355,14 @@ def get_service_dicts_for_bots(
}
for service in services
]
elif bot_type == UserProfile.EMBEDDED_BOT:
if bot_profile_id in embedded_bot_configs:
bot_config = embedded_bot_configs[bot_profile_id]
service_dicts = [
{
"config_data": bot_config,
"service_name": services[0].name,
}
]
elif bot_type == UserProfile.EMBEDDED_BOT and bot_profile_id in embedded_bot_configs:
bot_config = embedded_bot_configs[bot_profile_id]
service_dicts = [
{
"config_data": bot_config,
"service_name": services[0].name,
}
]
service_dicts_by_uid[bot_profile_id] = service_dicts
return service_dicts_by_uid

View File

@ -79,9 +79,8 @@ def get_avatar_field(
will return None and let the client compute the gravatar
url.
"""
if settings.ENABLE_GRAVATAR:
if avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
return None
if settings.ENABLE_GRAVATAR and avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
return None
"""
If we get this far, we'll compute an avatar URL that may be
@ -139,10 +138,9 @@ def absolute_avatar_url(user_profile: UserProfile) -> str:
def is_avatar_new(ldap_avatar: bytes, user_profile: UserProfile) -> bool:
new_avatar_hash = user_avatar_content_hash(ldap_avatar)
if user_profile.avatar_hash:
if user_profile.avatar_hash == new_avatar_hash:
# If an avatar exists and is the same as the new avatar,
# then, no need to change the avatar.
return False
if user_profile.avatar_hash and user_profile.avatar_hash == new_avatar_hash:
# If an avatar exists and is the same as the new avatar,
# then, no need to change the avatar.
return False
return True

View File

@ -204,14 +204,16 @@ def fetch_initial_state_data(
user_draft_dicts = [draft.to_dict() for draft in user_draft_objects]
state["drafts"] = user_draft_dicts
if want("muted_topics"):
if want("muted_topics") and (
# Suppress muted_topics data for clients that explicitly
# support user_topic. This allows clients to request both the
# user_topic and muted_topics, and receive the duplicate
# muted_topics data only from older servers that don't yet
# support user_topic.
if event_types is None or not want("user_topic"):
state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)
event_types is None
or not want("user_topic")
):
state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)
if want("muted_users"):
state["muted_users"] = [] if user_profile is None else get_user_mutes(user_profile)
@ -542,21 +544,20 @@ def fetch_initial_state_data(
[] if user_profile is None else get_starred_message_ids(user_profile)
)
if want("stream"):
if include_streams:
# The web app doesn't use the data from here; instead,
# it uses data from state["subscriptions"] and other
# places.
if user_profile is not None:
state["streams"] = do_get_streams(
user_profile, include_all_active=user_profile.is_realm_admin
)
else:
# TODO: This line isn't used by the web app because it
# gets these data via the `subscriptions` key; it will
# be used when the mobile apps support logged-out
# access.
state["streams"] = get_web_public_streams(realm) # nocoverage
if want("stream") and include_streams:
# The web app doesn't use the data from here; instead,
# it uses data from state["subscriptions"] and other
# places.
if user_profile is not None:
state["streams"] = do_get_streams(
user_profile, include_all_active=user_profile.is_realm_admin
)
else:
# TODO: This line isn't used by the web app because it
# gets these data via the `subscriptions` key; it will
# be used when the mobile apps support logged-out
# access.
state["streams"] = get_web_public_streams(realm) # nocoverage
if want("default_streams"):
if settings_user.is_guest:
# Guest users and logged-out users don't have access to
@ -707,13 +708,17 @@ def apply_event(
# Below, we handle maintaining first_message_id.
for sub_dict in state.get("subscriptions", []):
if event["message"]["stream_id"] == sub_dict["stream_id"]:
if sub_dict["first_message_id"] is None:
sub_dict["first_message_id"] = event["message"]["id"]
if (
event["message"]["stream_id"] == sub_dict["stream_id"]
and sub_dict["first_message_id"] is None
):
sub_dict["first_message_id"] = event["message"]["id"]
for stream_dict in state.get("streams", []):
if event["message"]["stream_id"] == stream_dict["stream_id"]:
if stream_dict["first_message_id"] is None:
stream_dict["first_message_id"] = event["message"]["id"]
if (
event["message"]["stream_id"] == stream_dict["stream_id"]
and stream_dict["first_message_id"] is None
):
stream_dict["first_message_id"] = event["message"]["id"]
elif event["type"] == "heartbeat":
# It may be impossible for a heartbeat event to actually reach
@ -767,9 +772,8 @@ def apply_event(
if event["op"] == "add":
person = copy.deepcopy(person)
if client_gravatar:
if person["avatar_url"].startswith("https://secure.gravatar.com/"):
person["avatar_url"] = None
if client_gravatar and person["avatar_url"].startswith("https://secure.gravatar.com/"):
person["avatar_url"] = None
person["is_active"] = True
if not person["is_bot"]:
person["profile_data"] = {}
@ -857,11 +861,14 @@ def apply_event(
if not was_admin and now_admin:
state["realm_bots"] = get_owned_bot_dicts(user_profile)
if client_gravatar and "avatar_url" in person:
if (
client_gravatar
and "avatar_url" in person
# Respect the client_gravatar setting in the `users` data.
if person["avatar_url"].startswith("https://secure.gravatar.com/"):
person["avatar_url"] = None
person["avatar_url_medium"] = None
and person["avatar_url"].startswith("https://secure.gravatar.com/")
):
person["avatar_url"] = None
person["avatar_url_medium"] = None
if person_user_id in state["raw_users"]:
p = state["raw_users"][person_user_id]
@ -1011,11 +1018,13 @@ def apply_event(
if permission in state:
state[permission] = user_profile.has_permission(policy)
if event["property"] in policy_permission_dict:
if policy_permission_dict[event["property"]] in state:
state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
event["property"]
)
if (
event["property"] in policy_permission_dict
and policy_permission_dict[event["property"]] in state
):
state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
event["property"]
)
# Finally, we need to recompute this value from its inputs.
state["can_create_streams"] = (

View File

@ -1595,9 +1595,8 @@ def export_files_from_s3(
count = 0
for bkey in bucket.objects.filter(Prefix=object_prefix):
if valid_hashes is not None:
if bkey.Object().key not in valid_hashes:
continue
if valid_hashes is not None and bkey.Object().key not in valid_hashes:
continue
key = bucket.Object(bkey.key)

View File

@ -306,9 +306,8 @@ def url_embed_preview_enabled(
if no_previews:
return False
if realm is None:
if message is not None:
realm = message.get_realm()
if realm is None and message is not None:
realm = message.get_realm()
if realm is None:
# realm can be None for odd use cases
@ -328,9 +327,8 @@ def image_preview_enabled(
if no_previews:
return False
if realm is None:
if message is not None:
realm = message.get_realm()
if realm is None and message is not None:
realm = message.get_realm()
if realm is None:
# realm can be None for odd use cases
@ -1780,12 +1778,16 @@ class MarkdownListPreprocessor(markdown.preprocessors.Preprocessor):
# a newline.
li1 = self.LI_RE.match(lines[i])
li2 = self.LI_RE.match(lines[i + 1])
if not in_code_fence and lines[i]:
if (li2 and not li1) or (
li1 and li2 and (len(li1.group(1)) == 1) != (len(li2.group(1)) == 1)
):
copy.insert(i + inserts + 1, "")
inserts += 1
if (
not in_code_fence
and lines[i]
and (
(li2 and not li1)
or (li1 and li2 and (len(li1.group(1)) == 1) != (len(li2.group(1)) == 1))
)
):
copy.insert(i + inserts + 1, "")
inserts += 1
return copy
@ -1865,9 +1867,8 @@ class UserMentionPattern(CompiledInlineProcessor):
# name matches the full_name of user in mention_data.
# This enforces our decision that
# @**user_1_name|id_for_user_2** should be invalid syntax.
if full_name:
if user and user.full_name != full_name:
return None, None, None
if full_name and user and user.full_name != full_name:
return None, None, None
else:
# For @**name** syntax.
user = db_data.mention_data.get_user_by_name(name)
@ -2538,9 +2539,8 @@ def do_convert(
# * Nothing is passed in other than content -> just run default options (e.g. for docs)
# * message is passed, but no realm is -> look up realm from message
# * message_realm is passed -> use that realm for Markdown purposes
if message is not None:
if message_realm is None:
message_realm = message.get_realm()
if message is not None and message_realm is None:
message_realm = message.get_realm()
if message_realm is None:
linkifiers_key = DEFAULT_MARKDOWN_KEY
else:
@ -2551,12 +2551,15 @@ def do_convert(
else:
logging_message_id = "unknown"
if message is not None and message_realm is not None:
if message_realm.is_zephyr_mirror_realm:
if message.sending_client.name == "zephyr_mirror":
# Use slightly customized Markdown processor for content
# delivered via zephyr_mirror
linkifiers_key = ZEPHYR_MIRROR_MARKDOWN_KEY
if (
message is not None
and message_realm is not None
and message_realm.is_zephyr_mirror_realm
and message.sending_client.name == "zephyr_mirror"
):
# Use slightly customized Markdown processor for content
# delivered via zephyr_mirror
linkifiers_key = ZEPHYR_MIRROR_MARKDOWN_KEY
maybe_update_markdown_engines(linkifiers_key, email_gateway)
md_engine_key = (linkifiers_key, email_gateway)

View File

@ -135,9 +135,8 @@ Missing required -X argument in curl command:
for line in lines:
regex = r'curl [-](sS)?X "?(GET|DELETE|PATCH|POST)"?'
if line.startswith("curl"):
if re.search(regex, line) is None:
raise MarkdownRenderingError(error_msg.format(command=line.strip()))
if line.startswith("curl") and re.search(regex, line) is None:
raise MarkdownRenderingError(error_msg.format(command=line.strip()))
CODE_VALIDATORS: Dict[Optional[str], Callable[[List[str]], None]] = {

View File

@ -42,17 +42,18 @@ class NestedCodeBlocksRendererTreeProcessor(markdown.treeprocessors.Treeprocesso
for code_tag in code_tags:
parent: Any = code_tag.family.parent
grandparent: Any = code_tag.family.grandparent
if parent.tag == "p" and grandparent.tag == "li":
if (
parent.tag == "p"
and grandparent.tag == "li"
and parent.text is None
and len(list(parent)) == 1
and len(list(parent.itertext())) == 1
):
# if the parent (<p>) has no text, and no children,
# that means that the <code> element inside is its
# only thing inside the bullet, we can confidently say
# that this is a nested code block
if (
parent.text is None
and len(list(parent)) == 1
and len(list(parent.itertext())) == 1
):
nested_code_blocks.append(code_tag)
nested_code_blocks.append(code_tag)
return nested_code_blocks

View File

@ -1284,10 +1284,12 @@ def apply_unread_message_event(
topic=topic,
)
if stream_id not in state["muted_stream_ids"]:
if (
stream_id not in state["muted_stream_ids"]
# This next check hits the database.
if not topic_is_muted(user_profile, stream_id, topic):
state["unmuted_stream_msgs"].add(message_id)
and not topic_is_muted(user_profile, stream_id, topic)
):
state["unmuted_stream_msgs"].add(message_id)
elif message_type == "private":
if len(others) == 1:
@ -1311,9 +1313,8 @@ def apply_unread_message_event(
if "mentioned" in flags:
state["mentions"].add(message_id)
if "wildcard_mentioned" in flags:
if message_id in state["unmuted_stream_msgs"]:
state["mentions"].add(message_id)
if "wildcard_mentioned" in flags and message_id in state["unmuted_stream_msgs"]:
state["mentions"].add(message_id)
def remove_message_id_from_unread_mgs(state: RawUnreadMessagesResult, message_id: int) -> None:

View File

@ -95,22 +95,23 @@ def filter_by_subscription_history(
stream_messages = stream_messages[i:]
break
final_msg_count = len(stream_messages)
if initial_msg_count == final_msg_count:
if stream_messages[-1]["id"] <= event_last_message_id:
stream_messages = []
if (
initial_msg_count == final_msg_count
and stream_messages[-1]["id"] <= event_last_message_id
):
stream_messages = []
else:
raise AssertionError(f"{log_entry.event_type} is not a subscription event.")
if len(stream_messages) > 0:
# We do this check for last event since if the last subscription
# event was a subscription_deactivated then we don't want to create
# UserMessage rows for any of the remaining messages.
if stream_subscription_logs[-1].event_type in (
RealmAuditLog.SUBSCRIPTION_ACTIVATED,
RealmAuditLog.SUBSCRIPTION_CREATED,
):
for stream_message in stream_messages:
store_user_message_to_insert(stream_message)
# We do this check for last event since if the last subscription
# event was a subscription_deactivated then we don't want to create
# UserMessage rows for any of the remaining messages.
if len(stream_messages) > 0 and stream_subscription_logs[-1].event_type in (
RealmAuditLog.SUBSCRIPTION_ACTIVATED,
RealmAuditLog.SUBSCRIPTION_CREATED,
):
for stream_message in stream_messages:
store_user_message_to_insert(stream_message)
return user_messages_to_insert

View File

@ -284,9 +284,8 @@ def validate_user_access_to_subscribers_helper(
# With the exception of web-public streams, a guest must
# be subscribed to a stream (even a public one) in order
# to see subscribers.
if user_profile.is_guest:
if check_user_subscribed(user_profile):
return
if user_profile.is_guest and check_user_subscribed(user_profile):
return
# We could explicitly handle the case where guests aren't
# subscribed here in an `else` statement or we can fall
# through to the subsequent logic. Tim prefers the latter.

View File

@ -1019,10 +1019,13 @@ Output:
body=content,
realm=recipient_realm,
)
if not UserMessage.objects.filter(user_profile=sender, message_id=message_id).exists():
if not sender.is_bot and not allow_unsubscribed_sender:
raise AssertionError(
f"""
if (
not UserMessage.objects.filter(user_profile=sender, message_id=message_id).exists()
and not sender.is_bot
and not allow_unsubscribed_sender
):
raise AssertionError(
f"""
It appears that the sender did not get a UserMessage row, which is
almost certainly an artificial symptom that in your test setup you
have decided to send a message to a stream without the sender being
@ -1034,7 +1037,7 @@ Output:
{self.subscribed_stream_name_list(sender)}
"""
)
)
return message_id

View File

@ -84,7 +84,7 @@ def check_send_webhook_message(
),
unquote_url_parameters: bool = False,
) -> None:
if complete_event_type is not None:
if complete_event_type is not None and (
# Here, we implement Zulip's generic support for filtering
# events sent by the third-party service.
#
@ -95,14 +95,16 @@ def check_send_webhook_message(
#
# We match items in only_events and exclude_events using Unix
# shell-style wildcards.
if (
(
only_events is not None
and all(not fnmatch.fnmatch(complete_event_type, pattern) for pattern in only_events)
) or (
)
or (
exclude_events is not None
and any(fnmatch.fnmatch(complete_event_type, pattern) for pattern in exclude_events)
):
return
)
):
return
client = RequestNotes.get_notes(request).client
assert client is not None
@ -150,9 +152,11 @@ def standardize_headers(input_headers: Union[None, Dict[str, Any]]) -> Dict[str,
for raw_header in input_headers:
polished_header = raw_header.upper().replace("-", "_")
if polished_header not in ["CONTENT_TYPE", "CONTENT_LENGTH"]:
if not polished_header.startswith("HTTP_"):
polished_header = "HTTP_" + polished_header
if polished_header not in [
"CONTENT_TYPE",
"CONTENT_LENGTH",
] and not polished_header.startswith("HTTP_"):
polished_header = "HTTP_" + polished_header
canonical_headers[polished_header] = str(input_headers[raw_header])
return canonical_headers

View File

@ -217,13 +217,16 @@ def get_pull_request_event_message(
main_message = f"{main_message} {branch_info}"
punctuation = ":" if message else "."
if assignees or assignee or (target_branch and base_branch) or (title is None):
main_message = f"{main_message}{punctuation}"
elif title is not None:
if (
assignees
or assignee
or (target_branch and base_branch)
or title is None
# Once we get here, we know that the message ends with a title
# which could already have punctuation at the end
if title[-1] not in string.punctuation:
main_message = f"{main_message}{punctuation}"
or title[-1] not in string.punctuation
):
main_message = f"{main_message}{punctuation}"
if message:
main_message += "\n" + CONTENT_MESSAGE_TEMPLATE.format(message=message)

View File

@ -36,14 +36,11 @@ realms used for testing; consider using deactivate_realm instead."""
from corporate.models import CustomerPlan, get_customer_by_realm
customer = get_customer_by_realm(realm)
if customer:
if (
customer.stripe_customer_id
or CustomerPlan.objects.filter(customer=customer).count() > 0
):
raise CommandError(
"This realm has had a billing relationship associated with it!"
)
if customer and (
customer.stripe_customer_id
or CustomerPlan.objects.filter(customer=customer).count() > 0
):
raise CommandError("This realm has had a billing relationship associated with it!")
print(
"This command will \033[91mPERMANENTLY DELETE\033[0m all data for this realm. "

View File

@ -27,13 +27,10 @@ def migrate_existing_attachment_data(
owner = entry.owner
entry.realm = owner.realm
for message in entry.messages.all():
if owner == message.sender:
if message.recipient.type == Recipient.STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_realm_public = (
not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
)
entry.is_realm_public = entry.is_realm_public or is_realm_public
if owner == message.sender and message.recipient.type == Recipient.STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_realm_public = not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
entry.is_realm_public = entry.is_realm_public or is_realm_public
entry.save()

View File

@ -4462,9 +4462,8 @@ def check_valid_user_ids(realm_id: int, val: object, allow_deactivated: bool = F
except UserProfile.DoesNotExist:
raise ValidationError(_("Invalid user ID: {}").format(user_id))
if not allow_deactivated:
if not user_profile.is_active:
raise ValidationError(_("User with ID {} is deactivated").format(user_id))
if not allow_deactivated and not user_profile.is_active:
raise ValidationError(_("User with ID {} is deactivated").format(user_id))
if user_profile.is_bot:
raise ValidationError(_("User with ID {} is a bot").format(user_id))

View File

@ -1011,11 +1011,10 @@ def json_fetch_api_key(
realm = get_realm_from_request(request)
if realm is None:
raise JsonableError(_("Invalid subdomain"))
if password_auth_enabled(user_profile.realm):
if not authenticate(
request=request, username=user_profile.delivery_email, password=password, realm=realm
):
raise JsonableError(_("Password is incorrect."))
if password_auth_enabled(user_profile.realm) and not authenticate(
request=request, username=user_profile.delivery_email, password=password, realm=realm
):
raise JsonableError(_("Password is incorrect."))
api_key = get_api_key(user_profile)
return json_success(request, data={"api_key": api_key, "email": user_profile.delivery_email})

View File

@ -222,16 +222,17 @@ def update_realm_custom_profile_field(
_("Only 2 custom profile fields can be displayed in the profile summary.")
)
if field.field_type == CustomProfileField.EXTERNAL_ACCOUNT:
if (
field.field_type == CustomProfileField.EXTERNAL_ACCOUNT
# HACK: Allow changing the display_in_profile_summary property
# of default external account types, but not any others.
#
# TODO: Make the name/hint/field_data parameters optional, and
# just require that None was passed for all of them for this case.
if is_default_external_field(
field.field_type, orjson.loads(field.field_data)
) and not update_only_display_in_profile_summary(name, hint, field_data, field):
raise JsonableError(_("Default custom field cannot be updated."))
and is_default_external_field(field.field_type, orjson.loads(field.field_data))
and not update_only_display_in_profile_summary(name, hint, field_data, field)
):
raise JsonableError(_("Default custom field cannot be updated."))
validate_custom_profile_field(
name, hint, field.field_type, field_data, display_in_profile_summary

View File

@@ -267,36 +267,36 @@ def update_realm(
# Realm.notifications_stream and Realm.signup_notifications_stream are not boolean,
# str or integer field, and thus doesn't fit into the do_set_realm_property framework.
if notifications_stream_id is not None:
if realm.notifications_stream is None or (
realm.notifications_stream.id != notifications_stream_id
):
new_notifications_stream = None
if notifications_stream_id >= 0:
(new_notifications_stream, sub) = access_stream_by_id(
user_profile, notifications_stream_id
)
do_set_realm_notifications_stream(
realm, new_notifications_stream, notifications_stream_id, acting_user=user_profile
if notifications_stream_id is not None and (
realm.notifications_stream is None
or (realm.notifications_stream.id != notifications_stream_id)
):
new_notifications_stream = None
if notifications_stream_id >= 0:
(new_notifications_stream, sub) = access_stream_by_id(
user_profile, notifications_stream_id
)
data["notifications_stream_id"] = notifications_stream_id
do_set_realm_notifications_stream(
realm, new_notifications_stream, notifications_stream_id, acting_user=user_profile
)
data["notifications_stream_id"] = notifications_stream_id
if signup_notifications_stream_id is not None:
if realm.signup_notifications_stream is None or (
realm.signup_notifications_stream.id != signup_notifications_stream_id
):
new_signup_notifications_stream = None
if signup_notifications_stream_id >= 0:
(new_signup_notifications_stream, sub) = access_stream_by_id(
user_profile, signup_notifications_stream_id
)
do_set_realm_signup_notifications_stream(
realm,
new_signup_notifications_stream,
signup_notifications_stream_id,
acting_user=user_profile,
if signup_notifications_stream_id is not None and (
realm.signup_notifications_stream is None
or realm.signup_notifications_stream.id != signup_notifications_stream_id
):
new_signup_notifications_stream = None
if signup_notifications_stream_id >= 0:
(new_signup_notifications_stream, sub) = access_stream_by_id(
user_profile, signup_notifications_stream_id
)
data["signup_notifications_stream_id"] = signup_notifications_stream_id
do_set_realm_signup_notifications_stream(
realm,
new_signup_notifications_stream,
signup_notifications_stream_id,
acting_user=user_profile,
)
data["signup_notifications_stream_id"] = signup_notifications_stream_id
if default_code_block_language is not None:
# Migrate '', used in the API to encode the default/None behavior of this feature.

View File

@@ -37,9 +37,8 @@ def upload_emoji(
raise JsonableError(_("A custom emoji with this name already exists."))
if len(request.FILES) != 1:
raise JsonableError(_("You must upload exactly one file."))
if emoji_name in valid_built_in_emoji:
if not user_profile.is_realm_admin:
raise JsonableError(_("Only administrators can override built-in emoji."))
if emoji_name in valid_built_in_emoji and not user_profile.is_realm_admin:
raise JsonableError(_("Only administrators can override built-in emoji."))
emoji_file = list(request.FILES.values())[0]
assert isinstance(emoji_file, UploadedFile)
assert emoji_file.size is not None

View File

@@ -614,12 +614,11 @@ def create_realm(request: HttpRequest, creation_key: Optional[str] = None) -> Ht
request,
"zerver/realm_creation_link_invalid.html",
)
if not settings.OPEN_REALM_CREATION:
if key_record is None:
return TemplateResponse(
request,
"zerver/realm_creation_disabled.html",
)
if not settings.OPEN_REALM_CREATION and key_record is None:
return TemplateResponse(
request,
"zerver/realm_creation_disabled.html",
)
# When settings.OPEN_REALM_CREATION is enabled, anyone can create a new realm,
# with a few restrictions on their email address.

View File

@@ -133,11 +133,10 @@ def get_topic_based_on_event(payload: WildValue, event: str) -> str:
def get_event_name(payload: WildValue, branches: Optional[str]) -> Optional[str]:
event_name = payload["eventType"].tame(check_string)
if event_name == "git.push":
if branches is not None:
branch = get_code_push_branch_name(payload)
if branches.find(branch) == -1:
return None
if event_name == "git.push" and branches is not None:
branch = get_code_push_branch_name(payload)
if branches.find(branch) == -1:
return None
if event_name == "git.pullrequest.merged":
status = payload["resource"]["status"].tame(check_string)
merge_status = payload["resource"]["mergeStatus"].tame(check_string)

View File

@@ -90,9 +90,8 @@ def api_bitbucket2_webhook(
if not payload["push"]["changes"]:
return json_success(request)
branch = get_branch_name_for_push_event(payload)
if branch and branches:
if branches.find(branch) == -1:
return json_success(request)
if branch and branches and branches.find(branch) == -1:
return json_success(request)
subjects = get_push_subjects(payload)
bodies = get_push_bodies(payload)

View File

@@ -203,9 +203,8 @@ def repo_push_handler(
event_target_type = change["ref"]["type"].tame(check_string)
if event_target_type == "BRANCH":
branch = change["ref"]["displayId"].tame(check_string)
if branches:
if branch not in branches:
continue
if branches and branch not in branches:
continue
data.append(repo_push_branch_data(payload, change))
elif event_target_type == "TAG":
data.append(repo_push_tag_data(payload, change))

View File

@@ -508,11 +508,10 @@ def get_event(request: HttpRequest, payload: WildValue, branches: Optional[str])
elif event in ["Confidential Note Hook", "Note Hook"]:
action = payload["object_attributes"]["noteable_type"].tame(check_string)
event = f"{event} {action}"
elif event == "Push Hook":
if branches is not None:
branch = get_branch_name(payload)
if branches.find(branch) == -1:
return None
elif event == "Push Hook" and branches is not None:
branch = get_branch_name(payload)
if branches.find(branch) == -1:
return None
if event in list(EVENT_FUNCTION_MAPPER.keys()):
return event

View File

@@ -48,11 +48,10 @@ def get_push_branch_name(payload: WildValue) -> str:
def get_event_name(payload: WildValue, branches: Optional[str]) -> Optional[str]:
event_name = payload["event"]["name"].tame(check_string)
if event_name == "repo-push":
if branches is not None:
branch = get_push_branch_name(payload)
if branches.find(branch) == -1:
return None
if event_name == "repo-push" and branches is not None:
branch = get_push_branch_name(payload)
if branches.find(branch) == -1:
return None
if event_name in EVENT_FUNCTION_MAPPER:
return event_name
raise UnsupportedWebhookEventTypeError(event_name)

View File

@@ -178,9 +178,8 @@ def saml_auth_enabled(realm: Optional[Realm] = None) -> bool:
def require_email_format_usernames(realm: Optional[Realm] = None) -> bool:
if ldap_auth_enabled(realm):
if settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN:
return False
if ldap_auth_enabled(realm) and (settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN):
return False
return True
@@ -1164,9 +1163,8 @@ def query_ldap(email: str) -> List[str]:
for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
value = ldap_attrs.get(ldap_field, ["LDAP field not present"])[0]
if django_field == "avatar":
if isinstance(value, bytes):
value = "(An avatar image file)"
if django_field == "avatar" and isinstance(value, bytes):
value = "(An avatar image file)"
values.append(f"{django_field}: {value}")
if settings.LDAP_EMAIL_ATTR is not None:
values.append("{}: {}".format("email", ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))

View File

@@ -305,12 +305,14 @@ elif REMOTE_POSTGRES_HOST != "":
DATABASES["default"]["OPTIONS"]["sslmode"] = REMOTE_POSTGRES_SSLMODE
else:
DATABASES["default"]["OPTIONS"]["sslmode"] = "verify-full"
elif get_config("postgresql", "database_user", "zulip") != "zulip":
if get_secret("postgres_password") is not None:
DATABASES["default"].update(
PASSWORD=get_secret("postgres_password"),
HOST="localhost",
)
elif (
get_config("postgresql", "database_user", "zulip") != "zulip"
and get_secret("postgres_password") is not None
):
DATABASES["default"].update(
PASSWORD=get_secret("postgres_password"),
HOST="localhost",
)
POSTGRESQL_MISSING_DICTIONARIES = bool(get_config("postgresql", "missing_dictionaries", None))
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"