ruff: Fix SIM102 nested `if` statements.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Anders Kaseorg authored 2023-01-17 20:59:37 -05:00; committed by Tim Abbott
parent 50cf9bc4b8
commit b0e569f07c
38 changed files with 333 additions and 327 deletions
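Ruff's SIM102 ("collapsible if") rule flags an `if` statement whose entire body is a single nested `if`; the two can be folded into one condition joined with `and`, which is the mechanical rewrite applied in every file below. A minimal, self-contained sketch of the pattern follows; the names (`User`, `notify`, `before`, `after`) are hypothetical illustrations, not code from this commit:

# Illustrative only: hypothetical names, not taken from any file in this commit.
from dataclasses import dataclass
from typing import Optional

@dataclass
class User:
    realm: str
    long_term_idle: bool = False

def notify(user: User) -> None:
    print(f"notifying a user in realm {user.realm}")

def before(user: Optional[User], realm: str) -> None:
    # Flagged by SIM102: the outer `if` exists only to guard the inner one.
    if user is not None:
        if user.realm == realm:
            notify(user)

def after(user: Optional[User], realm: str) -> None:
    # The SIM102 fix: one combined condition. `and` short-circuits, so
    # `user.realm` is still read only when `user is not None`.
    if user is not None and user.realm == realm:
        notify(user)

def after_long(user: Optional[User], realm: str) -> None:
    # When the merged condition is long, the same rewrite uses a
    # parenthesized multi-line condition, as many hunks below do.
    if (
        user is not None
        and user.realm == realm
        and not user.long_term_idle
    ):
        notify(user)

before(User("zulip"), "zulip")   # prints once
after(User("zulip"), "zulip")    # identical behavior
after_long(None, "zulip")        # no AttributeError: short-circuit guards the access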

View File

@@ -227,14 +227,13 @@ class AnalyticsTestCase(ZulipTestCase):
         kwargs[arg_keys[i]] = values[i]
     for key, value in defaults.items():
         kwargs[key] = kwargs.get(key, value)
-    if table is not InstallationCount:
-        if "realm" not in kwargs:
-            if "user" in kwargs:
-                kwargs["realm"] = kwargs["user"].realm
-            elif "stream" in kwargs:
-                kwargs["realm"] = kwargs["stream"].realm
-            else:
-                kwargs["realm"] = self.default_realm
+    if table is not InstallationCount and "realm" not in kwargs:
+        if "user" in kwargs:
+            kwargs["realm"] = kwargs["user"].realm
+        elif "stream" in kwargs:
+            kwargs["realm"] = kwargs["stream"].realm
+        else:
+            kwargs["realm"] = self.default_realm
     self.assertEqual(table.objects.filter(**kwargs).count(), 1)
 self.assert_length(arg_values, table.objects.count())

View File

@@ -58,9 +58,8 @@ def get_filtered_commands() -> Dict[str, str]:
     for command, app in all_commands.items():
         if app not in documented_apps:
             continue
-        if app in documented_command_subsets:
-            if command not in documented_command_subsets[app]:
-                continue
+        if app in documented_command_subsets and command not in documented_command_subsets[app]:
+            continue
         documented_commands[command] = app
     return documented_commands

View File

@@ -47,18 +47,17 @@ if not options.nagios and not options.munin:
     print("No output options specified! Please provide --munin or --nagios")
     sys.exit(0)

-if options.munin:
-    if options.config == "config":
-        print(
-            """graph_title Send-Receive times
+if options.munin and options.config == "config":
+    print(
+        """graph_title Send-Receive times
 graph_info The number of seconds it takes to send and receive a message from the server
 graph_args -u 5 -l 0
 graph_vlabel RTT (seconds)
 sendreceive.label Send-receive round trip time
 sendreceive.warning 3
 sendreceive.critical 5"""
-        )
-        sys.exit(0)
+    )
+    sys.exit(0)

 sys.path.append("/home/zulip/deployments/current")
 os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"

View File

@@ -141,11 +141,13 @@ def from_openapi(node: Dict[str, Any]) -> Any:
         return UnionType([from_openapi(n) for n in node["oneOf"]])
     if node["type"] == "object":
-        if "additionalProperties" in node:
+        if (
+            "additionalProperties" in node
             # this might be a glitch in our current spec? or
             # maybe I just understand it yet
-            if isinstance(node["additionalProperties"], dict):
-                return StringDictType(from_openapi(node["additionalProperties"]))
+            and isinstance(node["additionalProperties"], dict)
+        ):
+            return StringDictType(from_openapi(node["additionalProperties"]))
         if "properties" not in node:
             return dict

View File

@@ -28,9 +28,8 @@ def clean_up_pattern(s: str) -> str:
         paren_level += 1
     if c == "<" and prior_char == "P":
         in_braces = True
-    if in_braces or (paren_level == 0):
-        if c != "?":
-            result += c
+    if (in_braces or paren_level == 0) and c != "?":
+        result += c
     if c == ")":
         paren_level -= 1
     if c == ">":

View File

@@ -58,9 +58,12 @@ def adjust_block_indentation(tokens: List[Token], fn: str) -> None:
         continue
     if start_token and token.indent is not None:
-        if not start_token.indent_is_final and token.indent == start_token.orig_indent:
-            if token_allows_children_to_skip_indents(start_token):
-                start_token.child_indent = start_token.indent
+        if (
+            not start_token.indent_is_final
+            and token.indent == start_token.orig_indent
+            and token_allows_children_to_skip_indents(start_token)
+        ):
+            start_token.child_indent = start_token.indent
         start_token.indent_is_final = True

     # Detect start token by its having a end token

View File

@@ -465,16 +465,14 @@ def validate(fn: Optional[str] = None, text: Optional[str] = None) -> List[Token
     tag = token.tag

     if not state.foreign:
-        if kind == "html_start":
-            if tag in HTML_VOID_TAGS:
-                raise TemplateParserError(
-                    f"Tag must be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
-                )
-        elif kind == "html_singleton":
-            if tag not in HTML_VOID_TAGS:
-                raise TemplateParserError(
-                    f"Tag must not be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
-                )
+        if kind == "html_start" and tag in HTML_VOID_TAGS:
+            raise TemplateParserError(
+                f"Tag must be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
+            )
+        elif kind == "html_singleton" and tag not in HTML_VOID_TAGS:
+            raise TemplateParserError(
+                f"Tag must not be self-closing: {tag} at {fn} line {token.line}, col {token.col}"
+            )

     flavor = tag_flavor(token)
     if flavor == "start":

@@ -511,9 +509,8 @@ def ensure_matching_indentation(fn: str, tokens: List[Token], lines: List[str])
     if end_line > start_line + 1:
         if is_inline_tag:
             end_row_text = lines[end_line - 1]
-            if end_row_text.lstrip().startswith(end_token.s):
-                if end_col != start_col:
-                    return True
+            if end_row_text.lstrip().startswith(end_token.s) and end_col != start_col:
+                return True
         else:
             if end_col != start_col:
                 return True

View File

@@ -66,19 +66,20 @@ if __name__ == "__main__":
     migration_number = file[0:4]
     counter = file_index.count(migration_number)
-    if counter > 1 and file[0:4] not in stack:
+    if (
+        counter > 1
+        and file[0:4] not in stack
         # When we need to backport migrations to a previous
         # release, we sometimes end up with multiple having
         # the same ID number (which isn't a problem; the
         # migrations graph structure, not the IDs, is what
         # matters).
-        if migration_number not in MIGRATIONS_TO_SKIP:
-            conflicts += [
-                file_name
-                for file_name in files_list
-                if file_name.startswith(migration_number)
-            ]
-            stack.append(migration_number)
+        and migration_number not in MIGRATIONS_TO_SKIP
+    ):
+        conflicts += [
+            file_name for file_name in files_list if file_name.startswith(migration_number)
+        ]
+        stack.append(migration_number)
 if len(conflicts) > 0:
     resolve_conflicts(conflicts, files_list)

View File

@@ -668,55 +668,52 @@ def do_update_message(
     # newly sent messages anyway) and having magical live-updates
     # where possible.
     users_to_be_notified = list(map(user_info, ums))
-    if stream_being_edited is not None:
-        if stream_being_edited.is_history_public_to_subscribers():
-            subscriptions = get_active_subscriptions_for_stream_id(
-                stream_id, include_deactivated_users=False
-            )
-            # We exclude long-term idle users, since they by
-            # definition have no active clients.
-            subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
-            # Remove duplicates by excluding the id of users already
-            # in users_to_be_notified list. This is the case where a
-            # user both has a UserMessage row and is a current
-            # Subscriber
-            subscriptions = subscriptions.exclude(
-                user_profile_id__in=[um.user_profile_id for um in ums]
-            )
-            if new_stream is not None:
-                assert delete_event_notify_user_ids is not None
-                subscriptions = subscriptions.exclude(
-                    user_profile_id__in=delete_event_notify_user_ids
-                )
-            # All users that are subscribed to the stream must be
-            # notified when a message is edited
-            subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
-            if new_stream is not None:
-                # TODO: Guest users don't see the new moved topic
-                # unless breadcrumb message for new stream is
-                # enabled. Excluding these users from receiving this
-                # event helps us avoid a error traceback for our
-                # clients. We should figure out a way to inform the
-                # guest users of this new topic if sending a 'message'
-                # event for these messages is not an option.
-                #
-                # Don't send this event to guest subs who are not
-                # subscribers of the old stream but are subscribed to
-                # the new stream; clients will be confused.
-                old_stream_unsubbed_guests = [
-                    sub
-                    for sub in subs_to_new_stream
-                    if sub.user_profile.is_guest and sub.user_profile_id not in subscriber_ids
-                ]
-                subscriptions = subscriptions.exclude(
-                    user_profile_id__in=[sub.user_profile_id for sub in old_stream_unsubbed_guests]
-                )
-                subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
-            users_to_be_notified += list(map(subscriber_info, sorted(subscriber_ids)))
+    if stream_being_edited is not None and stream_being_edited.is_history_public_to_subscribers():
+        subscriptions = get_active_subscriptions_for_stream_id(
+            stream_id, include_deactivated_users=False
+        )
+        # We exclude long-term idle users, since they by
+        # definition have no active clients.
+        subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
+        # Remove duplicates by excluding the id of users already
+        # in users_to_be_notified list. This is the case where a
+        # user both has a UserMessage row and is a current
+        # Subscriber
+        subscriptions = subscriptions.exclude(
+            user_profile_id__in=[um.user_profile_id for um in ums]
+        )
+        if new_stream is not None:
+            assert delete_event_notify_user_ids is not None
+            subscriptions = subscriptions.exclude(user_profile_id__in=delete_event_notify_user_ids)
+        # All users that are subscribed to the stream must be
+        # notified when a message is edited
+        subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
+        if new_stream is not None:
+            # TODO: Guest users don't see the new moved topic
+            # unless breadcrumb message for new stream is
+            # enabled. Excluding these users from receiving this
+            # event helps us avoid a error traceback for our
+            # clients. We should figure out a way to inform the
+            # guest users of this new topic if sending a 'message'
+            # event for these messages is not an option.
+            #
+            # Don't send this event to guest subs who are not
+            # subscribers of the old stream but are subscribed to
+            # the new stream; clients will be confused.
+            old_stream_unsubbed_guests = [
+                sub
+                for sub in subs_to_new_stream
+                if sub.user_profile.is_guest and sub.user_profile_id not in subscriber_ids
+            ]
+            subscriptions = subscriptions.exclude(
+                user_profile_id__in=[sub.user_profile_id for sub in old_stream_unsubbed_guests]
+            )
+            subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
+        users_to_be_notified += list(map(subscriber_info, sorted(subscriber_ids)))

     # UserTopic updates and the content of notifications depend on
     # whether we've moved the entire topic, or just part of it. We

@@ -927,10 +924,12 @@ def check_update_message(
     validate_message_edit_payload(message, stream_id, topic_name, propagate_mode, content)

-    if content is not None:
+    if (
+        content is not None
         # You cannot edit the content of message sent by someone else.
-        if message.sender_id != user_profile.id:
-            raise JsonableError(_("You don't have permission to edit this message"))
+        and message.sender_id != user_profile.id
+    ):
+        raise JsonableError(_("You don't have permission to edit this message"))

     is_no_topic_msg = message.topic_name() == "(no topic)"

View File

@@ -1479,11 +1479,14 @@ def check_message(
         limit_unread_user_ids=limit_unread_user_ids,
     )

-    if stream is not None and message_send_dict.rendering_result.mentions_wildcard:
-        if not wildcard_mention_allowed(sender, stream):
-            raise JsonableError(
-                _("You do not have permission to use wildcard mentions in this stream.")
-            )
+    if (
+        stream is not None
+        and message_send_dict.rendering_result.mentions_wildcard
+        and not wildcard_mention_allowed(sender, stream)
+    ):
+        raise JsonableError(
+            _("You do not have permission to use wildcard mentions in this stream.")
+        )
     return message_send_dict

View File

@@ -355,15 +355,14 @@ def get_service_dicts_for_bots(
             }
             for service in services
         ]
-    elif bot_type == UserProfile.EMBEDDED_BOT:
-        if bot_profile_id in embedded_bot_configs:
-            bot_config = embedded_bot_configs[bot_profile_id]
-            service_dicts = [
-                {
-                    "config_data": bot_config,
-                    "service_name": services[0].name,
-                }
-            ]
+    elif bot_type == UserProfile.EMBEDDED_BOT and bot_profile_id in embedded_bot_configs:
+        bot_config = embedded_bot_configs[bot_profile_id]
+        service_dicts = [
+            {
+                "config_data": bot_config,
+                "service_name": services[0].name,
+            }
+        ]
     service_dicts_by_uid[bot_profile_id] = service_dicts
 return service_dicts_by_uid

View File

@@ -79,9 +79,8 @@ def get_avatar_field(
     will return None and let the client compute the gravatar
     url.
     """
-    if settings.ENABLE_GRAVATAR:
-        if avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
-            return None
+    if settings.ENABLE_GRAVATAR and avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
+        return None

     """
     If we get this far, we'll compute an avatar URL that may be

@@ -139,10 +138,9 @@ def absolute_avatar_url(user_profile: UserProfile) -> str:
 def is_avatar_new(ldap_avatar: bytes, user_profile: UserProfile) -> bool:
     new_avatar_hash = user_avatar_content_hash(ldap_avatar)

-    if user_profile.avatar_hash:
-        if user_profile.avatar_hash == new_avatar_hash:
-            # If an avatar exists and is the same as the new avatar,
-            # then, no need to change the avatar.
-            return False
+    if user_profile.avatar_hash and user_profile.avatar_hash == new_avatar_hash:
+        # If an avatar exists and is the same as the new avatar,
+        # then, no need to change the avatar.
+        return False

     return True

View File

@@ -204,14 +204,16 @@ def fetch_initial_state_data(
         user_draft_dicts = [draft.to_dict() for draft in user_draft_objects]
         state["drafts"] = user_draft_dicts

-    if want("muted_topics"):
+    if want("muted_topics") and (
         # Suppress muted_topics data for clients that explicitly
         # support user_topic. This allows clients to request both the
         # user_topic and muted_topics, and receive the duplicate
         # muted_topics data only from older servers that don't yet
         # support user_topic.
-        if event_types is None or not want("user_topic"):
-            state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)
+        event_types is None
+        or not want("user_topic")
+    ):
+        state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)

     if want("muted_users"):
         state["muted_users"] = [] if user_profile is None else get_user_mutes(user_profile)

@@ -542,21 +544,20 @@ def fetch_initial_state_data(
             [] if user_profile is None else get_starred_message_ids(user_profile)
         )

-    if want("stream"):
-        if include_streams:
-            # The web app doesn't use the data from here; instead,
-            # it uses data from state["subscriptions"] and other
-            # places.
-            if user_profile is not None:
-                state["streams"] = do_get_streams(
-                    user_profile, include_all_active=user_profile.is_realm_admin
-                )
-            else:
-                # TODO: This line isn't used by the web app because it
-                # gets these data via the `subscriptions` key; it will
-                # be used when the mobile apps support logged-out
-                # access.
-                state["streams"] = get_web_public_streams(realm)  # nocoverage
+    if want("stream") and include_streams:
+        # The web app doesn't use the data from here; instead,
+        # it uses data from state["subscriptions"] and other
+        # places.
+        if user_profile is not None:
+            state["streams"] = do_get_streams(
+                user_profile, include_all_active=user_profile.is_realm_admin
+            )
+        else:
+            # TODO: This line isn't used by the web app because it
+            # gets these data via the `subscriptions` key; it will
+            # be used when the mobile apps support logged-out
+            # access.
+            state["streams"] = get_web_public_streams(realm)  # nocoverage

     if want("default_streams"):
         if settings_user.is_guest:
             # Guest users and logged-out users don't have access to

@@ -707,13 +708,17 @@ def apply_event(
         # Below, we handle maintaining first_message_id.
         for sub_dict in state.get("subscriptions", []):
-            if event["message"]["stream_id"] == sub_dict["stream_id"]:
-                if sub_dict["first_message_id"] is None:
-                    sub_dict["first_message_id"] = event["message"]["id"]
+            if (
+                event["message"]["stream_id"] == sub_dict["stream_id"]
+                and sub_dict["first_message_id"] is None
+            ):
+                sub_dict["first_message_id"] = event["message"]["id"]
         for stream_dict in state.get("streams", []):
-            if event["message"]["stream_id"] == stream_dict["stream_id"]:
-                if stream_dict["first_message_id"] is None:
-                    stream_dict["first_message_id"] = event["message"]["id"]
+            if (
+                event["message"]["stream_id"] == stream_dict["stream_id"]
+                and stream_dict["first_message_id"] is None
+            ):
+                stream_dict["first_message_id"] = event["message"]["id"]

     elif event["type"] == "heartbeat":
         # It may be impossible for a heartbeat event to actually reach

@@ -767,9 +772,8 @@ def apply_event(
         if event["op"] == "add":
             person = copy.deepcopy(person)
-            if client_gravatar:
-                if person["avatar_url"].startswith("https://secure.gravatar.com/"):
-                    person["avatar_url"] = None
+            if client_gravatar and person["avatar_url"].startswith("https://secure.gravatar.com/"):
+                person["avatar_url"] = None
             person["is_active"] = True
             if not person["is_bot"]:
                 person["profile_data"] = {}

@@ -857,11 +861,14 @@ def apply_event(
             if not was_admin and now_admin:
                 state["realm_bots"] = get_owned_bot_dicts(user_profile)

-            if client_gravatar and "avatar_url" in person:
+            if (
+                client_gravatar
+                and "avatar_url" in person
                 # Respect the client_gravatar setting in the `users` data.
-                if person["avatar_url"].startswith("https://secure.gravatar.com/"):
-                    person["avatar_url"] = None
-                    person["avatar_url_medium"] = None
+                and person["avatar_url"].startswith("https://secure.gravatar.com/")
+            ):
+                person["avatar_url"] = None
+                person["avatar_url_medium"] = None

             if person_user_id in state["raw_users"]:
                 p = state["raw_users"][person_user_id]

@@ -1011,11 +1018,13 @@ def apply_event(
                 if permission in state:
                     state[permission] = user_profile.has_permission(policy)

-            if event["property"] in policy_permission_dict:
-                if policy_permission_dict[event["property"]] in state:
-                    state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
-                        event["property"]
-                    )
+            if (
+                event["property"] in policy_permission_dict
+                and policy_permission_dict[event["property"]] in state
+            ):
+                state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
+                    event["property"]
+                )

             # Finally, we need to recompute this value from its inputs.
             state["can_create_streams"] = (

View File

@@ -1595,9 +1595,8 @@ def export_files_from_s3(
     count = 0
     for bkey in bucket.objects.filter(Prefix=object_prefix):
-        if valid_hashes is not None:
-            if bkey.Object().key not in valid_hashes:
-                continue
+        if valid_hashes is not None and bkey.Object().key not in valid_hashes:
+            continue

         key = bucket.Object(bkey.key)

View File

@@ -306,9 +306,8 @@ def url_embed_preview_enabled(
     if no_previews:
         return False

-    if realm is None:
-        if message is not None:
-            realm = message.get_realm()
+    if realm is None and message is not None:
+        realm = message.get_realm()

     if realm is None:
         # realm can be None for odd use cases

@@ -328,9 +327,8 @@ def image_preview_enabled(
     if no_previews:
         return False

-    if realm is None:
-        if message is not None:
-            realm = message.get_realm()
+    if realm is None and message is not None:
+        realm = message.get_realm()

     if realm is None:
         # realm can be None for odd use cases

@@ -1780,12 +1778,16 @@ class MarkdownListPreprocessor(markdown.preprocessors.Preprocessor):
                 # a newline.
                 li1 = self.LI_RE.match(lines[i])
                 li2 = self.LI_RE.match(lines[i + 1])
-                if not in_code_fence and lines[i]:
-                    if (li2 and not li1) or (
-                        li1 and li2 and (len(li1.group(1)) == 1) != (len(li2.group(1)) == 1)
-                    ):
-                        copy.insert(i + inserts + 1, "")
-                        inserts += 1
+                if (
+                    not in_code_fence
+                    and lines[i]
+                    and (
+                        (li2 and not li1)
+                        or (li1 and li2 and (len(li1.group(1)) == 1) != (len(li2.group(1)) == 1))
+                    )
+                ):
+                    copy.insert(i + inserts + 1, "")
+                    inserts += 1

         return copy

@@ -1865,9 +1867,8 @@ class UserMentionPattern(CompiledInlineProcessor):
                 # name matches the full_name of user in mention_data.
                 # This enforces our decision that
                 # @**user_1_name|id_for_user_2** should be invalid syntax.
-                if full_name:
-                    if user and user.full_name != full_name:
-                        return None, None, None
+                if full_name and user and user.full_name != full_name:
+                    return None, None, None
             else:
                 # For @**name** syntax.
                 user = db_data.mention_data.get_user_by_name(name)

@@ -2538,9 +2539,8 @@ def do_convert(
     # * Nothing is passed in other than content -> just run default options (e.g. for docs)
     # * message is passed, but no realm is -> look up realm from message
     # * message_realm is passed -> use that realm for Markdown purposes
-    if message is not None:
-        if message_realm is None:
-            message_realm = message.get_realm()
+    if message is not None and message_realm is None:
+        message_realm = message.get_realm()
     if message_realm is None:
         linkifiers_key = DEFAULT_MARKDOWN_KEY
     else:

@@ -2551,12 +2551,15 @@ def do_convert(
     else:
         logging_message_id = "unknown"

-    if message is not None and message_realm is not None:
-        if message_realm.is_zephyr_mirror_realm:
-            if message.sending_client.name == "zephyr_mirror":
-                # Use slightly customized Markdown processor for content
-                # delivered via zephyr_mirror
-                linkifiers_key = ZEPHYR_MIRROR_MARKDOWN_KEY
+    if (
+        message is not None
+        and message_realm is not None
+        and message_realm.is_zephyr_mirror_realm
+        and message.sending_client.name == "zephyr_mirror"
+    ):
+        # Use slightly customized Markdown processor for content
+        # delivered via zephyr_mirror
+        linkifiers_key = ZEPHYR_MIRROR_MARKDOWN_KEY

     maybe_update_markdown_engines(linkifiers_key, email_gateway)
     md_engine_key = (linkifiers_key, email_gateway)

View File

@@ -135,9 +135,8 @@ Missing required -X argument in curl command:
     for line in lines:
         regex = r'curl [-](sS)?X "?(GET|DELETE|PATCH|POST)"?'
-        if line.startswith("curl"):
-            if re.search(regex, line) is None:
-                raise MarkdownRenderingError(error_msg.format(command=line.strip()))
+        if line.startswith("curl") and re.search(regex, line) is None:
+            raise MarkdownRenderingError(error_msg.format(command=line.strip()))


 CODE_VALIDATORS: Dict[Optional[str], Callable[[List[str]], None]] = {

View File

@@ -42,17 +42,18 @@ class NestedCodeBlocksRendererTreeProcessor(markdown.treeprocessors.Treeprocesso
     for code_tag in code_tags:
         parent: Any = code_tag.family.parent
         grandparent: Any = code_tag.family.grandparent
-        if parent.tag == "p" and grandparent.tag == "li":
+        if (
+            parent.tag == "p"
+            and grandparent.tag == "li"
+            and parent.text is None
+            and len(list(parent)) == 1
+            and len(list(parent.itertext())) == 1
+        ):
             # if the parent (<p>) has no text, and no children,
             # that means that the <code> element inside is its
             # only thing inside the bullet, we can confidently say
             # that this is a nested code block
-            if (
-                parent.text is None
-                and len(list(parent)) == 1
-                and len(list(parent.itertext())) == 1
-            ):
-                nested_code_blocks.append(code_tag)
+            nested_code_blocks.append(code_tag)
     return nested_code_blocks

View File

@@ -1284,10 +1284,12 @@ def apply_unread_message_event(
         topic=topic,
     )

-    if stream_id not in state["muted_stream_ids"]:
+    if (
+        stream_id not in state["muted_stream_ids"]
         # This next check hits the database.
-        if not topic_is_muted(user_profile, stream_id, topic):
-            state["unmuted_stream_msgs"].add(message_id)
+        and not topic_is_muted(user_profile, stream_id, topic)
+    ):
+        state["unmuted_stream_msgs"].add(message_id)

 elif message_type == "private":
     if len(others) == 1:

@@ -1311,9 +1313,8 @@ def apply_unread_message_event(
     if "mentioned" in flags:
         state["mentions"].add(message_id)
-    if "wildcard_mentioned" in flags:
-        if message_id in state["unmuted_stream_msgs"]:
-            state["mentions"].add(message_id)
+    if "wildcard_mentioned" in flags and message_id in state["unmuted_stream_msgs"]:
+        state["mentions"].add(message_id)


 def remove_message_id_from_unread_mgs(state: RawUnreadMessagesResult, message_id: int) -> None:

View File

@@ -95,22 +95,23 @@ def filter_by_subscription_history(
             stream_messages = stream_messages[i:]
             break
     final_msg_count = len(stream_messages)
-    if initial_msg_count == final_msg_count:
-        if stream_messages[-1]["id"] <= event_last_message_id:
-            stream_messages = []
+    if (
+        initial_msg_count == final_msg_count
+        and stream_messages[-1]["id"] <= event_last_message_id
+    ):
+        stream_messages = []
 else:
     raise AssertionError(f"{log_entry.event_type} is not a subscription event.")

-if len(stream_messages) > 0:
-    # We do this check for last event since if the last subscription
-    # event was a subscription_deactivated then we don't want to create
-    # UserMessage rows for any of the remaining messages.
-    if stream_subscription_logs[-1].event_type in (
-        RealmAuditLog.SUBSCRIPTION_ACTIVATED,
-        RealmAuditLog.SUBSCRIPTION_CREATED,
-    ):
-        for stream_message in stream_messages:
-            store_user_message_to_insert(stream_message)
+# We do this check for last event since if the last subscription
+# event was a subscription_deactivated then we don't want to create
+# UserMessage rows for any of the remaining messages.
+if len(stream_messages) > 0 and stream_subscription_logs[-1].event_type in (
+    RealmAuditLog.SUBSCRIPTION_ACTIVATED,
+    RealmAuditLog.SUBSCRIPTION_CREATED,
+):
+    for stream_message in stream_messages:
+        store_user_message_to_insert(stream_message)
 return user_messages_to_insert

View File

@@ -284,9 +284,8 @@ def validate_user_access_to_subscribers_helper(
     # With the exception of web-public streams, a guest must
     # be subscribed to a stream (even a public one) in order
     # to see subscribers.
-    if user_profile.is_guest:
-        if check_user_subscribed(user_profile):
-            return
+    if user_profile.is_guest and check_user_subscribed(user_profile):
+        return
     # We could explicitly handle the case where guests aren't
     # subscribed here in an `else` statement or we can fall
     # through to the subsequent logic. Tim prefers the latter.

View File

@@ -1019,10 +1019,13 @@ Output:
         body=content,
         realm=recipient_realm,
     )
-    if not UserMessage.objects.filter(user_profile=sender, message_id=message_id).exists():
-        if not sender.is_bot and not allow_unsubscribed_sender:
-            raise AssertionError(
-                f"""
+    if (
+        not UserMessage.objects.filter(user_profile=sender, message_id=message_id).exists()
+        and not sender.is_bot
+        and not allow_unsubscribed_sender
+    ):
+        raise AssertionError(
+            f"""
 It appears that the sender did not get a UserMessage row, which is
 almost certainly an artificial symptom that in your test setup you
 have decided to send a message to a stream without the sender being

@@ -1034,7 +1037,7 @@ Output:
 {self.subscribed_stream_name_list(sender)}
 """
-            )
+        )

     return message_id

View File

@@ -84,7 +84,7 @@ def check_send_webhook_message(
     ),
     unquote_url_parameters: bool = False,
 ) -> None:
-    if complete_event_type is not None:
+    if complete_event_type is not None and (
         # Here, we implement Zulip's generic support for filtering
         # events sent by the third-party service.
         #

@@ -95,14 +95,16 @@ def check_send_webhook_message(
         #
         # We match items in only_events and exclude_events using Unix
         # shell-style wildcards.
-        if (
-            only_events is not None
-            and all(not fnmatch.fnmatch(complete_event_type, pattern) for pattern in only_events)
-        ) or (
-            exclude_events is not None
-            and any(fnmatch.fnmatch(complete_event_type, pattern) for pattern in exclude_events)
-        ):
-            return
+        (
+            only_events is not None
+            and all(not fnmatch.fnmatch(complete_event_type, pattern) for pattern in only_events)
+        )
+        or (
+            exclude_events is not None
+            and any(fnmatch.fnmatch(complete_event_type, pattern) for pattern in exclude_events)
+        )
+    ):
+        return

     client = RequestNotes.get_notes(request).client
     assert client is not None

@@ -150,9 +152,11 @@ def standardize_headers(input_headers: Union[None, Dict[str, Any]]) -> Dict[str,
     for raw_header in input_headers:
         polished_header = raw_header.upper().replace("-", "_")
-        if polished_header not in ["CONTENT_TYPE", "CONTENT_LENGTH"]:
-            if not polished_header.startswith("HTTP_"):
-                polished_header = "HTTP_" + polished_header
+        if polished_header not in [
+            "CONTENT_TYPE",
+            "CONTENT_LENGTH",
+        ] and not polished_header.startswith("HTTP_"):
+            polished_header = "HTTP_" + polished_header
         canonical_headers[polished_header] = str(input_headers[raw_header])

     return canonical_headers

View File

@@ -217,13 +217,16 @@ def get_pull_request_event_message(
     main_message = f"{main_message} {branch_info}"

     punctuation = ":" if message else "."
-    if assignees or assignee or (target_branch and base_branch) or (title is None):
-        main_message = f"{main_message}{punctuation}"
-    elif title is not None:
+    if (
+        assignees
+        or assignee
+        or (target_branch and base_branch)
+        or title is None
         # Once we get here, we know that the message ends with a title
         # which could already have punctuation at the end
-        if title[-1] not in string.punctuation:
-            main_message = f"{main_message}{punctuation}"
+        or title[-1] not in string.punctuation
+    ):
+        main_message = f"{main_message}{punctuation}"

     if message:
         main_message += "\n" + CONTENT_MESSAGE_TEMPLATE.format(message=message)

View File

@@ -36,14 +36,11 @@ realms used for testing; consider using deactivate_realm instead."""
     from corporate.models import CustomerPlan, get_customer_by_realm

     customer = get_customer_by_realm(realm)
-    if customer:
-        if (
-            customer.stripe_customer_id
-            or CustomerPlan.objects.filter(customer=customer).count() > 0
-        ):
-            raise CommandError(
-                "This realm has had a billing relationship associated with it!"
-            )
+    if customer and (
+        customer.stripe_customer_id
+        or CustomerPlan.objects.filter(customer=customer).count() > 0
+    ):
+        raise CommandError("This realm has had a billing relationship associated with it!")

     print(
         "This command will \033[91mPERMANENTLY DELETE\033[0m all data for this realm. "

View File

@@ -27,13 +27,10 @@ def migrate_existing_attachment_data(
     owner = entry.owner
     entry.realm = owner.realm
     for message in entry.messages.all():
-        if owner == message.sender:
-            if message.recipient.type == Recipient.STREAM:
-                stream = Stream.objects.get(id=message.recipient.type_id)
-                is_realm_public = (
-                    not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
-                )
-                entry.is_realm_public = entry.is_realm_public or is_realm_public
+        if owner == message.sender and message.recipient.type == Recipient.STREAM:
+            stream = Stream.objects.get(id=message.recipient.type_id)
+            is_realm_public = not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
+            entry.is_realm_public = entry.is_realm_public or is_realm_public

     entry.save()

View File

@@ -4462,9 +4462,8 @@ def check_valid_user_ids(realm_id: int, val: object, allow_deactivated: bool = F
     except UserProfile.DoesNotExist:
         raise ValidationError(_("Invalid user ID: {}").format(user_id))

-    if not allow_deactivated:
-        if not user_profile.is_active:
-            raise ValidationError(_("User with ID {} is deactivated").format(user_id))
+    if not allow_deactivated and not user_profile.is_active:
+        raise ValidationError(_("User with ID {} is deactivated").format(user_id))

     if user_profile.is_bot:
         raise ValidationError(_("User with ID {} is a bot").format(user_id))

View File

@@ -1011,11 +1011,10 @@ def json_fetch_api_key(
     realm = get_realm_from_request(request)
     if realm is None:
         raise JsonableError(_("Invalid subdomain"))
-    if password_auth_enabled(user_profile.realm):
-        if not authenticate(
-            request=request, username=user_profile.delivery_email, password=password, realm=realm
-        ):
-            raise JsonableError(_("Password is incorrect."))
+    if password_auth_enabled(user_profile.realm) and not authenticate(
+        request=request, username=user_profile.delivery_email, password=password, realm=realm
+    ):
+        raise JsonableError(_("Password is incorrect."))

     api_key = get_api_key(user_profile)
     return json_success(request, data={"api_key": api_key, "email": user_profile.delivery_email})

View File

@@ -222,16 +222,17 @@ def update_realm_custom_profile_field(
             _("Only 2 custom profile fields can be displayed in the profile summary.")
         )

-    if field.field_type == CustomProfileField.EXTERNAL_ACCOUNT:
+    if (
+        field.field_type == CustomProfileField.EXTERNAL_ACCOUNT
         # HACK: Allow changing the display_in_profile_summary property
         # of default external account types, but not any others.
        #
         # TODO: Make the name/hint/field_data parameters optional, and
         # just require that None was passed for all of them for this case.
-        if is_default_external_field(
-            field.field_type, orjson.loads(field.field_data)
-        ) and not update_only_display_in_profile_summary(name, hint, field_data, field):
-            raise JsonableError(_("Default custom field cannot be updated."))
+        and is_default_external_field(field.field_type, orjson.loads(field.field_data))
+        and not update_only_display_in_profile_summary(name, hint, field_data, field)
+    ):
+        raise JsonableError(_("Default custom field cannot be updated."))

     validate_custom_profile_field(
         name, hint, field.field_type, field_data, display_in_profile_summary

View File

@@ -267,36 +267,36 @@ def update_realm(
     # Realm.notifications_stream and Realm.signup_notifications_stream are not boolean,
     # str or integer field, and thus doesn't fit into the do_set_realm_property framework.
-    if notifications_stream_id is not None:
-        if realm.notifications_stream is None or (
-            realm.notifications_stream.id != notifications_stream_id
-        ):
-            new_notifications_stream = None
-            if notifications_stream_id >= 0:
-                (new_notifications_stream, sub) = access_stream_by_id(
-                    user_profile, notifications_stream_id
-                )
-            do_set_realm_notifications_stream(
-                realm, new_notifications_stream, notifications_stream_id, acting_user=user_profile
-            )
-            data["notifications_stream_id"] = notifications_stream_id
+    if notifications_stream_id is not None and (
+        realm.notifications_stream is None
+        or (realm.notifications_stream.id != notifications_stream_id)
+    ):
+        new_notifications_stream = None
+        if notifications_stream_id >= 0:
+            (new_notifications_stream, sub) = access_stream_by_id(
+                user_profile, notifications_stream_id
+            )
+        do_set_realm_notifications_stream(
+            realm, new_notifications_stream, notifications_stream_id, acting_user=user_profile
+        )
+        data["notifications_stream_id"] = notifications_stream_id

-    if signup_notifications_stream_id is not None:
-        if realm.signup_notifications_stream is None or (
-            realm.signup_notifications_stream.id != signup_notifications_stream_id
-        ):
-            new_signup_notifications_stream = None
-            if signup_notifications_stream_id >= 0:
-                (new_signup_notifications_stream, sub) = access_stream_by_id(
-                    user_profile, signup_notifications_stream_id
-                )
-            do_set_realm_signup_notifications_stream(
-                realm,
-                new_signup_notifications_stream,
-                signup_notifications_stream_id,
-                acting_user=user_profile,
-            )
-            data["signup_notifications_stream_id"] = signup_notifications_stream_id
+    if signup_notifications_stream_id is not None and (
+        realm.signup_notifications_stream is None
+        or realm.signup_notifications_stream.id != signup_notifications_stream_id
+    ):
+        new_signup_notifications_stream = None
+        if signup_notifications_stream_id >= 0:
+            (new_signup_notifications_stream, sub) = access_stream_by_id(
+                user_profile, signup_notifications_stream_id
+            )
+        do_set_realm_signup_notifications_stream(
+            realm,
+            new_signup_notifications_stream,
+            signup_notifications_stream_id,
+            acting_user=user_profile,
+        )
+        data["signup_notifications_stream_id"] = signup_notifications_stream_id

     if default_code_block_language is not None:
         # Migrate '', used in the API to encode the default/None behavior of this feature.

View File

@@ -37,9 +37,8 @@ def upload_emoji(
         raise JsonableError(_("A custom emoji with this name already exists."))
     if len(request.FILES) != 1:
         raise JsonableError(_("You must upload exactly one file."))
-    if emoji_name in valid_built_in_emoji:
-        if not user_profile.is_realm_admin:
-            raise JsonableError(_("Only administrators can override built-in emoji."))
+    if emoji_name in valid_built_in_emoji and not user_profile.is_realm_admin:
+        raise JsonableError(_("Only administrators can override built-in emoji."))
     emoji_file = list(request.FILES.values())[0]
     assert isinstance(emoji_file, UploadedFile)
     assert emoji_file.size is not None

View File

@@ -614,12 +614,11 @@ def create_realm(request: HttpRequest, creation_key: Optional[str] = None) -> Ht
             request,
             "zerver/realm_creation_link_invalid.html",
         )
-    if not settings.OPEN_REALM_CREATION:
-        if key_record is None:
-            return TemplateResponse(
-                request,
-                "zerver/realm_creation_disabled.html",
-            )
+    if not settings.OPEN_REALM_CREATION and key_record is None:
+        return TemplateResponse(
+            request,
+            "zerver/realm_creation_disabled.html",
+        )

     # When settings.OPEN_REALM_CREATION is enabled, anyone can create a new realm,
     # with a few restrictions on their email address.

View File

@@ -133,11 +133,10 @@ def get_topic_based_on_event(payload: WildValue, event: str) -> str:
 def get_event_name(payload: WildValue, branches: Optional[str]) -> Optional[str]:
     event_name = payload["eventType"].tame(check_string)
-    if event_name == "git.push":
-        if branches is not None:
-            branch = get_code_push_branch_name(payload)
-            if branches.find(branch) == -1:
-                return None
+    if event_name == "git.push" and branches is not None:
+        branch = get_code_push_branch_name(payload)
+        if branches.find(branch) == -1:
+            return None
     if event_name == "git.pullrequest.merged":
         status = payload["resource"]["status"].tame(check_string)
         merge_status = payload["resource"]["mergeStatus"].tame(check_string)

View File

@@ -90,9 +90,8 @@ def api_bitbucket2_webhook(
     if not payload["push"]["changes"]:
         return json_success(request)
     branch = get_branch_name_for_push_event(payload)
-    if branch and branches:
-        if branches.find(branch) == -1:
-            return json_success(request)
+    if branch and branches and branches.find(branch) == -1:
+        return json_success(request)

     subjects = get_push_subjects(payload)
     bodies = get_push_bodies(payload)

View File

@@ -203,9 +203,8 @@ def repo_push_handler(
     event_target_type = change["ref"]["type"].tame(check_string)
     if event_target_type == "BRANCH":
         branch = change["ref"]["displayId"].tame(check_string)
-        if branches:
-            if branch not in branches:
-                continue
+        if branches and branch not in branches:
+            continue
         data.append(repo_push_branch_data(payload, change))
     elif event_target_type == "TAG":
         data.append(repo_push_tag_data(payload, change))

View File

@@ -508,11 +508,10 @@ def get_event(request: HttpRequest, payload: WildValue, branches: Optional[str])
     elif event in ["Confidential Note Hook", "Note Hook"]:
         action = payload["object_attributes"]["noteable_type"].tame(check_string)
         event = f"{event} {action}"
-    elif event == "Push Hook":
-        if branches is not None:
-            branch = get_branch_name(payload)
-            if branches.find(branch) == -1:
-                return None
+    elif event == "Push Hook" and branches is not None:
+        branch = get_branch_name(payload)
+        if branches.find(branch) == -1:
+            return None

     if event in list(EVENT_FUNCTION_MAPPER.keys()):
         return event

View File

@@ -48,11 +48,10 @@ def get_push_branch_name(payload: WildValue) -> str:
 def get_event_name(payload: WildValue, branches: Optional[str]) -> Optional[str]:
     event_name = payload["event"]["name"].tame(check_string)
-    if event_name == "repo-push":
-        if branches is not None:
-            branch = get_push_branch_name(payload)
-            if branches.find(branch) == -1:
-                return None
+    if event_name == "repo-push" and branches is not None:
+        branch = get_push_branch_name(payload)
+        if branches.find(branch) == -1:
+            return None
     if event_name in EVENT_FUNCTION_MAPPER:
         return event_name
     raise UnsupportedWebhookEventTypeError(event_name)

View File

@@ -178,9 +178,8 @@ def saml_auth_enabled(realm: Optional[Realm] = None) -> bool:
 def require_email_format_usernames(realm: Optional[Realm] = None) -> bool:
-    if ldap_auth_enabled(realm):
-        if settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN:
-            return False
+    if ldap_auth_enabled(realm) and (settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN):
+        return False
     return True

@@ -1164,9 +1163,8 @@ def query_ldap(email: str) -> List[str]:
     for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
         value = ldap_attrs.get(ldap_field, ["LDAP field not present"])[0]
-        if django_field == "avatar":
-            if isinstance(value, bytes):
-                value = "(An avatar image file)"
+        if django_field == "avatar" and isinstance(value, bytes):
+            value = "(An avatar image file)"
         values.append(f"{django_field}: {value}")
     if settings.LDAP_EMAIL_ATTR is not None:
         values.append("{}: {}".format("email", ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))

View File

@@ -305,12 +305,14 @@ elif REMOTE_POSTGRES_HOST != "":
         DATABASES["default"]["OPTIONS"]["sslmode"] = REMOTE_POSTGRES_SSLMODE
     else:
         DATABASES["default"]["OPTIONS"]["sslmode"] = "verify-full"
-elif get_config("postgresql", "database_user", "zulip") != "zulip":
-    if get_secret("postgres_password") is not None:
-        DATABASES["default"].update(
-            PASSWORD=get_secret("postgres_password"),
-            HOST="localhost",
-        )
+elif (
+    get_config("postgresql", "database_user", "zulip") != "zulip"
+    and get_secret("postgres_password") is not None
+):
+    DATABASES["default"].update(
+        PASSWORD=get_secret("postgres_password"),
+        HOST="localhost",
+    )

 POSTGRESQL_MISSING_DICTIONARIES = bool(get_config("postgresql", "missing_dictionaries", None))

 DEFAULT_AUTO_FIELD = "django.db.models.AutoField"