2016-07-04 23:06:44 +02:00
|
|
|
# Webhooks for external integrations.
|
|
|
|
import re
|
2019-04-19 22:02:41 +02:00
|
|
|
import string
|
2024-07-12 02:30:23 +02:00
|
|
|
from typing import Protocol
|
2017-11-16 00:43:10 +01:00
|
|
|
|
2016-07-04 23:06:44 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2017-11-16 00:43:10 +01:00
|
|
|
|
2021-09-13 20:23:54 +02:00
|
|
|
from zerver.decorator import log_unsupported_webhook_event, webhook_view
|
2022-11-17 09:30:48 +01:00
|
|
|
from zerver.lib.exceptions import UnsupportedWebhookEventTypeError
|
2024-04-29 23:20:36 +02:00
|
|
|
from zerver.lib.partial import partial
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.response import json_success
|
2023-09-27 19:01:31 +02:00
|
|
|
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
|
2023-08-12 09:34:31 +02:00
|
|
|
from zerver.lib.validator import WildValue, check_bool, check_int, check_string
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.webhooks.common import (
|
2023-08-12 09:34:31 +02:00
|
|
|
OptionalUserSpecifiedTopicStr,
|
2020-06-11 00:54:34 +02:00
|
|
|
check_send_webhook_message,
|
|
|
|
validate_extract_webhook_http_header,
|
|
|
|
)
|
|
|
|
from zerver.lib.webhooks.git import (
|
|
|
|
TOPIC_WITH_BRANCH_TEMPLATE,
|
|
|
|
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE,
|
|
|
|
get_commits_comment_action_message,
|
|
|
|
get_force_push_commits_event_message,
|
|
|
|
get_issue_event_message,
|
|
|
|
get_pull_request_event_message,
|
|
|
|
get_push_commits_event_message,
|
|
|
|
get_push_tag_event_message,
|
|
|
|
get_remove_branch_event_message,
|
2022-11-07 21:24:35 +01:00
|
|
|
get_short_sha,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
2017-05-02 01:00:50 +02:00
|
|
|
from zerver.models import UserProfile
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
# Topic for Bitbucket messages defaults to just the repository name.
BITBUCKET_TOPIC_TEMPLATE = "{repository_name}"

# Message templates for events that have no shared helper in
# zerver.lib.webhooks.git.
BITBUCKET_FORK_BODY = "{actor} forked the repository into [{fork_name}]({fork_url})."
BITBUCKET_COMMIT_STATUS_CHANGED_BODY = (
    "[System {key}]({system_url}) changed status of {commit_info} to {status}."
)
# Used when a repository attribute was changed from one value to another.
BITBUCKET_REPO_UPDATED_CHANGED = (
    "{actor} changed the {change} of the **{repo_name}** repo from **{old}** to **{new}**"
)
# Used when a previously unset repository attribute was given a value.
BITBUCKET_REPO_UPDATED_ADDED = (
    "{actor} changed the {change} of the **{repo_name}** repo to **{new}**"
)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
|
|
|
# The "pullrequest:<action>" suffixes (taken from the X-Event-Key HTTP
# header) that we know how to turn into messages; any other pull request
# action ends up raising UnsupportedWebhookEventTypeError in get_type.
PULL_REQUEST_SUPPORTED_ACTIONS = [
    "approved",
    "unapproved",
    "created",
    "updated",
    "rejected",
    "fulfilled",
    "comment_created",
    "comment_updated",
    "comment_deleted",
]
|
|
|
|
|
2021-07-16 11:40:46 +02:00
|
|
|
# Every event type this webhook can emit, registered with @webhook_view so
# the integration UI can offer per-event filtering.
ALL_EVENT_TYPES = [
    "change_commit_status",
    "pull_request_comment_created",
    "pull_request_updated",
    "pull_request_unapproved",
    "push",
    "pull_request_approved",
    "pull_request_fulfilled",
    "issue_created",
    "issue_commented",
    "fork",
    "pull_request_comment_updated",
    "pull_request_created",
    "pull_request_rejected",
    "repo:updated",
    "issue_updated",
    "commit_comment",
    "pull_request_comment_deleted",
]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 11:40:46 +02:00
|
|
|
@webhook_view("Bitbucket2", all_event_types=ALL_EVENT_TYPES)
@typed_endpoint
def api_bitbucket2_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    payload: JsonBodyPayload[WildValue],
    branches: str | None = None,
    user_specified_topic: OptionalUserSpecifiedTopicStr = None,
) -> HttpResponse:
    """Entry point for Bitbucket Cloud (v2) webhook events.

    Classifies the event, renders one or more messages, and delivers them
    via check_send_webhook_message.  `branches`, if given, is a
    client-supplied filter string: push events for branches not mentioned
    in it are silently dropped.
    """
    type = get_type(request, payload)
    if type == "push":
        # ignore push events with no changes
        if not payload["push"]["changes"]:
            return json_success(request)
        branch = get_branch_name_for_push_event(payload)
        # Substring match against the user's branch filter; a miss means
        # the client did not ask for notifications about this branch.
        if branch and branches and branches.find(branch) == -1:
            return json_success(request)

        # A single push payload can contain several changes (branches/tags),
        # each of which becomes its own message.
        topic_names = get_push_topics(payload)
        bodies = get_push_bodies(request, payload)

        for b, t in zip(bodies, topic_names, strict=False):
            check_send_webhook_message(
                request, user_profile, t, b, type, unquote_url_parameters=True
            )
    else:
        # Non-push events produce exactly one message, dispatched through
        # the body-builder table keyed by event type.
        topic_name = get_topic_based_on_type(payload, type)
        body_function = get_body_based_on_type(type)
        body = body_function(
            request,
            payload,
            # Only embed the PR/issue title in the body when the client
            # overrode the topic (the default topic already contains it).
            include_title=user_specified_topic is not None,
        )

        check_send_webhook_message(
            request, user_profile, topic_name, body, type, unquote_url_parameters=True
        )

    return json_success(request)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-12 13:37:08 +02:00
|
|
|
def get_topic_for_branch_specified_events(
    payload: WildValue, branch_name: str | None = None
) -> str:
    """Return the "repo / branch" style topic for a branch-scoped event."""
    repo = get_repository_name(payload["repository"])
    if branch_name is None:
        branch_name = get_branch_name_for_push_event(payload)
    return TOPIC_WITH_BRANCH_TEMPLATE.format(repo=repo, branch=branch_name)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_push_topics(payload: WildValue) -> list[str]:
    """Return one topic per change in a push payload.

    Tag pushes use the plain repository topic; branch pushes use the
    repo/branch topic.
    """
    result: list[str] = []
    for change in payload["push"]["changes"]:
        ref = change["new"] or change["old"]
        if ref["type"].tame(check_string) == "tag":
            result.append(get_topic(payload))
        else:
            # Prefer the new ref's name; a deleted branch only has "old".
            named_ref = change["new"] if change.get("new") else change["old"]
            branch_name = named_ref["name"].tame(check_string)
            result.append(get_topic_for_branch_specified_events(payload, branch_name))
    return result
|
2016-11-09 16:05:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-12 13:37:08 +02:00
|
|
|
def get_topic(payload: WildValue) -> str:
    """Return the default topic: just the repository's name."""
    repository_name = get_repository_name(payload["repository"])
    return BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repository_name)
|
|
|
|
|
2016-10-05 19:48:15 +02:00
|
|
|
|
2023-07-12 13:37:08 +02:00
|
|
|
def get_topic_based_on_type(payload: WildValue, type: str) -> str:
    """Return the topic for any non-push event type.

    PR and issue events get a "repo / PR #id title" style topic; everything
    else falls back to the plain repository topic.
    """
    if type.startswith("pull_request"):
        pull_request = payload["pullrequest"]
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repository_name(payload["repository"]),
            type="PR",
            id=pull_request["id"].tame(check_int),
            title=pull_request["title"].tame(check_string),
        )
    if type.startswith("issue"):
        issue = payload["issue"]
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repository_name(payload["repository"]),
            type="issue",
            id=issue["id"].tame(check_int),
            title=issue["title"].tame(check_string),
        )
    # Push events compute per-change topics elsewhere (get_push_topics).
    assert type != "push"
    return get_topic(payload)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_type(request: HttpRequest, payload: WildValue) -> str:
    """Classify the incoming webhook payload into one of ALL_EVENT_TYPES.

    Most events are recognized from keys present in the payload itself;
    pull request events (and repo:updated) additionally need the
    X-Event-Key HTTP header.  Raises UnsupportedWebhookEventTypeError for
    anything we cannot classify.
    """
    if "push" in payload:
        return "push"
    elif "fork" in payload:
        return "fork"
    elif "comment" in payload and "commit" in payload:
        return "commit_comment"
    elif "commit_status" in payload:
        return "change_commit_status"
    elif "issue" in payload:
        # "changes" marks an edit; "comment" a new comment; otherwise the
        # issue was just created.
        if "changes" in payload:
            return "issue_updated"
        if "comment" in payload:
            return "issue_commented"
        return "issue_created"
    elif "pullrequest" in payload:
        pull_request_template = "pull_request_{}"
        # Note that we only need the HTTP header to determine pullrequest events.
        # We rely on the payload itself to determine the other ones.
        event_key = validate_extract_webhook_http_header(request, "X-Event-Key", "BitBucket")
        action = re.match(r"pullrequest:(?P<action>.*)$", event_key)
        if action:
            action_group = action.group("action")
            if action_group in PULL_REQUEST_SUPPORTED_ACTIONS:
                return pull_request_template.format(action_group)
        # Unsupported PR actions fall through to the raise below.
    else:
        event_key = validate_extract_webhook_http_header(request, "X-Event-Key", "BitBucket")
        if event_key == "repo:updated":
            return event_key

    raise UnsupportedWebhookEventTypeError(event_key)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-15 04:14:45 +01:00
|
|
|
class BodyGetter(Protocol):
    """Structural type of every message-body builder in the dispatch table."""

    def __call__(self, request: HttpRequest, payload: WildValue, include_title: bool) -> str: ...
|
2021-12-15 04:14:45 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_body_based_on_type(
    type: str,
) -> BodyGetter:
    # Dispatch on the event type computed by get_type(); an unknown type
    # would raise KeyError here.
    return GET_SINGLE_MESSAGE_BODY_DEPENDING_ON_TYPE_MAPPER[type]
|
2016-11-09 16:05:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_push_bodies(request: HttpRequest, payload: WildValue) -> list[str]:
    """Return one rendered message body per change in a push payload."""
    bodies: list[str] = []
    for change in payload["push"]["changes"]:
        ref_type = (change["new"] or change["old"])["type"].tame(check_string)
        if ref_type == "tag":
            body = get_push_tag_body(request, payload, change)
        elif change["new"].value is None:
            # if change['new'] is None, that means a branch was deleted
            body = get_remove_branch_push_body(request, payload, change)
        elif change["forced"].tame(check_bool):
            body = get_force_push_body(request, payload, change)
        else:
            body = get_normal_push_body(request, payload, change)
        bodies.append(body)
    return bodies
|
2016-10-06 15:32:10 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_remove_branch_push_body(request: HttpRequest, payload: WildValue, change: WildValue) -> str:
    """Build the message for a push that deleted a branch."""
    actor = get_actor_info(request, payload)
    deleted_branch = change["old"]["name"].tame(check_string)
    return get_remove_branch_event_message(actor, deleted_branch)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_force_push_body(request: HttpRequest, payload: WildValue, change: WildValue) -> str:
    """Build the message for a force push to a branch."""
    new_ref = change["new"]
    return get_force_push_commits_event_message(
        get_actor_info(request, payload),
        change["links"]["html"]["href"].tame(check_string),
        new_ref["name"].tame(check_string),
        new_ref["target"]["hash"].tame(check_string),
    )
|
2016-10-02 09:44:33 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_commit_author_name(request: HttpRequest, commit: WildValue) -> str:
    """Return a display name for a commit's author.

    Prefers the linked Bitbucket user; otherwise falls back to the first
    whitespace-separated token of the raw author string.
    """
    author = commit["author"]
    if "user" not in author:
        return author["raw"].tame(check_string).split()[0]
    return get_user_info(request, author["user"])
|
2017-04-26 02:57:47 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_normal_push_body(request: HttpRequest, payload: WildValue, change: WildValue) -> str:
    """Build the message for an ordinary (non-forced) push of commits."""

    def commit_summary(commit: WildValue) -> dict[str, str]:
        # One row of commit metadata in the shape the git helper expects.
        return {
            "name": get_commit_author_name(request, commit),
            "sha": commit["hash"].tame(check_string),
            "url": commit["links"]["html"]["href"].tame(check_string),
            "message": commit["message"].tame(check_string),
        }

    commits_data = [commit_summary(commit) for commit in change["commits"]]

    return get_push_commits_event_message(
        get_actor_info(request, payload),
        change["links"]["html"]["href"].tame(check_string),
        change["new"]["name"].tame(check_string),
        commits_data,
        is_truncated=change["truncated"].tame(check_bool),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_fork_body(request: HttpRequest, payload: WildValue, include_title: bool) -> str:
    """Build the message announcing a repository fork."""
    fork = payload["fork"]
    return BITBUCKET_FORK_BODY.format(
        actor=get_user_info(request, payload["actor"]),
        fork_name=get_repository_full_name(fork),
        fork_url=get_repository_url(fork),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_commit_comment_body(request: HttpRequest, payload: WildValue, include_title: bool) -> str:
    """Build the message for a comment left on a commit."""
    comment = payload["comment"]
    commit = comment["commit"]
    comment_url = comment["links"]["html"]["href"].tame(check_string)
    return get_commits_comment_action_message(
        get_actor_info(request, payload),
        f"[commented]({comment_url})",
        commit["links"]["html"]["href"].tame(check_string),
        commit["hash"].tame(check_string),
        comment["content"]["raw"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_commit_status_changed_body(
    request: HttpRequest, payload: WildValue, include_title: bool
) -> str:
    """Build the message for a CI/build status change on a commit."""
    commit_status = payload["commit_status"]
    commit_api_url = commit_status["links"]["commit"]["href"].tame(check_string)
    # The commit SHA is the last path component of the commit API URL.
    commit_id = commit_api_url.split("/")[-1]

    repo_url = get_repository_url(payload["repository"])
    short_commit_id = get_short_sha(commit_id)
    commit_info = f"[{short_commit_id}]({repo_url}/commits/{commit_id})"

    return BITBUCKET_COMMIT_STATUS_CHANGED_BODY.format(
        key=commit_status["key"].tame(check_string),
        system_url=commit_status["url"].tame(check_string),
        commit_info=commit_info,
        status=commit_status["state"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_issue_commented_body(request: HttpRequest, payload: WildValue, include_title: bool) -> str:
    """Build the message for a new comment on an issue."""
    comment_url = payload["comment"]["links"]["html"]["href"].tame(check_string)
    return get_issue_action_body(
        f"[commented]({comment_url}) on", request, payload, include_title
    )
|
2016-10-21 20:08:26 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_issue_action_body(
    action: str, request: HttpRequest, payload: WildValue, include_title: bool
) -> str:
    """Build the message for an issue event (created/updated/commented)."""
    issue = payload["issue"]

    # Only the creation notification carries the issue body and assignee.
    assignee = None
    message = None
    if action == "created":
        if issue["assignee"]:
            assignee = get_user_info(request, issue["assignee"])
        message = issue["content"]["raw"].tame(check_string)

    issue_title = issue["title"].tame(check_string) if include_title else None
    return get_issue_event_message(
        user_name=get_actor_info(request, payload),
        action=action,
        url=issue["links"]["html"]["href"].tame(check_string),
        number=issue["id"].tame(check_int),
        message=message,
        assignee=assignee,
        title=issue_title,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_pull_request_action_body(
    action: str, request: HttpRequest, payload: WildValue, include_title: bool
) -> str:
    """Build the message for a simple PR action (approved/rejected/merged/...)."""
    pull_request = payload["pullrequest"]

    # Branch details are only rendered for merges.
    target_branch = None
    base_branch = None
    if action == "merged":
        target_branch = pull_request["source"]["branch"]["name"].tame(check_string)
        base_branch = pull_request["destination"]["branch"]["name"].tame(check_string)

    pr_title = pull_request["title"].tame(check_string) if include_title else None
    return get_pull_request_event_message(
        user_name=get_actor_info(request, payload),
        action=action,
        url=get_pull_request_url(pull_request),
        number=pull_request["id"].tame(check_int),
        target_branch=target_branch,
        base_branch=base_branch,
        title=pr_title,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_pull_request_created_or_updated_body(
    action: str, request: HttpRequest, payload: WildValue, include_title: bool
) -> str:
    """Build the message for a newly created or edited pull request."""
    pull_request = payload["pullrequest"]

    # Treat the first listed reviewer (if any) as the assignee.
    assignee = None
    if pull_request["reviewers"]:
        assignee = get_user_info(request, pull_request["reviewers"][0])

    # Branch info is only included in the "created" notification.
    target_branch = None
    base_branch = None
    if action == "created":
        target_branch = pull_request["source"]["branch"]["name"].tame(check_string)
        base_branch = pull_request["destination"]["branch"]["name"].tame(check_string)

    return get_pull_request_event_message(
        user_name=get_actor_info(request, payload),
        action=action,
        url=get_pull_request_url(pull_request),
        number=pull_request["id"].tame(check_int),
        target_branch=target_branch,
        base_branch=base_branch,
        message=pull_request["description"].tame(check_string),
        assignee=assignee,
        title=pull_request["title"].tame(check_string) if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-07-25 00:57:45 +02:00
|
|
|
def get_pull_request_comment_created_action_body(
    request: HttpRequest,
    payload: WildValue,
    include_title: bool,
) -> str:
    """Build the message for a new comment on a pull request."""
    comment_url = payload["comment"]["links"]["html"]["href"].tame(check_string)
    return get_pull_request_comment_action_body(
        request, payload, f"[commented]({comment_url})", include_title
    )
|
2016-10-21 20:08:26 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-07-25 00:57:45 +02:00
|
|
|
def get_pull_request_deleted_or_updated_comment_action_body(
    action: str,
    request: HttpRequest,
    payload: WildValue,
    include_title: bool,
) -> str:
    """Build the message for an updated or deleted PR comment."""
    comment_url = payload["comment"]["links"]["html"]["href"].tame(check_string)
    return get_pull_request_comment_action_body(
        request, payload, f"{action} a [comment]({comment_url})", include_title
    )
|
2016-10-21 20:08:26 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-07-25 00:57:45 +02:00
|
|
|
def get_pull_request_comment_action_body(
    request: HttpRequest,
    payload: WildValue,
    action: str,
    include_title: bool,
) -> str:
    """Shared renderer for all PR-comment events; *action* is the verb phrase."""
    pull_request = payload["pullrequest"]
    return get_pull_request_event_message(
        user_name=get_actor_info(request, payload),
        action=f"{action} on",
        url=pull_request["links"]["html"]["href"].tame(check_string),
        number=pull_request["id"].tame(check_int),
        message=payload["comment"]["content"]["raw"].tame(check_string),
        title=pull_request["title"].tame(check_string) if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_push_tag_body(request: HttpRequest, payload: WildValue, change: WildValue) -> str:
    """Build the message for a pushed or removed tag."""
    if change.get("new"):
        tag, action = change["new"], "pushed"
    elif change.get("old"):
        tag, action = change["old"], "removed"
    # NOTE(review): a change with neither "new" nor "old" would raise
    # UnboundLocalError below; presumably Bitbucket always sends one of
    # them for tag changes — confirm against the payload docs.

    return get_push_tag_event_message(
        get_actor_info(request, payload),
        tag["name"].tame(check_string),
        tag_url=tag["links"]["html"]["href"].tame(check_string),
        action=action,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-04-19 22:02:41 +02:00
|
|
|
def append_punctuation(title: str, message: str) -> str:
    """Return *message*, adding a trailing period unless *title* already
    ends in punctuation.

    Used when quoting a repository field value at the end of a sentence:
    if the quoted value supplies its own punctuation, we don't double up.
    An empty *title* (a field can be cleared to the empty string) gets the
    period too, rather than crashing on title[-1].
    """
    if not title or title[-1] not in string.punctuation:
        message = f"{message}."
    return message
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_repo_updated_body(request: HttpRequest, payload: WildValue, include_title: bool) -> str:
    """Build the message listing which repository attributes were changed."""
    tracked_fields = ["website", "name", "links", "language", "full_name", "description"]
    repo_name = payload["repository"]["name"].tame(check_string)
    actor = get_actor_info(request, payload)

    lines = []
    for field in tracked_fields:
        new = payload["changes"][field]["new"]
        old = payload["changes"][field]["old"]
        label = "full name" if field == "full_name" else field
        if new and old:
            # Attribute changed from one value to another.
            message = BITBUCKET_REPO_UPDATED_CHANGED.format(
                actor=actor,
                change=label,
                repo_name=repo_name,
                old=str(old.value),
                new=str(new.value),
            )
        elif new:
            # Attribute was previously unset.
            message = BITBUCKET_REPO_UPDATED_ADDED.format(
                actor=actor,
                change=label,
                repo_name=repo_name,
                new=str(new.value),
            )
        else:
            continue
        lines.append(append_punctuation(str(new.value), message) + "\n")

    return "".join(lines)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_pull_request_url(pullrequest_payload: WildValue) -> str:
    """Return the web URL of a pull request payload."""
    links = pullrequest_payload["links"]
    return links["html"]["href"].tame(check_string)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_repository_url(repository_payload: WildValue) -> str:
    """Return the web URL of a repository payload."""
    links = repository_payload["links"]
    return links["html"]["href"].tame(check_string)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_repository_name(repository_payload: WildValue) -> str:
    """Return the short name of a repository payload."""
    name = repository_payload["name"]
    return name.tame(check_string)
|
2016-10-05 19:48:15 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_repository_full_name(repository_payload: WildValue) -> str:
    """Return the "owner/name" full name of a repository payload."""
    full_name = repository_payload["full_name"]
    return full_name.tame(check_string)
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_user_info(request: HttpRequest, dct: WildValue) -> str:
    """Return a human-readable name for a Bitbucket user object.

    See https://developer.atlassian.com/cloud/bitbucket/bitbucket-api-changes-gdpr/
    Since GDPR, we don't get username; instead, we either get display_name
    or nickname.
    """
    for field in ("display_name", "nickname"):
        if field in dct:
            return dct[field].tame(check_string)

    # We call this an unsupported_event, even though we
    # are technically still sending a message.
    log_unsupported_webhook_event(
        request=request,
        summary="Could not find display_name/nickname field",
    )
    return "Unknown user"
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-10 22:17:30 +02:00
|
|
|
def get_actor_info(request: HttpRequest, payload: WildValue) -> str:
    """Return the display name of the user who triggered the event."""
    return get_user_info(request, payload["actor"])
|
2016-07-04 23:06:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def get_branch_name_for_push_event(payload: WildValue) -> str | None:
    """Return the branch name of the last change in a push payload.

    Returns None when the change refers to a tag rather than a branch.
    """
    last_change = payload["push"]["changes"][-1]
    ref = last_change["new"] or last_change["old"]
    if ref["type"].tame(check_string) == "tag":
        return None
    return ref["name"].tame(check_string)
|
2016-10-05 19:48:15 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
# Dispatch table from event type (as computed by get_type) to the function
# that renders its message body; "push" is absent because push payloads are
# handled separately (one message per change).  partial() pre-binds the
# action verb for builders shared by several event types.
GET_SINGLE_MESSAGE_BODY_DEPENDING_ON_TYPE_MAPPER: dict[str, BodyGetter] = {
    "fork": get_fork_body,
    "commit_comment": get_commit_comment_body,
    "change_commit_status": get_commit_status_changed_body,
    "issue_updated": partial(get_issue_action_body, "updated"),
    "issue_created": partial(get_issue_action_body, "created"),
    "issue_commented": get_issue_commented_body,
    "pull_request_created": partial(get_pull_request_created_or_updated_body, "created"),
    "pull_request_updated": partial(get_pull_request_created_or_updated_body, "updated"),
    "pull_request_approved": partial(get_pull_request_action_body, "approved"),
    "pull_request_unapproved": partial(get_pull_request_action_body, "unapproved"),
    "pull_request_fulfilled": partial(get_pull_request_action_body, "merged"),
    "pull_request_rejected": partial(get_pull_request_action_body, "rejected"),
    "pull_request_comment_created": get_pull_request_comment_created_action_body,
    "pull_request_comment_updated": partial(
        get_pull_request_deleted_or_updated_comment_action_body, "updated"
    ),
    "pull_request_comment_deleted": partial(
        get_pull_request_deleted_or_updated_comment_action_body, "deleted"
    ),
    "repo:updated": get_repo_updated_body,
}
|