2019-05-07 02:42:30 +02:00
|
|
|
import string
|
2024-07-12 02:30:23 +02:00
|
|
|
from typing import Protocol
|
2019-03-03 17:44:33 +01:00
|
|
|
|
|
|
|
from django.http import HttpRequest, HttpResponse
|
|
|
|
|
2020-08-20 00:32:15 +02:00
|
|
|
from zerver.decorator import webhook_view
|
2022-11-17 09:30:48 +01:00
|
|
|
from zerver.lib.exceptions import UnsupportedWebhookEventTypeError
|
2024-04-29 23:20:36 +02:00
|
|
|
from zerver.lib.partial import partial
|
2019-03-03 17:44:33 +01:00
|
|
|
from zerver.lib.response import json_success
|
2023-09-27 19:01:31 +02:00
|
|
|
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
|
2023-08-12 09:34:31 +02:00
|
|
|
from zerver.lib.validator import WildValue, check_int, check_none_or, check_string
|
2022-05-12 06:54:12 +02:00
|
|
|
from zerver.lib.webhooks.common import (
|
2023-08-12 09:34:31 +02:00
|
|
|
OptionalUserSpecifiedTopicStr,
|
2022-05-12 06:54:12 +02:00
|
|
|
check_send_webhook_message,
|
|
|
|
validate_extract_webhook_http_header,
|
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.webhooks.git import (
|
|
|
|
TOPIC_WITH_BRANCH_TEMPLATE,
|
|
|
|
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE,
|
|
|
|
get_commits_comment_action_message,
|
|
|
|
get_create_branch_event_message,
|
|
|
|
get_pull_request_event_message,
|
|
|
|
get_push_tag_event_message,
|
|
|
|
get_remove_branch_event_message,
|
|
|
|
)
|
2020-01-14 22:06:24 +01:00
|
|
|
from zerver.models import UserProfile
|
2020-09-18 23:22:25 +02:00
|
|
|
from zerver.webhooks.bitbucket2.view import BITBUCKET_REPO_UPDATED_CHANGED, BITBUCKET_TOPIC_TEMPLATE
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
# Message templates for events that have no suitable helper in
# zerver.lib.webhooks.git.  The *_WITH_TITLE variants embed the PR title in
# the message body and are selected when include_title is set.
BITBUCKET_FORK_BODY = (
    "User {display_name}(login: {username}) forked the repository into [{fork_name}]({fork_url})."
)
BRANCH_UPDATED_MESSAGE_TEMPLATE = "{user_name} pushed to branch {branch_name}. Head is now {head}."
PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE = (
    '{user_name} marked [PR #{number}]({url}) as "needs work".'
)
PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE_WITH_TITLE = """
{user_name} marked [PR #{number} {title}]({url}) as \"needs work\".
""".strip()
PULL_REQUEST_REASSIGNED_TEMPLATE = "{user_name} reassigned [PR #{number}]({url}) to {assignees}."
PULL_REQUEST_REASSIGNED_TEMPLATE_WITH_TITLE = """
{user_name} reassigned [PR #{number} {title}]({url}) to {assignees}.
""".strip()
PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE = (
    "{user_name} removed all reviewers from [PR #{number}]({url})."
)
PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE_WITH_TITLE = """
{user_name} removed all reviewers from [PR #{number} {title}]({url})
""".strip()
PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS = """
{user_name} {action} [PR #{number}]({url}) from `{source}` to \
`{destination}` (assigned to {assignees} for review)
""".strip()
PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS_WITH_TITLE = """
{user_name} {action} [PR #{number} {title}]({url}) from `{source}` to \
`{destination}` (assigned to {assignees} for review)
""".strip()
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def fixture_to_headers(fixture_name: str) -> dict[str, str]:
    """Map a test fixture name to the HTTP headers the webhook tests should send.

    Only the diagnostics ping fixture needs an explicit event-key header;
    every other fixture carries its event key inside the payload body.
    """
    special_cases = {
        "diagnostics_ping": {"HTTP_X_EVENT_KEY": "diagnostics:ping"},
    }
    return special_cases.get(fixture_name, {})
|
2019-07-04 18:59:22 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_user_name(payload: WildValue) -> str:
    """Return the acting user as a Markdown link to their Bitbucket profile."""
    actor = payload["actor"]
    display = actor["name"].tame(check_string)
    profile_url = actor["links"]["self"][0]["href"].tame(check_string)
    return f"[{display}]({profile_url})"
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def ping_handler(
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Handle the diagnostics:ping event Bitbucket sends when a webhook is set up.

    The payload and branch filter are ignored; the user-specified topic is
    honored when present.
    """
    topic_name = include_title or "Bitbucket Server Ping"
    body = "Congratulations! The Bitbucket Server webhook was configured successfully!"
    return [{"topic": topic_name, "body": body}]
|
2019-06-15 10:04:28 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-15 05:40:18 +01:00
|
|
|
def repo_comment_handler(
    action: str,
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Notify about a commit comment being added, edited, or deleted.

    ``action`` is a verb phrase pre-bound via ``partial`` in
    EVENT_HANDLER_MAP (e.g. "commented", "deleted their comment").
    """
    repo_name = payload["repository"]["name"].tame(check_string)
    topic_name = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo_name)
    sha = payload["commit"].tame(check_string)
    # The repository "self" link ends in ".../browse"; swap that suffix for
    # the commit path to produce a direct link to the commented commit.
    browse_url = payload["repository"]["links"]["self"][0]["href"].tame(check_string)
    commit_url = browse_url[: -len("browse")] + f"commits/{sha}"
    comment_text = payload["comment"]["text"].tame(check_string)
    if action == "deleted their comment":
        comment_text = f"~~{comment_text}~~"  # render deleted comments struck through
    body = get_commits_comment_action_message(
        user_name=get_user_name(payload),
        action=action,
        commit_url=commit_url,
        sha=sha,
        message=comment_text,
    )
    return [{"topic": topic_name, "body": body}]
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-15 05:40:18 +01:00
|
|
|
def repo_forked_handler(
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Notify, under the origin repository's topic, that it was forked."""
    fork = payload["repository"]
    origin_name = fork["origin"]["name"].tame(check_string)
    topic_name = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=origin_name)
    body = BITBUCKET_FORK_BODY.format(
        display_name=payload["actor"]["displayName"].tame(check_string),
        username=get_user_name(payload),
        fork_name=fork["name"].tame(check_string),
        fork_url=fork["links"]["self"][0]["href"].tame(check_string),
    )
    return [{"topic": topic_name, "body": body}]
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-15 05:40:18 +01:00
|
|
|
def repo_modified_handler(
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Notify that repository metadata changed.

    As of writing this, the only change we'd be notified about is a name
    change, so the message body is hard-coded accordingly.  The message is
    sent under the topic for the *new* repository name.
    """
    # Tame each name once instead of re-taming the same field.
    old_name = payload["old"]["name"].tame(check_string)
    new_name = payload["new"]["name"].tame(check_string)
    topic_name_new = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=new_name)
    body = BITBUCKET_REPO_UPDATED_CHANGED.format(
        actor=get_user_name(payload),
        change="name",
        repo_name=old_name,
        old=old_name,
        new=new_name,
    )
    # Append a closing period unless the new name already ends in
    # punctuation; the emptiness check avoids an IndexError on a
    # (pathological) empty repository name.
    if new_name and new_name[-1] not in string.punctuation:
        body = f"{body}."
    return [{"topic": topic_name_new, "body": body}]
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def repo_push_branch_data(payload: WildValue, change: WildValue) -> dict[str, str]:
    """Build the notification for a single branch change inside a push event.

    Raises UnsupportedWebhookEventTypeError for change types other than
    ADD, UPDATE, and DELETE.
    """
    change_kind = change["type"].tame(check_string)
    repo = payload["repository"]["name"].tame(check_string)
    pusher = get_user_name(payload)
    branch = change["ref"]["displayId"].tame(check_string)
    head = change["toHash"].tame(check_string)

    if change_kind == "ADD":
        body = get_create_branch_event_message(
            user_name=pusher,
            url=None,
            branch_name=branch,
        )
    elif change_kind == "UPDATE":
        body = BRANCH_UPDATED_MESSAGE_TEMPLATE.format(
            user_name=pusher,
            branch_name=branch,
            head=head,
        )
    elif change_kind == "DELETE":
        body = get_remove_branch_event_message(pusher, branch)
    else:
        event_key = payload["eventKey"].tame(check_string)  # nocoverage
        raise UnsupportedWebhookEventTypeError(f"{event_key}.{change_kind}")

    topic_name = TOPIC_WITH_BRANCH_TEMPLATE.format(repo=repo, branch=branch)
    return {"topic": topic_name, "body": body}
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def repo_push_tag_data(payload: WildValue, change: WildValue) -> dict[str, str]:
    """Build the notification for a single tag change inside a push event.

    Raises UnsupportedWebhookEventTypeError for change types other than
    ADD and DELETE.
    """
    event_type = change["type"].tame(check_string)
    repo = payload["repository"]["name"].tame(check_string)
    tag = change["ref"]["displayId"].tame(check_string)

    action = {"ADD": "pushed", "DELETE": "removed"}.get(event_type)
    if action is None:
        event_key = payload["eventKey"].tame(check_string)  # nocoverage
        raise UnsupportedWebhookEventTypeError(f"{event_key}.{event_type}")

    topic_name = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo)
    body = get_push_tag_event_message(get_user_name(payload), tag, action=action)
    return {"topic": topic_name, "body": body}
|
2019-03-03 17:44:33 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def repo_push_handler(
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Dispatch each change of a repo:refs_changed event to the branch/tag builder.

    ``branches`` is the user-configured branch filter string; branch pushes
    whose branch name does not occur in it are skipped.
    """
    messages: list[dict[str, str]] = []
    for change in payload["changes"]:
        target = change["ref"]["type"].tame(check_string)
        if target == "BRANCH":
            branch = change["ref"]["displayId"].tame(check_string)
            if branches and branch not in branches:
                continue  # filtered out by the configured branch list
            messages.append(repo_push_branch_data(payload, change))
        elif target == "TAG":
            messages.append(repo_push_tag_data(payload, change))
        else:
            event_key = payload["eventKey"].tame(check_string)  # nocoverage
            raise UnsupportedWebhookEventTypeError(f"{event_key}.{target}")
    return messages
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def get_assignees_string(pr: WildValue) -> str | None:
    """Return the PR's reviewers as Markdown links, joined in English style.

    The final two names are joined with "and"; returns None when the PR
    has no reviewers at all.
    """
    links = [
        "[{}]({})".format(
            reviewer["user"]["name"].tame(check_string),
            reviewer["user"]["links"]["self"][0]["href"].tame(check_string),
        )
        for reviewer in pr["reviewers"]
    ]
    if not links:
        return None
    if len(links) == 1:
        return links[0]
    return ", ".join(links[:-1]) + " and " + links[-1]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-12 13:37:08 +02:00
|
|
|
def get_pr_topic(repo: str, type: str, id: int, title: str) -> str:
    """Format the channel topic for a pull request event."""
    return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
        repo=repo, type=type, id=id, title=title
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def get_simple_pr_body(payload: WildValue, action: str, include_title: str | None) -> str:
    """Build the generic PR message used for events needing no extra detail."""
    pr = payload["pullRequest"]
    pr_url = pr["links"]["self"][0]["href"].tame(check_string)
    pr_number = pr["id"].tame(check_int)
    pr_title = pr["title"].tame(check_string) if include_title else None
    return get_pull_request_event_message(
        user_name=get_user_name(payload),
        action=action,
        url=pr_url,
        number=pr_number,
        title=pr_title,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_pr_opened_or_modified_body(
    payload: WildValue, action: str, include_title: str | None
) -> str:
    """Build the message for a PR being opened or modified.

    Source/destination branch names are included only for "opened" events.
    """
    pr = payload["pullRequest"]
    description = pr.get("description").tame(check_none_or(check_string))
    if action == "opened":
        source_branch = pr["fromRef"]["displayId"].tame(check_string)
        dest_branch = pr["toRef"]["displayId"].tame(check_string)
    else:
        source_branch = None
        dest_branch = None
    reviewers = get_assignees_string(pr)

    return get_pull_request_event_message(
        user_name=get_user_name(payload),
        action=action,
        url=pr["links"]["self"][0]["href"].tame(check_string),
        number=pr["id"].tame(check_int),
        target_branch=source_branch,
        base_branch=dest_branch,
        message=description,
        reviewer=reviewers if reviewers else None,
        title=pr["title"].tame(check_string) if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def get_pr_merged_body(payload: WildValue, action: str, include_title: str | None) -> str:
    """Build the message for a merged PR, including both branch names."""
    pr = payload["pullRequest"]
    pr_url = pr["links"]["self"][0]["href"].tame(check_string)
    source_branch = pr["fromRef"]["displayId"].tame(check_string)
    dest_branch = pr["toRef"]["displayId"].tame(check_string)
    return get_pull_request_event_message(
        user_name=get_user_name(payload),
        action=action,
        url=pr_url,
        number=pr["id"].tame(check_int),
        target_branch=source_branch,
        base_branch=dest_branch,
        title=pr["title"].tame(check_string) if include_title else None,
    )
|
|
|
|
|
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def get_pr_needs_work_body(payload: WildValue, include_title: str | None) -> str:
    """Build the message for a reviewer marking a PR as "needs work"."""
    pr = payload["pullRequest"]
    common = {
        "user_name": get_user_name(payload),
        "number": pr["id"].tame(check_int),
        "url": pr["links"]["self"][0]["href"].tame(check_string),
    }
    if not include_title:
        return PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE.format(**common)
    return PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE_WITH_TITLE.format(
        title=pr["title"].tame(check_string),
        **common,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def get_pr_reassigned_body(payload: WildValue, include_title: str | None) -> str:
    """Build the message for a PR whose reviewer list was updated.

    When the new reviewer list is empty, reports that all reviewers were
    removed; otherwise lists the new assignees.  The *_WITH_TITLE template
    variants are used when ``include_title`` is set.
    """
    pr = payload["pullRequest"]
    assignees_string = get_assignees_string(pr)
    # Tame the shared fields once instead of repeating them per branch.
    user_name = get_user_name(payload)
    number = pr["id"].tame(check_int)
    url = pr["links"]["self"][0]["href"].tame(check_string)

    if not assignees_string:
        if not include_title:
            return PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE.format(
                user_name=user_name,
                number=number,
                url=url,
            )
        title = pr["title"].tame(check_string)
        message = PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE_WITH_TITLE.format(
            user_name=user_name,
            number=number,
            url=url,
            title=title,
        )
        # This template has no trailing period; add one unless the title
        # already ends in punctuation.  The emptiness check avoids an
        # IndexError on a (pathological) empty title.
        if title and title[-1] not in string.punctuation:
            message = f"{message}."
        return message

    if not include_title:
        return PULL_REQUEST_REASSIGNED_TEMPLATE.format(
            user_name=user_name,
            number=number,
            url=url,
            assignees=assignees_string,
        )
    return PULL_REQUEST_REASSIGNED_TEMPLATE_WITH_TITLE.format(
        user_name=user_name,
        number=number,
        url=url,
        assignees=assignees_string,
        title=pr["title"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def pr_handler(
    action: str,
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Route a pull request event to the appropriate body builder.

    ``action`` is pre-bound via ``partial`` in EVENT_HANDLER_MAP; anything
    not special-cased below falls through to the simple one-line message.
    """
    pr = payload["pullRequest"]
    topic_name = get_pr_topic(
        pr["toRef"]["repository"]["name"].tame(check_string),
        type="PR",
        id=pr["id"].tame(check_int),
        title=pr["title"].tame(check_string),
    )
    if action in ("opened", "modified"):
        body = get_pr_opened_or_modified_body(payload, action, include_title)
    elif action == "merged":
        body = get_pr_merged_body(payload, action, include_title)
    elif action == "needs_work":
        body = get_pr_needs_work_body(payload, include_title)
    elif action == "reviewers_updated":
        body = get_pr_reassigned_body(payload, include_title)
    else:
        body = get_simple_pr_body(payload, action, include_title)
    return [{"topic": topic_name, "body": body}]
|
2019-03-25 05:13:47 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def pr_comment_handler(
    action: str,
    payload: WildValue,
    branches: str | None,
    include_title: str | None,
) -> list[dict[str, str]]:
    """Notify about a comment being added, edited, or deleted on a PR."""
    pr = payload["pullRequest"]
    topic_name = get_pr_topic(
        pr["toRef"]["repository"]["name"].tame(check_string),
        type="PR",
        id=pr["id"].tame(check_int),
        title=pr["title"].tame(check_string),
    )
    comment_text = payload["comment"]["text"].tame(check_string)
    if action == "deleted their comment on":
        comment_text = f"~~{comment_text}~~"  # render deleted comments struck through
    body = get_pull_request_event_message(
        user_name=get_user_name(payload),
        action=action,
        url=pr["links"]["self"][0]["href"].tame(check_string),
        number=pr["id"].tame(check_int),
        message=comment_text,
        title=pr["title"].tame(check_string) if include_title else None,
    )
    return [{"topic": topic_name, "body": body}]
|
2019-03-25 05:13:47 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-15 05:40:18 +01:00
|
|
|
class EventHandler(Protocol):
    # Structural type for the values of EVENT_HANDLER_MAP: every handler
    # (possibly with a leading ``action`` argument already bound via
    # ``partial``) takes the payload, the branch filter, and the optional
    # user-specified topic, and returns the messages to send.
    def __call__(
        self, payload: WildValue, branches: str | None, include_title: str | None
    ) -> list[dict[str, str]]: ...
|
2021-12-15 05:40:18 +01:00
|
|
|
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
# Maps a Bitbucket Server event key to the handler that builds the outgoing
# messages.  Handlers taking an extra leading ``action`` argument have it
# pre-bound here with ``partial`` so every entry conforms to EventHandler.
EVENT_HANDLER_MAP: dict[str, EventHandler] = {
    "diagnostics:ping": ping_handler,
    "repo:comment:added": partial(repo_comment_handler, "commented"),
    "repo:comment:edited": partial(repo_comment_handler, "edited their comment"),
    "repo:comment:deleted": partial(repo_comment_handler, "deleted their comment"),
    "repo:forked": repo_forked_handler,
    "repo:modified": repo_modified_handler,
    "repo:refs_changed": repo_push_handler,
    "pr:comment:added": partial(pr_comment_handler, "commented on"),
    "pr:comment:edited": partial(pr_comment_handler, "edited their comment on"),
    "pr:comment:deleted": partial(pr_comment_handler, "deleted their comment on"),
    "pr:declined": partial(pr_handler, "declined"),
    "pr:deleted": partial(pr_handler, "deleted"),
    "pr:merged": partial(pr_handler, "merged"),
    "pr:modified": partial(pr_handler, "modified"),
    "pr:opened": partial(pr_handler, "opened"),
    "pr:reviewer:approved": partial(pr_handler, "approved"),
    "pr:reviewer:needs_work": partial(pr_handler, "needs_work"),
    "pr:reviewer:updated": partial(pr_handler, "reviewers_updated"),
    "pr:reviewer:unapproved": partial(pr_handler, "unapproved"),
}

# Passed to @webhook_view so the supported event types are registered.
ALL_EVENT_TYPES = list(EVENT_HANDLER_MAP.keys())
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 11:40:46 +02:00
|
|
|
|
|
|
|
@webhook_view("Bitbucket3", all_event_types=ALL_EVENT_TYPES)
@typed_endpoint
def api_bitbucket3_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    payload: JsonBodyPayload[WildValue],
    branches: str | None = None,
    user_specified_topic: OptionalUserSpecifiedTopicStr = None,
) -> HttpResponse:
    """Webhook endpoint for Bitbucket Server (a.k.a. Bitbucket 3).

    Determines the event key from the payload (falling back to the
    X-Event-Key HTTP header), dispatches to the matching handler from
    EVENT_HANDLER_MAP, and sends one Zulip message per entry the handler
    returns.  Raises UnsupportedWebhookEventTypeError for unknown events.
    """
    eventkey: str | None
    if "eventKey" in payload:
        eventkey = payload["eventKey"].tame(check_string)
    else:
        # Fall back to the HTTP header when the body carries no eventKey.
        eventkey = validate_extract_webhook_http_header(request, "X-Event-Key", "BitBucket")
    handler = EVENT_HANDLER_MAP.get(eventkey)
    if handler is None:
        raise UnsupportedWebhookEventTypeError(eventkey)

    data = handler(payload, branches=branches, include_title=user_specified_topic)
    for element in data:
        check_send_webhook_message(
            request,
            user_profile,
            element["topic"],
            element["body"],
            eventkey,
            unquote_url_parameters=True,
        )

    return json_success(request)
|