# Webhooks for external integrations.
import re
import string
from functools import partial
from typing import Dict, List, Optional, Protocol

from django.http import HttpRequest, HttpResponse

from zerver.decorator import log_unsupported_webhook_event, webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.validator import WildValue, check_bool, check_int, check_string, to_wild_value
from zerver.lib.webhooks.common import (
    check_send_webhook_message,
    validate_extract_webhook_http_header,
)
from zerver.lib.webhooks.git import (
    TOPIC_WITH_BRANCH_TEMPLATE,
    TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE,
    get_commits_comment_action_message,
    get_force_push_commits_event_message,
    get_issue_event_message,
    get_pull_request_event_message,
    get_push_commits_event_message,
    get_push_tag_event_message,
    get_remove_branch_event_message,
)
from zerver.models import UserProfile

BITBUCKET_TOPIC_TEMPLATE = "{repository_name}"

BITBUCKET_FORK_BODY = "{actor} forked the repository into [{fork_name}]({fork_url})."
BITBUCKET_COMMIT_STATUS_CHANGED_BODY = (
    "[System {key}]({system_url}) changed status of {commit_info} to {status}."
)
BITBUCKET_REPO_UPDATED_CHANGED = (
    "{actor} changed the {change} of the **{repo_name}** repo from **{old}** to **{new}**"
)
BITBUCKET_REPO_UPDATED_ADDED = (
    "{actor} changed the {change} of the **{repo_name}** repo to **{new}**"
)

PULL_REQUEST_SUPPORTED_ACTIONS = [
    "approved",
    "unapproved",
    "created",
    "updated",
    "rejected",
    "fulfilled",
    "comment_created",
    "comment_updated",
    "comment_deleted",
]

ALL_EVENT_TYPES = [
    "change_commit_status",
    "pull_request_comment_created",
    "pull_request_updated",
    "pull_request_unapproved",
    "push",
    "pull_request_approved",
    "pull_request_fulfilled",
    "issue_created",
    "issue_commented",
    "fork",
    "pull_request_comment_updated",
    "pull_request_created",
    "pull_request_rejected",
    "repo:updated",
    "issue_updated",
    "commit_comment",
    "pull_request_comment_deleted",
]


@webhook_view("Bitbucket2", all_event_types=ALL_EVENT_TYPES)
@has_request_variables
def api_bitbucket2_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: WildValue = REQ(argument_type="body", converter=to_wild_value),
    branches: Optional[str] = REQ(default=None),
    user_specified_topic: Optional[str] = REQ("topic", default=None),
) -> HttpResponse:
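    # Dispatch on the Bitbucket event type: a push event can contain
    # multiple changes and therefore produces one message per change,
    # while every other event type produces a single message.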
    type = get_type(request, payload)
    if type == "push":
        # ignore push events with no changes
        if not payload["push"]["changes"]:
            return json_success(request)
        branch = get_branch_name_for_push_event(payload)
        if branch and branches:
            if branches.find(branch) == -1:
                return json_success(request)

        subjects = get_push_subjects(payload)
        bodies = get_push_bodies(payload)

        for b, s in zip(bodies, subjects):
            check_send_webhook_message(
                request, user_profile, s, b, type, unquote_url_parameters=True
            )
    else:
        subject = get_subject_based_on_type(payload, type)
        body_function = get_body_based_on_type(type)
        body = body_function(
            payload,
            include_title=user_specified_topic is not None,
        )

        check_send_webhook_message(
            request, user_profile, subject, body, type, unquote_url_parameters=True
        )

    return json_success(request)


def get_subject_for_branch_specified_events(
    payload: WildValue, branch_name: Optional[str] = None
) -> str:
    return TOPIC_WITH_BRANCH_TEMPLATE.format(
        repo=get_repository_name(payload["repository"]),
        branch=get_branch_name_for_push_event(payload) if branch_name is None else branch_name,
    )


def get_push_subjects(payload: WildValue) -> List[str]:
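    # Compute one topic per change in the push payload: pushed tags use
    # the plain repository topic, while branch changes use the
    # branch-specific topic.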
    subjects_list = []
    for change in payload["push"]["changes"]:
        potential_tag = (change["new"] or change["old"])["type"].tame(check_string)
        if potential_tag == "tag":
            subjects_list.append(get_subject(payload))
        else:
            if change.get("new"):
                branch_name = change["new"]["name"].tame(check_string)
            else:
                branch_name = change["old"]["name"].tame(check_string)
            subjects_list.append(get_subject_for_branch_specified_events(payload, branch_name))
    return subjects_list


def get_subject(payload: WildValue) -> str:
    return BITBUCKET_TOPIC_TEMPLATE.format(
        repository_name=get_repository_name(payload["repository"])
    )


def get_subject_based_on_type(payload: WildValue, type: str) -> str:
    if type.startswith("pull_request"):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repository_name(payload["repository"]),
            type="PR",
            id=payload["pullrequest"]["id"].tame(check_int),
            title=payload["pullrequest"]["title"].tame(check_string),
        )
    if type.startswith("issue"):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repository_name(payload["repository"]),
            type="issue",
            id=payload["issue"]["id"].tame(check_int),
            title=payload["issue"]["title"].tame(check_string),
        )
    assert type != "push"
    return get_subject(payload)


def get_type(request: HttpRequest, payload: WildValue) -> str:
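    # Infer the event type from the keys present in the payload; pull
    # request actions and repo:updated are identified from the
    # X-Event-Key HTTP header.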
    if "push" in payload:
        return "push"
    elif "fork" in payload:
        return "fork"
    elif "comment" in payload and "commit" in payload:
        return "commit_comment"
    elif "commit_status" in payload:
        return "change_commit_status"
    elif "issue" in payload:
        if "changes" in payload:
            return "issue_updated"
        if "comment" in payload:
            return "issue_commented"
        return "issue_created"
    elif "pullrequest" in payload:
        pull_request_template = "pull_request_{}"
        # Note that we only need the HTTP header to determine pullrequest events.
        # We rely on the payload itself to determine the other ones.
        event_key = validate_extract_webhook_http_header(request, "X-Event-Key", "BitBucket")
        assert event_key is not None
        action = re.match("pullrequest:(?P<action>.*)$", event_key)
        if action:
            action_group = action.group("action")
            if action_group in PULL_REQUEST_SUPPORTED_ACTIONS:
                return pull_request_template.format(action_group)
    else:
        event_key = validate_extract_webhook_http_header(request, "X-Event-Key", "BitBucket")
        if event_key == "repo:updated":
            return event_key

    raise UnsupportedWebhookEventType(event_key)


class BodyGetter(Protocol):
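    # Common signature shared by the body-rendering functions in
    # GET_SINGLE_MESSAGE_BODY_DEPENDING_ON_TYPE_MAPPER below.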
    def __call__(self, payload: WildValue, include_title: bool) -> str:
        ...


def get_body_based_on_type(
    type: str,
) -> BodyGetter:
    return GET_SINGLE_MESSAGE_BODY_DEPENDING_ON_TYPE_MAPPER[type]


def get_push_bodies(payload: WildValue) -> List[str]:
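    # Compute one message body per change in the push payload,
    # distinguishing tag pushes, branch deletions, force pushes, and
    # normal pushes.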
    messages_list = []
    for change in payload["push"]["changes"]:
        potential_tag = (change["new"] or change["old"])["type"].tame(check_string)
        if potential_tag == "tag":
            messages_list.append(get_push_tag_body(payload, change))
        # if change['new'] is None, that means a branch was deleted
        elif change["new"].value is None:
            messages_list.append(get_remove_branch_push_body(payload, change))
        elif change["forced"].tame(check_bool):
            messages_list.append(get_force_push_body(payload, change))
        else:
            messages_list.append(get_normal_push_body(payload, change))
    return messages_list


def get_remove_branch_push_body(payload: WildValue, change: WildValue) -> str:
    return get_remove_branch_event_message(
        get_actor_info(payload),
        change["old"]["name"].tame(check_string),
    )


def get_force_push_body(payload: WildValue, change: WildValue) -> str:
    return get_force_push_commits_event_message(
        get_actor_info(payload),
        change["links"]["html"]["href"].tame(check_string),
        change["new"]["name"].tame(check_string),
        change["new"]["target"]["hash"].tame(check_string),
    )


def get_commit_author_name(commit: WildValue) -> str:
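    # Prefer the linked Bitbucket user's name; otherwise fall back to
    # the first word of the raw author string.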
    if "user" in commit["author"]:
        return get_user_info(commit["author"]["user"])
    return commit["author"]["raw"].tame(check_string).split()[0]


def get_normal_push_body(payload: WildValue, change: WildValue) -> str:
    commits_data = [
        {
            "name": get_commit_author_name(commit),
            "sha": commit["hash"].tame(check_string),
            "url": commit["links"]["html"]["href"].tame(check_string),
            "message": commit["message"].tame(check_string),
        }
        for commit in change["commits"]
    ]

    return get_push_commits_event_message(
        get_actor_info(payload),
        change["links"]["html"]["href"].tame(check_string),
        change["new"]["name"].tame(check_string),
        commits_data,
        is_truncated=change["truncated"].tame(check_bool),
    )


def get_fork_body(payload: WildValue, include_title: bool) -> str:
    return BITBUCKET_FORK_BODY.format(
        actor=get_user_info(payload["actor"]),
        fork_name=get_repository_full_name(payload["fork"]),
        fork_url=get_repository_url(payload["fork"]),
    )


def get_commit_comment_body(payload: WildValue, include_title: bool) -> str:
    comment = payload["comment"]
    action = "[commented]({})".format(comment["links"]["html"]["href"].tame(check_string))
    return get_commits_comment_action_message(
        get_actor_info(payload),
        action,
        comment["commit"]["links"]["html"]["href"].tame(check_string),
        comment["commit"]["hash"].tame(check_string),
        comment["content"]["raw"].tame(check_string),
    )


def get_commit_status_changed_body(payload: WildValue, include_title: bool) -> str:
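    # The payload only links to the commit via its API URL, so extract
    # the commit ID from that URL to build a web link to the commit.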
    commit_api_url = payload["commit_status"]["links"]["commit"]["href"].tame(check_string)
    commit_id = commit_api_url.split("/")[-1]

    commit_info = "[{short_commit_id}]({repo_url}/commits/{commit_id})".format(
        repo_url=get_repository_url(payload["repository"]),
        short_commit_id=commit_id[:7],
        commit_id=commit_id,
    )

    return BITBUCKET_COMMIT_STATUS_CHANGED_BODY.format(
        key=payload["commit_status"]["key"].tame(check_string),
        system_url=payload["commit_status"]["url"].tame(check_string),
        commit_info=commit_info,
        status=payload["commit_status"]["state"].tame(check_string),
    )


def get_issue_commented_body(payload: WildValue, include_title: bool) -> str:
    action = "[commented]({}) on".format(
        payload["comment"]["links"]["html"]["href"].tame(check_string)
    )
    return get_issue_action_body(payload, action, include_title)


def get_issue_action_body(payload: WildValue, action: str, include_title: bool) -> str:
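    # The assignee and the issue description are only included in the
    # message for newly created issues.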
    issue = payload["issue"]
    assignee = None
    message = None
    if action == "created":
        if issue["assignee"]:
            assignee = get_user_info(issue["assignee"])
        message = issue["content"]["raw"].tame(check_string)

    return get_issue_event_message(
        get_actor_info(payload),
        action,
        issue["links"]["html"]["href"].tame(check_string),
        issue["id"].tame(check_int),
        message,
        assignee,
        title=issue["title"].tame(check_string) if include_title else None,
    )


def get_pull_request_action_body(payload: WildValue, action: str, include_title: bool) -> str:
    pull_request = payload["pullrequest"]
    return get_pull_request_event_message(
        get_actor_info(payload),
        action,
        get_pull_request_url(pull_request),
        pull_request["id"].tame(check_int),
        title=pull_request["title"].tame(check_string) if include_title else None,
    )


def get_pull_request_created_or_updated_body(
    payload: WildValue, action: str, include_title: bool
) -> str:
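    # The first reviewer, if any, is reported as the assignee.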
    pull_request = payload["pullrequest"]
    assignee = None
    if pull_request["reviewers"]:
        assignee = get_user_info(pull_request["reviewers"][0])

    return get_pull_request_event_message(
        get_actor_info(payload),
        action,
        get_pull_request_url(pull_request),
        pull_request["id"].tame(check_int),
        target_branch=pull_request["source"]["branch"]["name"].tame(check_string),
        base_branch=pull_request["destination"]["branch"]["name"].tame(check_string),
        message=pull_request["description"].tame(check_string),
        assignee=assignee,
        title=pull_request["title"].tame(check_string) if include_title else None,
    )


def get_pull_request_comment_created_action_body(
    payload: WildValue,
    include_title: bool,
) -> str:
    action = "[commented]({})".format(
        payload["comment"]["links"]["html"]["href"].tame(check_string)
    )
    return get_pull_request_comment_action_body(payload, action, include_title)


def get_pull_request_deleted_or_updated_comment_action_body(
    payload: WildValue,
    action: str,
    include_title: bool,
) -> str:
    action = "{} a [comment]({})".format(
        action, payload["comment"]["links"]["html"]["href"].tame(check_string)
    )
    return get_pull_request_comment_action_body(payload, action, include_title)


def get_pull_request_comment_action_body(
    payload: WildValue,
    action: str,
    include_title: bool,
) -> str:
    action += " on"
    return get_pull_request_event_message(
        get_actor_info(payload),
        action,
        payload["pullrequest"]["links"]["html"]["href"].tame(check_string),
        payload["pullrequest"]["id"].tame(check_int),
        message=payload["comment"]["content"]["raw"].tame(check_string),
        title=payload["pullrequest"]["title"].tame(check_string) if include_title else None,
    )


def get_push_tag_body(payload: WildValue, change: WildValue) -> str:
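    # A "new" value means the tag was pushed; only an "old" value means
    # it was removed.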
    if change.get("new"):
        tag = change["new"]
        action = "pushed"
    elif change.get("old"):
        tag = change["old"]
        action = "removed"

    return get_push_tag_event_message(
        get_actor_info(payload),
        tag["name"].tame(check_string),
        tag_url=tag["links"]["html"]["href"].tame(check_string),
        action=action,
    )


def append_punctuation(title: str, message: str) -> str:
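    # Add a trailing period unless the title already ends with
    # punctuation.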
    if title[-1] not in string.punctuation:
        message = f"{message}."

    return message


def get_repo_updated_body(payload: WildValue, include_title: bool) -> str:
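    # Build one line per changed repository attribute, using the
    # "changed" template when there was a previous value and the
    # "added" template when the attribute was previously unset.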
    changes = ["website", "name", "links", "language", "full_name", "description"]
    body = ""
    repo_name = payload["repository"]["name"].tame(check_string)
    actor = get_actor_info(payload)

    for change in changes:
        new = payload["changes"][change]["new"]
        old = payload["changes"][change]["old"]
        if change == "full_name":
            change = "full name"
        if new and old:
            message = BITBUCKET_REPO_UPDATED_CHANGED.format(
                actor=actor,
                change=change,
                repo_name=repo_name,
                old=str(old.value),
                new=str(new.value),
            )
            message = append_punctuation(str(new.value), message) + "\n"
            body += message
        elif new and not old:
            message = BITBUCKET_REPO_UPDATED_ADDED.format(
                actor=actor,
                change=change,
                repo_name=repo_name,
                new=str(new.value),
            )
            message = append_punctuation(str(new.value), message) + "\n"
            body += message

    return body


def get_pull_request_url(pullrequest_payload: WildValue) -> str:
    return pullrequest_payload["links"]["html"]["href"].tame(check_string)


def get_repository_url(repository_payload: WildValue) -> str:
    return repository_payload["links"]["html"]["href"].tame(check_string)


def get_repository_name(repository_payload: WildValue) -> str:
    return repository_payload["name"].tame(check_string)


def get_repository_full_name(repository_payload: WildValue) -> str:
    return repository_payload["full_name"].tame(check_string)


def get_user_info(dct: WildValue) -> str:
    # See https://developer.atlassian.com/cloud/bitbucket/bitbucket-api-changes-gdpr/
    # Since GDPR, we don't get username; instead, we either get display_name
    # or nickname.
    if "display_name" in dct:
        return dct["display_name"].tame(check_string)

    if "nickname" in dct:
        return dct["nickname"].tame(check_string)

    # We call this an unsupported_event, even though we
    # are technically still sending a message.
    log_unsupported_webhook_event(
        summary="Could not find display_name/nickname field",
    )

    return "Unknown user"


def get_actor_info(payload: WildValue) -> str:
    actor = payload["actor"]
    return get_user_info(actor)


def get_branch_name_for_push_event(payload: WildValue) -> Optional[str]:
    change = payload["push"]["changes"][-1]
    potential_tag = (change["new"] or change["old"])["type"].tame(check_string)
    if potential_tag == "tag":
        return None
    else:
        return (change["new"] or change["old"])["name"].tame(check_string)
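

# Maps each supported non-push event type to the function that renders
# its message body.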
GET_SINGLE_MESSAGE_BODY_DEPENDING_ON_TYPE_MAPPER: Dict[str, BodyGetter] = {
    "fork": get_fork_body,
    "commit_comment": get_commit_comment_body,
    "change_commit_status": get_commit_status_changed_body,
    "issue_updated": partial(get_issue_action_body, action="updated"),
    "issue_created": partial(get_issue_action_body, action="created"),
    "issue_commented": get_issue_commented_body,
    "pull_request_created": partial(get_pull_request_created_or_updated_body, action="created"),
    "pull_request_updated": partial(get_pull_request_created_or_updated_body, action="updated"),
    "pull_request_approved": partial(get_pull_request_action_body, action="approved"),
    "pull_request_unapproved": partial(get_pull_request_action_body, action="unapproved"),
    "pull_request_fulfilled": partial(get_pull_request_action_body, action="merged"),
    "pull_request_rejected": partial(get_pull_request_action_body, action="rejected"),
    "pull_request_comment_created": get_pull_request_comment_created_action_body,
    "pull_request_comment_updated": partial(
        get_pull_request_deleted_or_updated_comment_action_body, action="updated"
    ),
    "pull_request_comment_deleted": partial(
        get_pull_request_deleted_or_updated_comment_action_body, action="deleted"
    ),
    "repo:updated": get_repo_updated_body,
}