2017-11-16 00:43:10 +01:00
|
|
|
import re
|
2016-10-25 14:50:42 +02:00
|
|
|
from functools import partial
|
2022-04-03 11:08:49 +02:00
|
|
|
from typing import Callable, Dict, Optional
|
2017-11-16 00:43:10 +01:00
|
|
|
|
2016-10-25 14:50:42 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2017-11-16 00:43:10 +01:00
|
|
|
|
2021-09-13 20:23:54 +02:00
|
|
|
from zerver.decorator import log_unsupported_webhook_event, webhook_view
|
2020-08-19 22:26:38 +02:00
|
|
|
from zerver.lib.exceptions import UnsupportedWebhookEventType
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
2016-10-25 14:50:42 +02:00
|
|
|
from zerver.lib.response import json_success
|
2022-05-24 00:25:03 +02:00
|
|
|
from zerver.lib.validator import (
|
|
|
|
WildValue,
|
|
|
|
check_bool,
|
|
|
|
check_int,
|
|
|
|
check_none_or,
|
|
|
|
check_string,
|
|
|
|
to_wild_value,
|
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.webhooks.common import (
|
|
|
|
check_send_webhook_message,
|
|
|
|
get_http_headers_from_filename,
|
2021-05-06 16:02:36 +02:00
|
|
|
get_setup_webhook_message,
|
2020-06-11 00:54:34 +02:00
|
|
|
validate_extract_webhook_http_header,
|
|
|
|
)
|
|
|
|
from zerver.lib.webhooks.git import (
|
|
|
|
CONTENT_MESSAGE_TEMPLATE,
|
|
|
|
TOPIC_WITH_BRANCH_TEMPLATE,
|
|
|
|
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE,
|
|
|
|
get_commits_comment_action_message,
|
|
|
|
get_issue_event_message,
|
|
|
|
get_pull_request_event_message,
|
|
|
|
get_push_commits_event_message,
|
|
|
|
get_push_tag_event_message,
|
|
|
|
get_release_event_message,
|
2022-11-07 21:24:35 +01:00
|
|
|
get_short_sha,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
2017-05-02 01:00:50 +02:00
|
|
|
from zerver.models import UserProfile
|
2019-06-22 06:57:40 +02:00
|
|
|
|
|
|
|
# Maps a fixture filename to the HTTP headers GitHub would have sent; the
# event name is derived from the filename and stored in HTTP_X_GITHUB_EVENT.
fixture_to_headers = get_http_headers_from_filename("HTTP_X_GITHUB_EVENT")

# Topic format used for GitHub discussion events.
TOPIC_FOR_DISCUSSION = "{repo} discussion #{number}: {title}"

# Message body for a newly created discussion.
DISCUSSION_TEMPLATE = "{author} created [discussion #{discussion_id}]({url}) in {category}:\n```quote\n### {title}\n{body}\n```"

# Message body for a comment on an existing discussion.
DISCUSSION_COMMENT_TEMPLATE = "{author} [commented]({comment_url}) on [discussion #{discussion_id}]({discussion_url}):\n```quote\n{body}\n```"
|
2021-10-18 13:33:38 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
class Helper:
    """Bundle the webhook payload with per-request options for the
    event-specific message-body builders below."""

    def __init__(
        self,
        payload: WildValue,
        include_title: bool,
    ) -> None:
        # Parsed (but not yet tamed) JSON payload from GitHub.
        self.payload = payload
        # Whether the PR/issue title should be included in the message.
        self.include_title = include_title

    def log_unsupported(self, event: str) -> None:
        """Record that GitHub sent an event variant we do not handle."""
        summary = f"The '{event}' event isn't currently supported by the GitHub webhook"
        log_unsupported_webhook_event(summary=summary)
|
2020-09-01 17:50:13 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_opened_or_update_pull_request_body(helper: Helper) -> str:
    """Build the message for an opened/edited/synchronized pull request."""
    payload = helper.payload
    pr = payload["pull_request"]

    raw_action = payload["action"].tame(check_string)
    # GitHub calls a push of new commits to a PR "synchronize"; say "updated".
    action = "updated" if raw_action == "synchronize" else raw_action

    assignee = (
        pr["assignee"]["login"].tame(check_string) if pr.get("assignee") else None
    )

    # Only include the PR description when it is new or was just edited.
    changes = payload.get("changes", {})
    description = None
    if action == "opened" or "body" in changes:
        description = pr["body"].tame(check_none_or(check_string))

    return get_pull_request_event_message(
        get_sender_name(payload),
        action,
        pr["html_url"].tame(check_string),
        target_branch=pr["head"]["ref"].tame(check_string),
        base_branch=pr["base"]["ref"].tame(check_string),
        message=description,
        assignee=assignee,
        number=pr["number"].tame(check_int),
        title=pr["title"].tame(check_string) if helper.include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_assigned_or_unassigned_pull_request_body(helper: Helper) -> str:
    """Build the message for a PR "assigned"/"unassigned" event.

    When the payload carries an assignee, append " to <login>" before the
    trailing period of the standard PR event message.
    """
    payload = helper.payload
    include_title = helper.include_title
    pull_request = payload["pull_request"]

    base_message = get_pull_request_event_message(
        get_sender_name(payload),
        payload["action"].tame(check_string),
        pull_request["html_url"].tame(check_string),
        number=pull_request["number"].tame(check_int),
        title=pull_request["title"].tame(check_string) if include_title else None,
    )

    # Single guarded branch: the original tested `assignee` twice, leaving
    # `stringified_assignee` possibly unbound between the checks.
    assignee = pull_request.get("assignee")
    if assignee:
        stringified_assignee = assignee["login"].tame(check_string)
        # Drop the trailing "." and splice in the assignee.
        return f"{base_message[:-1]} to {stringified_assignee}."
    return base_message
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_closed_pull_request_body(helper: Helper) -> str:
    """Build the message for a closed PR (merged or closed without merge)."""
    payload = helper.payload
    pr = payload["pull_request"]
    merged = pr["merged"].tame(check_bool)
    verb = "merged" if merged else "closed without merge"
    return get_pull_request_event_message(
        get_sender_name(payload),
        verb,
        pr["html_url"].tame(check_string),
        number=pr["number"].tame(check_int),
        title=pr["title"].tame(check_string) if helper.include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_membership_body(helper: Helper) -> str:
    """Build the message for a team membership add/remove event."""
    payload = helper.payload
    action = payload["action"].tame(check_string)
    member = payload["member"]
    username = member["login"].tame(check_string)
    html_url = member["html_url"].tame(check_string)
    team_name = payload["team"]["name"].tame(check_string)
    # "removed from" vs. "added to" phrasing.
    preposition = "from" if action == "removed" else "to"

    return (
        f"{get_sender_name(payload)} {action} [{username}]({html_url})"
        f" {preposition} the {team_name} team."
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_member_body(helper: Helper) -> str:
    """Build the message for a collaborator change on a repository."""
    payload = helper.payload
    member = payload["member"]
    sender = get_sender_name(payload)
    action = payload["action"].tame(check_string)
    login = member["login"].tame(check_string)
    member_url = member["html_url"].tame(check_string)
    repo_name = get_repository_name(payload)
    repo_url = payload["repository"]["html_url"].tame(check_string)
    return f"{sender} {action} [{login}]({member_url}) to [{repo_name}]({repo_url})."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_issue_body(helper: Helper) -> str:
    """Build the message for an issue event (opened, closed, etc.)."""
    payload = helper.payload
    issue = payload["issue"]
    assignee = issue["assignee"]
    return get_issue_event_message(
        get_sender_name(payload),
        payload["action"].tame(check_string),
        issue["html_url"].tame(check_string),
        issue["number"].tame(check_int),
        issue["body"].tame(check_none_or(check_string)),
        assignee=assignee["login"].tame(check_string) if assignee else None,
        title=issue["title"].tame(check_string) if helper.include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_issue_comment_body(helper: Helper) -> str:
    """Build the message for a comment created/edited/deleted on an issue."""
    payload = helper.payload
    comment = payload["comment"]
    issue = payload["issue"]
    raw_action = payload["action"].tame(check_string)
    comment_url = comment["html_url"].tame(check_string)

    # Turn the action into a markdown-link phrase pointing at the comment.
    if raw_action == "created":
        verb_phrase = "[commented]"
    else:
        verb_phrase = f"{raw_action} a [comment]"
    action = f"{verb_phrase}({comment_url}) on"

    return get_issue_event_message(
        get_sender_name(payload),
        action,
        issue["html_url"].tame(check_string),
        issue["number"].tame(check_int),
        comment["body"].tame(check_string),
        title=issue["title"].tame(check_string) if helper.include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_fork_body(helper: Helper) -> str:
    """Build the message for a repository fork event."""
    payload = helper.payload
    forkee = payload["forkee"]
    fork_name = forkee["name"].tame(check_string)
    fork_url = forkee["html_url"].tame(check_string)
    return f"{get_sender_name(payload)} forked [{fork_name}]({fork_url})."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_deployment_body(helper: Helper) -> str:
    """Build the message for a new deployment event."""
    payload = helper.payload
    return "{} created new deployment.".format(get_sender_name(payload))
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_change_deployment_status_body(helper: Helper) -> str:
    """Build the message for a deployment status change."""
    payload = helper.payload
    state = payload["deployment_status"]["state"].tame(check_string)
    return f"Deployment changed status to {state}."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_create_or_delete_body(helper: Helper, action: str) -> str:
    """Build the message for a branch/tag create or delete event.

    `action` ("created"/"deleted") is bound by the event dispatcher.
    """
    payload = helper.payload
    sender = get_sender_name(payload)
    ref_type = payload["ref_type"].tame(check_string)
    ref = payload["ref"].tame(check_string)
    # NOTE(review): the trailing rstrip() was in the original; with the
    # final "." it looks like a no-op, but it is preserved for safety.
    return f"{sender} {action} {ref_type} {ref}.".rstrip()
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_commit_comment_body(helper: Helper) -> str:
    """Build the message for a comment on a commit."""
    payload = helper.payload
    comment = payload["comment"]
    comment_url = comment["html_url"].tame(check_string)
    # The comment URL is "<commit_url>#<fragment>"; drop the fragment to
    # get a link to the commit itself.
    commit_url = comment_url.split("#", 1)[0]
    return get_commits_comment_action_message(
        get_sender_name(payload),
        f"[commented]({comment_url})",
        commit_url,
        comment["commit_id"].tame(check_string),
        comment["body"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_push_tags_body(helper: Helper) -> str:
    """Build the message for a tag being pushed or removed."""
    payload = helper.payload
    tag = get_tag_name_from_ref(payload["ref"].tame(check_string))
    was_created = payload["created"].tame(check_bool)
    return get_push_tag_event_message(
        get_sender_name(payload),
        tag,
        action="pushed" if was_created else "removed",
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_push_commits_body(helper: Helper) -> str:
    """Build the message for a push containing commits."""
    payload = helper.payload

    def commit_info(commit: WildValue) -> Dict[str, str]:
        # Prefer the GitHub username; fall back to the git author name.
        author = commit["author"]
        if author.get("username"):
            name = author["username"].tame(check_string)
        else:
            name = author["name"].tame(check_string)
        return {
            "name": name,
            "sha": commit["id"].tame(check_string),
            "url": commit["url"].tame(check_string),
            "message": commit["message"].tame(check_string),
        }

    commits_data = [commit_info(commit) for commit in payload["commits"]]

    return get_push_commits_event_message(
        get_sender_name(payload),
        payload["compare"].tame(check_string),
        get_branch_name_from_ref(payload["ref"].tame(check_string)),
        commits_data,
        deleted=payload["deleted"].tame(check_bool),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-10-18 13:33:38 +02:00
|
|
|
def get_discussion_body(helper: Helper) -> str:
    """Build the message for a newly created discussion."""
    payload = helper.payload
    discussion = payload["discussion"]
    return DISCUSSION_TEMPLATE.format(
        author=get_sender_name(payload),
        url=discussion["html_url"].tame(check_string),
        body=discussion["body"].tame(check_string),
        category=discussion["category"]["name"].tame(check_string),
        discussion_id=discussion["number"].tame(check_int),
        title=discussion["title"].tame(check_string),
    )
|
|
|
|
|
|
|
|
|
|
|
|
def get_discussion_comment_body(helper: Helper) -> str:
    """Build the message for a comment on a discussion."""
    payload = helper.payload
    comment = payload["comment"]
    discussion = payload["discussion"]
    return DISCUSSION_COMMENT_TEMPLATE.format(
        author=get_sender_name(payload),
        body=comment["body"].tame(check_string),
        discussion_url=discussion["html_url"].tame(check_string),
        comment_url=comment["html_url"].tame(check_string),
        discussion_id=discussion["number"].tame(check_int),
    )
|
|
|
|
|
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_public_body(helper: Helper) -> str:
    """Build the message for a repository being made public."""
    payload = helper.payload
    full_name = get_repository_full_name(payload)
    repo_url = payload["repository"]["html_url"].tame(check_string)
    return f"{get_sender_name(payload)} made the repository [{full_name}]({repo_url}) public."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_wiki_pages_body(helper: Helper) -> str:
    """Build the message summarizing wiki (gollum) page changes."""
    payload = helper.payload
    entry_template = "* {action} [{title}]({url})\n"
    # One bullet per changed wiki page.
    entries = "".join(
        entry_template.format(
            action=page["action"].tame(check_string),
            title=page["title"].tame(check_string),
            url=page["html_url"].tame(check_string),
        )
        for page in payload["pages"]
    )
    return f"{get_sender_name(payload)}:\n{entries.rstrip()}"
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_watch_body(helper: Helper) -> str:
    """Build the message for a repository being starred."""
    payload = helper.payload
    full_name = get_repository_full_name(payload)
    repo_url = payload["repository"]["html_url"].tame(check_string)
    return f"{get_sender_name(payload)} starred the repository [{full_name}]({repo_url})."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_repository_body(helper: Helper) -> str:
    """Build the message for a repository-level event (created, archived, ...)."""
    payload = helper.payload
    action = payload["action"].tame(check_string)
    full_name = get_repository_full_name(payload)
    repo_url = payload["repository"]["html_url"].tame(check_string)
    return f"{get_sender_name(payload)} {action} the repository [{full_name}]({repo_url})."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_add_team_body(helper: Helper) -> str:
    """Build the message for a repository being added to a team."""
    payload = helper.payload
    full_name = get_repository_full_name(payload)
    repo_url = payload["repository"]["html_url"].tame(check_string)
    team_name = payload["team"]["name"].tame(check_string)
    return f"The repository [{full_name}]({repo_url}) was added to team {team_name}."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_team_body(helper: Helper) -> str:
    """Build the message for a team/edited event.

    Handle each known kind of change (description, name, privacy) in
    priority order; fall back to a generic notice for anything else.
    """
    payload = helper.payload
    changes = payload["changes"]
    team = payload["team"]

    if "description" in changes:
        actor = payload["sender"]["login"].tame(check_string)
        description = team["description"].tame(check_string)
        return f"**{actor}** changed the team description to:\n```quote\n{description}\n```"

    if "name" in changes:
        old_name = changes["name"]["from"].tame(check_string)
        current_name = team["name"].tame(check_string)
        return f"Team `{old_name}` was renamed to `{current_name}`."

    if "privacy" in changes:
        visibility = team["privacy"].tame(check_string)
        return f"Team visibility changed to `{visibility}`"

    # Unknown change kind: log it so we can add support later.
    missing_keys = "/".join(sorted(changes.keys()))
    helper.log_unsupported(f"team/edited (changes: {missing_keys})")

    # Do our best to give useful info to the customer--at least
    # if they know something changed, they can go to GitHub for
    # more details. And if it's just spam, you can control that
    # from GitHub.
    return f"Team has changes to `{missing_keys}` data."
|
2020-05-14 14:20:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_release_body(helper: Helper) -> str:
    """Build the message for a release event."""
    payload = helper.payload
    release = payload["release"]

    # Not every GitHub release has a "name" set; if not there, use the tag name.
    if release["name"]:
        release_name = release["name"].tame(check_string)
    else:
        release_name = release["tag_name"].tame(check_string)

    return get_release_event_message(
        user_name=get_sender_name(payload),
        action=payload["action"].tame(check_string),
        tagname=release["tag_name"].tame(check_string),
        release_name=release_name,
        url=release["html_url"].tame(check_string),
    )
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_page_build_body(helper: Helper) -> str:
    """Build the message for a GitHub Pages build status event."""
    payload = helper.payload
    build = payload["build"]
    status = build["status"].tame(check_string)
    # Human-readable phrases for the known build statuses; the "errored"
    # phrase has a slot for the error details filled in below.
    actions = {
        "null": "has yet to be built",
        "building": "is being built",
        "errored": "has failed: {}",
        "built": "has finished building",
    }

    action = actions.get(status, f"is {status}")
    if build["error"]["message"]:
        error_text = build["error"]["message"].tame(check_string)
        action = action.format(CONTENT_MESSAGE_TEMPLATE.format(message=error_text))

    pusher = payload["build"]["pusher"]["login"].tame(check_string)
    return f"GitHub Pages build, triggered by {pusher}, {action}."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_status_body(helper: Helper) -> str:
    """Build the message for a commit status (CI state) change."""
    payload = helper.payload
    state = payload["state"].tame(check_string)
    if payload["target_url"]:
        # Link the state text to the CI details page when one is provided.
        target_url = payload["target_url"].tame(check_string)
        status = f"[{state}]({target_url})"
    else:
        status = state

    short_sha = get_short_sha(payload["sha"].tame(check_string))
    commit_url = payload["commit"]["html_url"].tame(check_string)
    return f"[{short_sha}]({commit_url}) changed its status to {status}."
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-03-01 23:16:08 +01:00
|
|
|
def get_locked_or_unlocked_pull_request_body(helper: Helper) -> str:
    """Build the message for a PR being locked or unlocked."""
    payload = helper.payload
    pr = payload["pull_request"]
    action = payload["action"].tame(check_string)

    if action == "unlocked":
        template = "{sender} has unlocked [PR #{pr_number}]({pr_url})."
    else:
        template = "{sender} has locked [PR #{pr_number}]({pr_url}) as {reason} and limited conversation to collaborators."

    if pr["active_lock_reason"]:
        active_lock_reason = pr["active_lock_reason"].tame(check_string)
    else:
        active_lock_reason = None

    return template.format(
        sender=get_sender_name(payload),
        pr_number=pr["number"].tame(check_int),
        pr_url=pr["html_url"].tame(check_string),
        reason=active_lock_reason,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def get_pull_request_auto_merge_body(helper: Helper) -> str:
    """Build the message for auto-merge being enabled or disabled on a PR."""
    payload = helper.payload
    pr = payload["pull_request"]
    action = payload["action"].tame(check_string)

    if action == "auto_merge_disabled":
        template = "{sender} has disabled auto merge for [PR #{pr_number}]({pr_url})."
    else:
        template = "{sender} has enabled auto merge for [PR #{pr_number}]({pr_url})."

    return template.format(
        sender=get_sender_name(payload),
        pr_number=pr["number"].tame(check_int),
        pr_url=pr["html_url"].tame(check_string),
    )
|
|
|
|
|
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_pull_request_ready_for_review_body(helper: Helper) -> str:
    """Build the message for a draft PR being marked ready for review."""
    payload = helper.payload
    pr = payload["pull_request"]
    template = "**{sender}** has marked [PR #{pr_number}]({pr_url}) as ready for review."
    return template.format(
        sender=get_sender_name(payload),
        pr_number=pr["number"].tame(check_int),
        pr_url=pr["html_url"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_pull_request_review_body(helper: Helper) -> str:
    """Build the message for a submitted PR review."""
    payload = helper.payload
    pr = payload["pull_request"]
    title = "for #{} {}".format(
        pr["number"].tame(check_int),
        pr["title"].tame(check_string),
    )
    return get_pull_request_event_message(
        get_sender_name(payload),
        "submitted",
        payload["review"]["html_url"].tame(check_string),
        type="PR review",
        title=title if helper.include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_pull_request_review_comment_body(helper: Helper) -> str:
    """Build the message for a comment on a PR review."""
    payload = helper.payload
    pr = payload["pull_request"]
    comment = payload["comment"]
    action = payload["action"].tame(check_string)

    # Only include the comment text when the comment was just created.
    message = comment["body"].tame(check_string) if action == "created" else None

    title = "on #{} {}".format(
        pr["number"].tame(check_int),
        pr["title"].tame(check_string),
    )

    return get_pull_request_event_message(
        get_sender_name(payload),
        action,
        comment["html_url"].tame(check_string),
        message=message,
        type="PR review comment",
        title=title if helper.include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_pull_request_review_requested_body(helper: Helper) -> str:
    """Build the message for a pull_request "review_requested" event.

    The payload carries either a single "requested_reviewer" user or a
    "requested_team"; each requested reviewer is rendered as a markdown
    link and all of them are joined into one sentence.
    """
    payload = helper.payload
    include_title = helper.include_title
    requested_reviewer = [payload["requested_reviewer"]] if "requested_reviewer" in payload else []
    requested_team = [payload["requested_team"]] if "requested_team" in payload else []

    sender = get_sender_name(payload)
    pr_number = payload["pull_request"]["number"].tame(check_int)
    pr_url = payload["pull_request"]["html_url"].tame(check_string)
    message = "**{sender}** requested {reviewers} for a review on [PR #{pr_number}]({pr_url})."
    message_with_title = (
        "**{sender}** requested {reviewers} for a review on [PR #{pr_number} {title}]({pr_url})."
    )
    body = message_with_title if include_title else message

    all_reviewers = []

    for reviewer in requested_reviewer:
        all_reviewers.append(
            "[{login}]({html_url})".format(
                login=reviewer["login"].tame(check_string),
                html_url=reviewer["html_url"].tame(check_string),
            )
        )

    for team_reviewer in requested_team:
        all_reviewers.append(
            "[{name}]({html_url})".format(
                name=team_reviewer["name"].tame(check_string),
                html_url=team_reviewer["html_url"].tame(check_string),
            )
        )

    # Join the reviewer links grammatically ("a, b and c") instead of
    # silently using only the first one.
    if len(all_reviewers) == 1:
        reviewers = all_reviewers[0]
    else:
        reviewers = "{} and {}".format(", ".join(all_reviewers[:-1]), all_reviewers[-1])

    return body.format(
        sender=sender,
        reviewers=reviewers,
        pr_number=pr_number,
        pr_url=pr_url,
        title=payload["pull_request"]["title"].tame(check_string) if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_check_run_body(helper: Helper) -> str:
    """Build the one-line notification for a (completed) check run."""
    payload = helper.payload
    check_run = payload["check_run"]
    head_sha = check_run["head_sha"].tame(check_string)

    # Link the abbreviated SHA to the commit page on the repository.
    commit_url = "{}/commit/{}".format(
        payload["repository"]["html_url"].tame(check_string),
        head_sha,
    )

    template = (
        "Check [{name}]({html_url}) {status} ({conclusion}). ([{short_hash}]({commit_url}))"
    )
    return template.format(
        name=check_run["name"].tame(check_string),
        html_url=check_run["html_url"].tame(check_string),
        status=check_run["status"].tame(check_string),
        conclusion=check_run["conclusion"].tame(check_string),
        short_hash=get_short_sha(head_sha),
        commit_url=commit_url,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_star_body(helper: Helper) -> str:
    """Build the notification for a star/unstar event on a repository."""
    payload = helper.payload

    # GitHub reports "created"/"deleted"; translate to reader-friendly verbs.
    if payload["action"].tame(check_string) == "created":
        verb = "starred"
    else:
        verb = "unstarred"

    return "{user} {action} the repository [{repo}]({url}).".format(
        user=payload["sender"]["login"].tame(check_string),
        action=verb,
        repo=get_repository_full_name(payload),
        url=payload["repository"]["html_url"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:50:13 +02:00
|
|
|
def get_ping_body(helper: Helper) -> str:
    """Answer GitHub's initial "ping" event with the standard setup message."""
    sender = get_sender_name(helper.payload)
    return get_setup_webhook_message("GitHub", sender)
|
2017-03-17 00:18:25 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-03 11:08:49 +02:00
|
|
|
def get_repository_name(payload: WildValue) -> str:
    """Return the short (unqualified) repository name from the payload."""
    repository = payload["repository"]
    return repository["name"].tame(check_string)
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-03 11:08:49 +02:00
|
|
|
def get_repository_full_name(payload: WildValue) -> str:
    """Return the owner-qualified repository name (e.g. "org/repo")."""
    repository = payload["repository"]
    return repository["full_name"].tame(check_string)
|
2020-06-27 13:19:32 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-03 11:08:49 +02:00
|
|
|
def get_organization_name(payload: WildValue) -> str:
    """Return the login of the organization the event belongs to."""
    organization = payload["organization"]
    return organization["login"].tame(check_string)
|
2017-07-17 04:03:54 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-03 11:08:49 +02:00
|
|
|
def get_sender_name(payload: WildValue) -> str:
    """Return the login of the user who triggered the event."""
    sender = payload["sender"]
    return sender["login"].tame(check_string)
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_branch_name_from_ref(ref_string: str) -> str:
    """Strip a leading "refs/heads/" from a git ref, yielding the branch name.

    A ref that does not start with the prefix is returned unchanged.
    """
    prefix = "refs/heads/"
    if ref_string.startswith(prefix):
        return ref_string[len(prefix) :]
    return ref_string
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_tag_name_from_ref(ref_string: str) -> str:
    """Strip a leading "refs/tags/" from a git ref, yielding the tag name.

    A ref that does not start with the prefix is returned unchanged.
    """
    prefix = "refs/tags/"
    if ref_string.startswith(prefix):
        return ref_string[len(prefix) :]
    return ref_string
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-03 11:08:49 +02:00
|
|
|
def is_commit_push_event(payload: WildValue) -> bool:
    """Return True when the push "ref" names a branch (refs/heads/...).

    A push whose ref is not under refs/heads/ is a tag push.
    """
    ref = payload["ref"].tame(check_string)
    return ref.startswith("refs/heads/")
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-03 11:08:49 +02:00
|
|
|
def get_subject_based_on_type(payload: WildValue, event: str) -> str:
    """Compute the Zulip topic for a given (already-refined) event name.

    PR, issue, and discussion events get per-thread topics so that
    conversations about different threads stay separate; most other
    events fall through to the plain repository name.
    """
    if "pull_request" in event:
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repository_name(payload),
            type="PR",
            id=payload["pull_request"]["number"].tame(check_int),
            title=payload["pull_request"]["title"].tame(check_string),
        )
    elif event.startswith("issue"):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repository_name(payload),
            type="issue",
            id=payload["issue"]["number"].tame(check_int),
            title=payload["issue"]["title"].tame(check_string),
        )
    elif event.startswith("deployment"):
        return "{} / Deployment on {}".format(
            get_repository_name(payload),
            payload["deployment"]["environment"].tame(check_string),
        )
    elif event == "membership":
        return "{} organization".format(payload["organization"]["login"].tame(check_string))
    elif event == "team":
        return "team {}".format(payload["team"]["name"].tame(check_string))
    elif event == "push_commits":
        return TOPIC_WITH_BRANCH_TEMPLATE.format(
            repo=get_repository_name(payload),
            branch=get_branch_name_from_ref(payload["ref"].tame(check_string)),
        )
    elif event == "gollum":
        # Wiki edits have no branch; use a fixed pseudo-branch label.
        return TOPIC_WITH_BRANCH_TEMPLATE.format(
            repo=get_repository_name(payload),
            branch="wiki pages",
        )
    elif event == "ping":
        # An organization-level webhook's ping has no repository; fall
        # through to the repository name otherwise.
        if not payload.get("repository"):
            return get_organization_name(payload)
    elif event == "check_run":
        return f"{get_repository_name(payload)} / checks"
    elif event.startswith("discussion"):
        return TOPIC_FOR_DISCUSSION.format(
            repo=get_repository_name(payload),
            number=payload["discussion"]["number"].tame(check_int),
            title=payload["discussion"]["title"].tame(check_string),
        )

    # Default topic: just the repository name.
    return get_repository_name(payload)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 18:25:17 +02:00
|
|
|
# Maps each Zulip-specific event name (as computed by get_zulip_event_name)
# to the function that renders the message body for that event.  The key
# order also determines ALL_EVENT_TYPES below.
EVENT_FUNCTION_MAPPER: Dict[str, Callable[[Helper], str]] = {
    "commit_comment": get_commit_comment_body,
    "closed_pull_request": get_closed_pull_request_body,
    "create": partial(get_create_or_delete_body, action="created"),
    "check_run": get_check_run_body,
    "delete": partial(get_create_or_delete_body, action="deleted"),
    "deployment": get_deployment_body,
    "deployment_status": get_change_deployment_status_body,
    "discussion": get_discussion_body,
    "discussion_comment": get_discussion_comment_body,
    "fork": get_fork_body,
    "gollum": get_wiki_pages_body,
    "issue_comment": get_issue_comment_body,
    "issues": get_issue_body,
    "member": get_member_body,
    "membership": get_membership_body,
    "opened_or_update_pull_request": get_opened_or_update_pull_request_body,
    "assigned_or_unassigned_pull_request": get_assigned_or_unassigned_pull_request_body,
    "page_build": get_page_build_body,
    "ping": get_ping_body,
    "public": get_public_body,
    "pull_request_ready_for_review": get_pull_request_ready_for_review_body,
    "pull_request_review": get_pull_request_review_body,
    "pull_request_review_comment": get_pull_request_review_comment_body,
    "pull_request_review_requested": get_pull_request_review_requested_body,
    "pull_request_auto_merge": get_pull_request_auto_merge_body,
    "locked_or_unlocked_pull_request": get_locked_or_unlocked_pull_request_body,
    "push_commits": get_push_commits_body,
    "push_tags": get_push_tags_body,
    "release": get_release_body,
    "repository": get_repository_body,
    "star": get_star_body,
    "status": get_status_body,
    "team": get_team_body,
    "team_add": get_add_team_body,
    "watch": get_watch_body,
}
|
|
|
|
|
2019-02-19 20:17:40 +01:00
|
|
|
# X-GitHub-Event header values we deliberately do not handle;
# get_zulip_event_name returns None for these instead of raising
# UnsupportedWebhookEventType.
IGNORED_EVENTS = [
    "check_suite",
    "label",
    "meta",
    "milestone",
    "organization",
    "project_card",
    "repository_vulnerability_alert",
]
|
|
|
|
|
2020-09-01 14:19:33 +02:00
|
|
|
# pull_request "action" values we deliberately drop (no message is sent).
IGNORED_PULL_REQUEST_ACTIONS = [
    "approved",
    "converted_to_draft",
    "labeled",
    "review_request_removed",
    "unlabeled",
]
|
|
|
|
|
2020-09-01 15:07:08 +02:00
|
|
|
# team "action" values we deliberately drop (no message is sent).
IGNORED_TEAM_ACTIONS = [
    # These are actions that are well documented by github
    # (https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads)
    # but we ignore them for now, possibly just due to laziness.
    # One curious example here is team/added_to_repository, which is
    # possibly the same as team_add.
    "added_to_repository",
    "created",
    "deleted",
    "removed_from_repository",
]
|
|
|
|
|
2021-07-13 18:37:49 +02:00
|
|
|
# Iterating a dict yields its keys, in insertion order.
ALL_EVENT_TYPES = list(EVENT_FUNCTION_MAPPER)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-13 18:37:49 +02:00
|
|
|
|
|
|
|
@webhook_view("GitHub", notify_bot_owner_on_invalid_json=True, all_event_types=ALL_EVENT_TYPES)
@has_request_variables
def api_github_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: WildValue = REQ(argument_type="body", converter=to_wild_value),
    branches: Optional[str] = REQ(default=None),
    user_specified_topic: Optional[str] = REQ("topic", default=None),
) -> HttpResponse:
    """
    GitHub sends the event as an HTTP header.  We have our
    own Zulip-specific concept of an event that often maps
    directly to the X-GitHub-Event header's event, but we sometimes
    refine it based on the payload.
    """
    header_event = validate_extract_webhook_http_header(request, "X-GitHub-Event", "GitHub")
    if header_event is None:
        raise UnsupportedWebhookEventType("no header provided")

    event = get_zulip_event_name(header_event, payload, branches)
    if event is None:
        # This is nothing to worry about--get_event() returns None
        # for events that are valid but not yet handled by us.
        # See IGNORED_EVENTS, for example.
        return json_success(request)
    subject = get_subject_based_on_type(payload, event)

    # Every refined event name is guaranteed to be a key of the mapper.
    body_function = EVENT_FUNCTION_MAPPER[event]

    # include_title: the user overriding the topic signals that they want
    # PR/issue titles embedded in the message body instead.
    helper = Helper(
        payload=payload,
        include_title=user_specified_topic is not None,
    )
    body = body_function(helper)

    check_send_webhook_message(request, user_profile, subject, body, event)
    return json_success(request)
|
2016-10-25 14:50:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-09-01 17:13:25 +02:00
|
|
|
def get_zulip_event_name(
    header_event: str,
    payload: WildValue,
    branches: Optional[str],
) -> Optional[str]:
    """
    Usually, we return an event name that is a key in EVENT_FUNCTION_MAPPER.

    We return None for an event that we know we don't want to handle.
    """
    if header_event == "pull_request":
        action = payload["action"].tame(check_string)
        if action in ("opened", "synchronize", "reopened", "edited"):
            return "opened_or_update_pull_request"
        if action in ("assigned", "unassigned"):
            return "assigned_or_unassigned_pull_request"
        if action == "closed":
            return "closed_pull_request"
        if action == "review_requested":
            return "pull_request_review_requested"
        if action == "ready_for_review":
            return "pull_request_ready_for_review"
        if action in ("locked", "unlocked"):
            return "locked_or_unlocked_pull_request"
        if action in ("auto_merge_enabled", "auto_merge_disabled"):
            return "pull_request_auto_merge"
        if action in IGNORED_PULL_REQUEST_ACTIONS:
            return None
        # Unknown pull_request actions fall through to the raise below.
    elif header_event == "push":
        if is_commit_push_event(payload):
            # Honor the optional branch filter configured in the URL.
            if branches is not None:
                branch = get_branch_name_from_ref(payload["ref"].tame(check_string))
                if branch not in branches:
                    return None
            return "push_commits"
        else:
            return "push_tags"
    elif header_event == "check_run":
        # Only completed check runs have a conclusion worth reporting.
        if payload["check_run"]["status"].tame(check_string) != "completed":
            return None
        return header_event
    elif header_event == "team":
        action = payload["action"].tame(check_string)
        if action == "edited":
            return "team"
        if action in IGNORED_TEAM_ACTIONS:
            # no need to spam our logs, we just haven't implemented it yet
            return None
        else:
            # this means GH has actually added new actions since September 2020,
            # so it's a bit more cause for alarm
            raise UnsupportedWebhookEventType(f"unsupported team action {action}")
    elif header_event in EVENT_FUNCTION_MAPPER:
        return header_event
    elif header_event in IGNORED_EVENTS:
        return None

    complete_event = "{}:{}".format(
        header_event, payload.get("action", "???").tame(check_string)
    )  # nocoverage
    raise UnsupportedWebhookEventType(complete_event)