2020-01-14 22:06:24 +01:00
|
|
|
import re
|
2016-08-18 20:57:50 +02:00
|
|
|
from functools import partial
|
2018-07-20 00:49:05 +02:00
|
|
|
from inspect import signature
|
2020-08-21 18:02:28 +02:00
|
|
|
from typing import Any, Dict, List, Optional
|
2017-11-16 00:43:10 +01:00
|
|
|
|
|
|
|
from django.http import HttpRequest, HttpResponse
|
|
|
|
|
2020-08-20 00:32:15 +02:00
|
|
|
from zerver.decorator import webhook_view
|
2020-08-19 22:26:38 +02:00
|
|
|
from zerver.lib.exceptions import UnsupportedWebhookEventType
|
2017-10-31 04:25:48 +01:00
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
2017-11-16 00:43:10 +01:00
|
|
|
from zerver.lib.response import json_success
|
2020-07-29 07:39:42 +02:00
|
|
|
from zerver.lib.validator import check_bool
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.webhooks.common import (
|
|
|
|
check_send_webhook_message,
|
|
|
|
validate_extract_webhook_http_header,
|
|
|
|
)
|
|
|
|
from zerver.lib.webhooks.git import (
|
|
|
|
EMPTY_SHA,
|
|
|
|
TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE,
|
|
|
|
get_commits_comment_action_message,
|
|
|
|
get_issue_event_message,
|
|
|
|
get_pull_request_event_message,
|
|
|
|
get_push_commits_event_message,
|
|
|
|
get_push_tag_event_message,
|
|
|
|
get_remove_branch_event_message,
|
|
|
|
)
|
2017-05-02 01:00:50 +02:00
|
|
|
from zerver.models import UserProfile
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2020-01-14 22:06:24 +01:00
|
|
|
|
2019-07-15 08:10:55 +02:00
|
|
|
def fixture_to_headers(fixture_name: str) -> Dict[str, Any]:
    """Derive the GitLab HTTP event header from a test fixture's file name.

    Fixtures whose names start with "build" are ambiguous (two possible
    event types), so no header can be derived for them.
    """
    if fixture_name.startswith("build"):
        return {}  # Since there are 2 possible event types.

    # Map "push_hook__push_commits_more_than_limit.json" into GitLab's
    # HTTP event title "Push Hook".
    event_title = fixture_name.split("__")[0].replace("_", " ").title()
    return {"HTTP_X_GITLAB_EVENT": event_title}
|
2016-10-06 17:13:40 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_push_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a Push Hook, dispatching on push type."""
    # An "after" SHA of all zeros (EMPTY_SHA) signals a branch deletion.
    branch_was_removed = payload.get("after") == EMPTY_SHA
    if branch_was_removed:
        return get_remove_branch_event_body(payload)
    return get_normal_push_event_body(payload)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_normal_push_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a push that adds commits to a branch."""
    before_sha = payload["before"]
    after_sha = payload["after"]
    compare_url = f"{get_project_homepage(payload)}/compare/{before_sha}...{after_sha}"

    commits = []
    for commit in payload["commits"]:
        commits.append(
            {
                "name": commit.get("author").get("name"),
                "sha": commit.get("id"),
                "message": commit.get("message"),
                "url": commit.get("url"),
            }
        )

    return get_push_commits_event_message(
        get_user_name(payload),
        compare_url,
        get_branch_name(payload),
        commits,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_remove_branch_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body announcing that a branch was deleted."""
    user = get_user_name(payload)
    branch = get_branch_name(payload)
    return get_remove_branch_event_message(user, branch)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_tag_push_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a Tag Push Hook (tag pushed or removed)."""
    # A missing/falsy checkout_sha means the tag was deleted.
    if payload.get("checkout_sha"):
        tag_action = "pushed"
    else:
        tag_action = "removed"
    return get_push_tag_event_message(
        get_user_name(payload),
        get_tag_name(payload),
        action=tag_action,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_issue_created_event_body(payload: Dict[str, Any], include_title: bool = False) -> str:
    """Build the message body for a newly opened (possibly confidential) issue."""
    description = payload["object_attributes"].get("description")
    if description is not None:
        # Filter out multiline hidden comments, then trim trailing whitespace.
        description = re.sub("<!--.*?-->", "", description, 0, re.DOTALL).rstrip()

    issue_title = payload["object_attributes"].get("title") if include_title else None
    return get_issue_event_message(
        get_issue_user_name(payload),
        "created",
        get_object_url(payload),
        payload["object_attributes"].get("iid"),
        description,
        assignees=replace_assignees_username_with_name(get_assignees(payload)),
        title=issue_title,
    )
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_issue_event_body(payload: Dict[str, Any], action: str, include_title: bool = False) -> str:
    """Build the message body for an issue state change (close/reopen/update)."""
    attributes = payload["object_attributes"]
    return get_issue_event_message(
        get_issue_user_name(payload),
        action,
        get_object_url(payload),
        attributes.get("iid"),
        title=attributes.get("title") if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_merge_request_updated_event_body(
    payload: Dict[str, Any], include_title: bool = False
) -> str:
    """Build the body for an MR update: either new commits or other edits."""
    # "oldrev" is present only when the update pushed new commits to the MR.
    if payload["object_attributes"].get("oldrev"):
        return get_merge_request_event_body(
            payload, "added commit(s) to", include_title=include_title
        )
    return get_merge_request_open_or_updated_body(
        payload, "updated", include_title=include_title
    )
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_merge_request_event_body(
    payload: Dict[str, Any], action: str, include_title: bool = False
) -> str:
    """Build a short message body for a merge-request state change."""
    merge_request = payload["object_attributes"]
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get("url"),
        merge_request.get("iid"),
        type="MR",
        title=merge_request.get("title") if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_merge_request_open_or_updated_body(
    payload: Dict[str, Any], action: str, include_title: bool = False
) -> str:
    """Build a detailed MR message body (branches, description, assignees)."""
    merge_request = payload["object_attributes"]
    assignees = replace_assignees_username_with_name(get_assignees(payload))
    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        merge_request.get("url"),
        merge_request.get("iid"),
        merge_request.get("source_branch"),
        merge_request.get("target_branch"),
        merge_request.get("description"),
        assignees=assignees,
        type="MR",
        title=merge_request.get("title") if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-21 18:02:28 +02:00
|
|
|
def get_assignees(payload: Dict[str, Any]) -> List[Dict[str, str]]:
    """Return the payload's assignees as a list, normalizing all variants.

    GitLab sends either an "assignees" list, a single "assignee" object,
    or neither; always return a (possibly empty) list.
    """
    assignees = payload.get("assignees")
    if assignees is not None:
        return assignees
    single_assignee = payload.get("assignee")
    return [] if single_assignee is None else [single_assignee]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-21 18:02:28 +02:00
|
|
|
def replace_assignees_username_with_name(assignees: List[Dict[str, str]]) -> List[Dict[str, str]]:
    """Replace the username of each assignee with their (full) name.

    This is a hack-like adaptor so that when assignees are passed to
    `get_pull_request_event_message` we can use the assignee's name
    and not their username (for more consistency).  Mutates and returns
    the given list.
    """
    for assignee_record in assignees:
        assignee_record["username"] = assignee_record["name"]
    return assignees
|
2016-10-19 23:49:23 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_commented_commit_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body for a comment left on a commit."""
    comment = payload["object_attributes"]
    action = "[commented]({})".format(comment["url"])
    commit = payload["commit"]
    return get_commits_comment_action_message(
        get_issue_user_name(payload),
        action,
        commit.get("url"),
        commit.get("id"),
        comment["note"],
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_commented_merge_request_event_body(
    payload: Dict[str, Any], include_title: bool = False
) -> str:
    """Build the message body for a comment left on a merge request."""
    comment = payload["object_attributes"]
    action = "[commented]({}) on".format(comment["url"])
    merge_request = payload["merge_request"]
    url = "{}/merge_requests/{}".format(
        payload["project"].get("web_url"),
        merge_request.get("iid"),
    )

    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        url,
        merge_request.get("iid"),
        message=comment["note"],
        type="MR",
        title=merge_request.get("title") if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_commented_issue_event_body(payload: Dict[str, Any], include_title: bool = False) -> str:
    """Build the message body for a comment left on an issue."""
    comment = payload["object_attributes"]
    action = "[commented]({}) on".format(comment["url"])
    issue = payload["issue"]
    url = "{}/issues/{}".format(
        payload["project"].get("web_url"),
        issue.get("iid"),
    )

    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        url,
        issue.get("iid"),
        message=comment["note"],
        type="Issue",
        title=issue.get("title") if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_commented_snippet_event_body(payload: Dict[str, Any], include_title: bool = False) -> str:
    """Build the message body for a comment left on a snippet."""
    comment = payload["object_attributes"]
    action = "[commented]({}) on".format(comment["url"])
    snippet = payload["snippet"]
    url = "{}/snippets/{}".format(
        payload["project"].get("web_url"),
        snippet.get("id"),
    )

    return get_pull_request_event_message(
        get_issue_user_name(payload),
        action,
        url,
        snippet.get("id"),
        message=comment["note"],
        type="Snippet",
        title=snippet.get("title") if include_title else None,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_wiki_page_event_body(payload: Dict[str, Any], action: str) -> str:
    """Build the message body for a wiki page creation or update."""
    attributes = payload["object_attributes"]
    return '{} {} [Wiki Page "{}"]({}).'.format(
        get_issue_user_name(payload),
        action,
        attributes.get("title"),
        attributes.get("url"),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_build_hook_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body describing a CI build's status change."""
    build_status = payload.get("build_status")
    # The two "starting" states get friendlier phrasing; any other status
    # is reported verbatim.
    friendly_actions = {
        "created": "was created",
        "running": "started",
    }
    action = friendly_actions.get(build_status, f"changed status to {build_status}")
    return "Build {} from {} stage {}.".format(
        payload.get("build_name"),
        payload.get("build_stage"),
        action,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_test_event_body(payload: Dict[str, Any]) -> str:
    """Build the confirmation message sent when the webhook is tested."""
    repo = get_repo_name(payload)
    return f"Webhook for **{repo}** has been configured successfully! :tada:"
|
2017-11-15 04:20:21 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_pipeline_event_body(payload: Dict[str, Any]) -> str:
    """Build the message body summarizing a pipeline run and its builds."""
    pipeline_status = payload["object_attributes"].get("status")
    if pipeline_status == "pending":
        action = "was created"
    elif pipeline_status == "running":
        action = "started"
    else:
        action = f"changed status to {pipeline_status}"

    project_homepage = get_project_homepage(payload)
    pipeline_url = "{}/pipelines/{}".format(
        project_homepage,
        payload["object_attributes"].get("id"),
    )

    build_lines = []
    for build in payload["builds"]:
        build_url = "{}/-/jobs/{}".format(project_homepage, build.get("id"))
        artifact_filename = build.get("artifacts_file", {}).get("filename", None)
        if artifact_filename:
            artifact_download_url = f"{build_url}/artifacts/download"
            artifact_browse_url = f"{build_url}/artifacts/browse"
            artifact_string = f" * built artifact: *{artifact_filename}* [[Browse]({artifact_browse_url})|[Download]({artifact_download_url})]\n"
        else:
            artifact_string = ""
        build_lines.append(
            "* [{}]({}) - {}\n{}".format(
                build.get("name"),
                build_url,
                build.get("status"),
                artifact_string,
            )
        )

    # The accumulated list ends with a newline; drop it before appending
    # the closing period.
    builds_status = "".join(build_lines)
    return "[Pipeline ({})]({}) {} with build(s):\n{}.".format(
        payload["object_attributes"].get("id"),
        pipeline_url,
        action,
        builds_status[:-1],
    )
|
2016-09-28 23:06:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_repo_name(payload: Dict[str, Any]) -> str:
    """Return the repository name for any GitLab payload shape.

    Job Hook payloads don't have a `project` section, but the repository
    name is accessible from the `repository` section.
    """
    if "project" not in payload:
        return payload["repository"]["name"]
    return payload["project"]["name"]
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_user_name(payload: Dict[str, Any]) -> str:
    """Return the top-level `user_name` field of the payload."""
    user_name: str = payload["user_name"]
    return user_name
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_issue_user_name(payload: Dict[str, Any]) -> str:
    """Return the acting user's name from the payload's `user` object."""
    user = payload["user"]
    return user["name"]
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-22 15:15:39 +02:00
|
|
|
def get_project_homepage(payload: Dict[str, Any]) -> str:
    """Return the project's web URL, falling back to `repository` data."""
    if "project" not in payload:
        return payload["repository"]["homepage"]
    return payload["project"]["web_url"]
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_branch_name(payload: Dict[str, Any]) -> str:
    """Strip the refs/heads/ prefix from the payload's ref."""
    ref: str = payload["ref"]
    return ref.replace("refs/heads/", "")
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_tag_name(payload: Dict[str, Any]) -> str:
    """Strip the refs/tags/ prefix from the payload's ref."""
    ref: str = payload["ref"]
    return ref.replace("refs/tags/", "")
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:43:34 +02:00
|
|
|
def get_object_url(payload: Dict[str, Any]) -> str:
    """Return the URL of the object this event concerns."""
    attributes = payload["object_attributes"]
    return attributes["url"]
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-08-18 20:57:50 +02:00
|
|
|
# Maps a normalized GitLab event title (as produced by get_event, i.e. the
# X-Gitlab-Event header optionally suffixed with the payload's action or
# noteable type) to the function that formats its message body.  Handlers
# needing a fixed action are pre-bound with functools.partial.
EVENT_FUNCTION_MAPPER = {
    "Push Hook": get_push_event_body,
    "Tag Push Hook": get_tag_push_event_body,
    "Test Hook": get_test_event_body,
    "Issue Hook open": get_issue_created_event_body,
    "Issue Hook close": partial(get_issue_event_body, action="closed"),
    "Issue Hook reopen": partial(get_issue_event_body, action="reopened"),
    "Issue Hook update": partial(get_issue_event_body, action="updated"),
    "Confidential Issue Hook open": get_issue_created_event_body,
    "Confidential Issue Hook close": partial(get_issue_event_body, action="closed"),
    "Confidential Issue Hook reopen": partial(get_issue_event_body, action="reopened"),
    "Confidential Issue Hook update": partial(get_issue_event_body, action="updated"),
    "Note Hook Commit": get_commented_commit_event_body,
    "Note Hook MergeRequest": get_commented_merge_request_event_body,
    "Note Hook Issue": get_commented_issue_event_body,
    "Confidential Note Hook Issue": get_commented_issue_event_body,
    "Note Hook Snippet": get_commented_snippet_event_body,
    "Merge Request Hook approved": partial(get_merge_request_event_body, action="approved"),
    "Merge Request Hook unapproved": partial(get_merge_request_event_body, action="unapproved"),
    "Merge Request Hook open": partial(get_merge_request_open_or_updated_body, action="created"),
    "Merge Request Hook update": get_merge_request_updated_event_body,
    "Merge Request Hook merge": partial(get_merge_request_event_body, action="merged"),
    "Merge Request Hook close": partial(get_merge_request_event_body, action="closed"),
    "Merge Request Hook reopen": partial(get_merge_request_event_body, action="reopened"),
    "Wiki Page Hook create": partial(get_wiki_page_event_body, action="created"),
    "Wiki Page Hook update": partial(get_wiki_page_event_body, action="updated"),
    "Job Hook": get_build_hook_event_body,
    "Build Hook": get_build_hook_event_body,
    "Pipeline Hook": get_pipeline_event_body,
}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-10-23 02:43:28 +02:00
|
|
|
@webhook_view("GitLab")
@has_request_variables
def api_gitlab_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: Dict[str, Any] = REQ(argument_type="body"),
    branches: Optional[str] = REQ(default=None),
    use_merge_request_title: bool = REQ(default=True, json_validator=check_bool),
    user_specified_topic: Optional[str] = REQ("topic", default=None),
) -> HttpResponse:
    """Entry point for incoming GitLab webhook requests.

    Normalizes the event name, formats a message body via
    EVENT_FUNCTION_MAPPER, and forwards it to the configured stream/topic.
    A None event (push to a filtered-out branch) is silently ignored.
    """
    event = get_event(request, payload, branches)
    if event is not None:
        event_body_function = get_body_based_on_event(event)
        # Only pass include_title to handlers whose signature accepts it.
        if "include_title" in signature(event_body_function).parameters:
            body = event_body_function(
                payload,
                include_title=user_specified_topic is not None,
            )
        else:
            body = event_body_function(payload)

        # Add a link to the project if a custom topic is set
        if user_specified_topic:
            project_url = f"[{get_repo_name(payload)}]({get_project_homepage(payload)})"
            body = f"[{project_url}] {body}"

        topic = get_subject_based_on_event(event, payload, use_merge_request_title)
        check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-04 07:47:46 +01:00
|
|
|
def get_body_based_on_event(event: str) -> Any:
    """Look up the body-formatting function registered for this event."""
    handler = EVENT_FUNCTION_MAPPER[event]
    return handler
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_subject_based_on_event(
    event: str, payload: Dict[str, Any], use_merge_request_title: bool
) -> str:
    """Choose the Zulip topic for an event based on its type and payload."""
    if event == "Push Hook":
        return f"{get_repo_name(payload)} / {get_branch_name(payload)}"
    elif event in ("Job Hook", "Build Hook"):
        return "{} / {}".format(payload["repository"].get("name"), get_branch_name(payload))
    elif event == "Pipeline Hook":
        branch = payload["object_attributes"].get("ref").replace("refs/heads/", "")
        return f"{get_repo_name(payload)} / {branch}"
    elif event.startswith("Merge Request Hook"):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type="MR",
            id=payload["object_attributes"].get("iid"),
            title=payload["object_attributes"].get("title") if use_merge_request_title else "",
        )
    elif event.startswith(("Issue Hook", "Confidential Issue Hook")):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type="Issue",
            id=payload["object_attributes"].get("iid"),
            title=payload["object_attributes"].get("title"),
        )
    elif event in ("Note Hook Issue", "Confidential Note Hook Issue"):
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type="Issue",
            id=payload["issue"].get("iid"),
            title=payload["issue"].get("title"),
        )
    elif event == "Note Hook MergeRequest":
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type="MR",
            id=payload["merge_request"].get("iid"),
            title=payload["merge_request"].get("title") if use_merge_request_title else "",
        )
    elif event == "Note Hook Snippet":
        return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=get_repo_name(payload),
            type="Snippet",
            id=payload["snippet"].get("id"),
            title=payload["snippet"].get("title"),
        )
    # Fall back to just the repository name for any other event.
    return get_repo_name(payload)
|
2016-09-22 20:15:06 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_event(
    request: HttpRequest, payload: Dict[str, Any], branches: Optional[str]
) -> Optional[str]:
    """Determine the normalized event name for this request.

    Returns None when the event is a push to a branch excluded by the
    caller's `branches` filter; raises UnsupportedWebhookEventType when
    the event has no registered handler in EVENT_FUNCTION_MAPPER.
    """
    event = validate_extract_webhook_http_header(request, "X_GITLAB_EVENT", "GitLab")
    if event == "System Hook":
        # Convert the event name to a GitLab event title
        event_name = payload.get("event_name", payload.get("object_kind"))
        event = event_name.split("__")[0].replace("_", " ").title()
        event = f"{event} Hook"
    if event in ["Confidential Issue Hook", "Issue Hook", "Merge Request Hook", "Wiki Page Hook"]:
        action = payload["object_attributes"].get("action", "open")
        event = f"{event} {action}"
    elif event in ["Confidential Note Hook", "Note Hook"]:
        action = payload["object_attributes"].get("noteable_type")
        event = f"{event} {action}"
    elif event == "Push Hook":
        if branches is not None:
            branch = get_branch_name(payload)
            # Ignore pushes to branches the integration isn't watching.
            if branch not in branches:
                return None

    # Membership test directly against the dict — the previous
    # `event in list(EVENT_FUNCTION_MAPPER.keys())` built a throwaway list.
    if event in EVENT_FUNCTION_MAPPER:
        return event

    raise UnsupportedWebhookEventType(event)
|