import re
from typing import Optional, Protocol, Union

from django.http import HttpRequest, HttpResponse
from pydantic import Json

from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventTypeError
from zerver.lib.partial import partial
from zerver.lib.response import json_success
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
from zerver.lib.validator import WildValue, check_int, check_none_or, check_string
from zerver.lib.webhooks.common import (
    OptionalUserSpecifiedTopicStr,
    check_send_webhook_message,
    validate_extract_webhook_http_header,
)
from zerver.lib.webhooks.git import (
    EMPTY_SHA,
    TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE,
    get_commits_comment_action_message,
    get_issue_event_message,
    get_pull_request_event_message,
    get_push_commits_event_message,
    get_push_tag_event_message,
    get_remove_branch_event_message,
)
from zerver.models import UserProfile


def fixture_to_headers(fixture_name: str) -> dict[str, str]:
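    """Map a test fixture filename to the HTTP headers GitLab would send for it."""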
    if fixture_name.startswith("build"):
        return {}  # Since there are 2 possible event types.

    # Map "push_hook__push_commits_more_than_limit.json" into GitLab's
    # HTTP event title "Push Hook".
    return {"HTTP_X_GITLAB_EVENT": fixture_name.split("__")[0].replace("_", " ").title()}


def get_push_event_body(payload: WildValue, include_title: bool) -> str:
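    # An "after" SHA of all zeros (EMPTY_SHA) means the push deleted the
    # branch, so report it as a branch removal rather than a normal push.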
    after = payload.get("after")
    if after:
        stringified_after = after.tame(check_string)
        if stringified_after == EMPTY_SHA:
            return get_remove_branch_event_body(payload)
    return get_normal_push_event_body(payload)


def get_normal_push_event_body(payload: WildValue) -> str:
    compare_url = "{}/-/compare/{}...{}".format(
        get_project_homepage(payload),
        payload["before"].tame(check_string),
        payload["after"].tame(check_string),
    )

    commits = [
        {
            "name": commit["author"]["name"].tame(check_string),
            "sha": commit["id"].tame(check_string),
            "message": commit["message"].tame(check_string),
            "url": commit["url"].tame(check_string),
        }
        for commit in payload["commits"]
    ]

    return get_push_commits_event_message(
        get_user_name(payload),
        compare_url,
        get_branch_name(payload),
        commits,
    )


def get_remove_branch_event_body(payload: WildValue) -> str:
    return get_remove_branch_event_message(
        get_user_name(payload),
        get_branch_name(payload),
    )


def get_tag_push_event_body(payload: WildValue, include_title: bool) -> str:
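    # "checkout_sha" is absent when the tag was deleted, so that case is
    # reported as a removal.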
    return get_push_tag_event_message(
        get_user_name(payload),
        get_tag_name(payload),
        action="pushed" if payload.get("checkout_sha") else "removed",
    )


def get_issue_created_event_body(payload: WildValue, include_title: bool) -> str:
    description = payload["object_attributes"].get("description")
    # Filter out multiline hidden comments
    if description:
        stringified_description = description.tame(check_string)
        stringified_description = re.sub(
            r"<!--.*?-->", "", stringified_description, count=0, flags=re.DOTALL
        )
        stringified_description = stringified_description.rstrip()
    else:
        stringified_description = None

    return get_issue_event_message(
        user_name=get_issue_user_name(payload),
        action="created",
        url=get_object_url(payload),
        number=payload["object_attributes"]["iid"].tame(check_int),
        message=stringified_description,
        assignees=replace_assignees_username_with_name(get_assignees(payload)),
        title=payload["object_attributes"]["title"].tame(check_string) if include_title else None,
    )


def get_issue_event_body(action: str, payload: WildValue, include_title: bool) -> str:
    return get_issue_event_message(
        user_name=get_issue_user_name(payload),
        action=action,
        url=get_object_url(payload),
        number=payload["object_attributes"]["iid"].tame(check_int),
        title=payload["object_attributes"]["title"].tame(check_string) if include_title else None,
    )


def get_merge_request_updated_event_body(payload: WildValue, include_title: bool) -> str:
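    # "oldrev" appears in a merge request update payload when the update
    # pushed new commits to the source branch.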
    if payload["object_attributes"].get("oldrev"):
        return get_merge_request_event_body(
            "added commit(s) to",
            payload,
            include_title=include_title,
        )

    return get_merge_request_open_or_updated_body(
        "updated",
        payload,
        include_title=include_title,
    )


def get_merge_request_event_body(action: str, payload: WildValue, include_title: bool) -> str:
    pull_request = payload["object_attributes"]
    target_branch = None
    base_branch = None
    if action == "merged":
        target_branch = pull_request["source_branch"].tame(check_string)
        base_branch = pull_request["target_branch"].tame(check_string)

    return get_pull_request_event_message(
        user_name=get_issue_user_name(payload),
        action=action,
        url=pull_request["url"].tame(check_string),
        number=pull_request["iid"].tame(check_int),
        target_branch=target_branch,
        base_branch=base_branch,
        type="MR",
        title=payload["object_attributes"]["title"].tame(check_string) if include_title else None,
    )


def get_merge_request_open_or_updated_body(
    action: str, payload: WildValue, include_title: bool
) -> str:
    pull_request = payload["object_attributes"]
    return get_pull_request_event_message(
        user_name=get_issue_user_name(payload),
        action=action,
        url=pull_request["url"].tame(check_string),
        number=pull_request["iid"].tame(check_int),
        target_branch=(
            pull_request["source_branch"].tame(check_string) if action == "created" else None
        ),
        base_branch=(
            pull_request["target_branch"].tame(check_string) if action == "created" else None
        ),
        message=pull_request["description"].tame(check_none_or(check_string)),
        assignees=replace_assignees_username_with_name(get_assignees(payload)),
        type="MR",
        title=payload["object_attributes"]["title"].tame(check_string) if include_title else None,
    )


def get_assignees(payload: WildValue) -> Union[list[WildValue], WildValue]:
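    # The payload may carry a list under "assignees" or a single object
    # under "assignee"; normalize the single-assignee case to a list.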
    assignee_details = payload.get("assignees")
    if not assignee_details:
        single_assignee_details = payload.get("assignee")
        if not single_assignee_details:
            transformed_assignee_details = []
        else:
            transformed_assignee_details = [single_assignee_details]
        return transformed_assignee_details
    return assignee_details


def replace_assignees_username_with_name(
    assignees: Union[list[WildValue], WildValue],
) -> list[dict[str, str]]:
    """Replace the username of each assignee with their (full) name.

    This is a hack-like adaptor so that when assignees are passed to
    `get_pull_request_event_message` we can use the assignee's name
    and not their username (for more consistency).
    """
    formatted_assignees = []
    for assignee in assignees:
        formatted_assignee = {}
        formatted_assignee["username"] = assignee["name"].tame(check_string)
        formatted_assignees.append(formatted_assignee)
    return formatted_assignees


def get_commented_commit_event_body(payload: WildValue, include_title: bool) -> str:
    comment = payload["object_attributes"]
    action = "[commented]({})".format(comment["url"].tame(check_string))
    return get_commits_comment_action_message(
        get_issue_user_name(payload),
        action,
        payload["commit"]["url"].tame(check_string),
        payload["commit"]["id"].tame(check_string),
        comment["note"].tame(check_string),
    )


def get_commented_merge_request_event_body(payload: WildValue, include_title: bool) -> str:
    comment = payload["object_attributes"]
    action = "[commented]({}) on".format(comment["url"].tame(check_string))
    url = payload["merge_request"]["url"].tame(check_string)

    return get_pull_request_event_message(
        user_name=get_issue_user_name(payload),
        action=action,
        url=url,
        number=payload["merge_request"]["iid"].tame(check_int),
        message=comment["note"].tame(check_string),
        type="MR",
        title=payload["merge_request"]["title"].tame(check_string) if include_title else None,
    )


def get_commented_issue_event_body(payload: WildValue, include_title: bool) -> str:
    comment = payload["object_attributes"]
    action = "[commented]({}) on".format(comment["url"].tame(check_string))
    url = payload["issue"]["url"].tame(check_string)

    return get_pull_request_event_message(
        user_name=get_issue_user_name(payload),
        action=action,
        url=url,
        number=payload["issue"]["iid"].tame(check_int),
        message=comment["note"].tame(check_string),
        type="issue",
        title=payload["issue"]["title"].tame(check_string) if include_title else None,
    )


def get_commented_snippet_event_body(payload: WildValue, include_title: bool) -> str:
    comment = payload["object_attributes"]
    action = "[commented]({}) on".format(comment["url"].tame(check_string))
    # Snippet URL is only available in GitLab 16.1+
    if "url" in payload["snippet"]:
        url = payload["snippet"]["url"].tame(check_string)
    else:
        url = "{}/-/snippets/{}".format(
            payload["project"]["web_url"].tame(check_string),
            payload["snippet"]["id"].tame(check_int),
        )

    return get_pull_request_event_message(
        user_name=get_issue_user_name(payload),
        action=action,
        url=url,
        number=payload["snippet"]["id"].tame(check_int),
        message=comment["note"].tame(check_string),
        type="snippet",
        title=payload["snippet"]["title"].tame(check_string) if include_title else None,
    )


def get_wiki_page_event_body(action: str, payload: WildValue, include_title: bool) -> str:
    return '{} {} [wiki page "{}"]({}).'.format(
        get_issue_user_name(payload),
        action,
        payload["object_attributes"]["title"].tame(check_string),
        payload["object_attributes"]["url"].tame(check_string),
    )


def get_build_hook_event_body(payload: WildValue, include_title: bool) -> str:
    build_status = payload["build_status"].tame(check_string)
    if build_status == "created":
        action = "was created"
    elif build_status == "running":
        action = "started"
    else:
        action = f"changed status to {build_status}"
    return "Build {} from {} stage {}.".format(
        payload["build_name"].tame(check_string),
        payload["build_stage"].tame(check_string),
        action,
    )


def get_test_event_body(payload: WildValue, include_title: bool) -> str:
    return f"Webhook for **{get_repo_name(payload)}** has been configured successfully! :tada:"


def get_pipeline_event_body(payload: WildValue, include_title: bool) -> str:
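    # Render one summary line for the pipeline, followed by a bullet per
    # build, with artifact links when the build has an artifacts file.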
    pipeline_status = payload["object_attributes"]["status"].tame(check_string)
    if pipeline_status == "pending":
        action = "was created"
    elif pipeline_status == "running":
        action = "started"
    else:
        action = f"changed status to {pipeline_status}"

    project_homepage = get_project_homepage(payload)
    pipeline_url = "{}/-/pipelines/{}".format(
        project_homepage,
        payload["object_attributes"]["id"].tame(check_int),
    )

    builds_status = ""
    for build in payload["builds"]:
        build_url = "{}/-/jobs/{}".format(
            project_homepage,
            build["id"].tame(check_int),
        )
        artifact_filename = build.get("artifacts_file", {}).get("filename", None)
        if artifact_filename:
            artifact_download_url = f"{build_url}/artifacts/download"
            artifact_browse_url = f"{build_url}/artifacts/browse"
            artifact_string = f" * built artifact: *{artifact_filename.tame(check_string)}* [[Browse]({artifact_browse_url})|[Download]({artifact_download_url})]\n"
        else:
            artifact_string = ""
        builds_status += "* [{}]({}) - {}\n{}".format(
            build["name"].tame(check_string),
            build_url,
            build["status"].tame(check_string),
            artifact_string,
        )
    return "[Pipeline ({})]({}) {} with build(s):\n{}.".format(
        payload["object_attributes"]["id"].tame(check_int),
        pipeline_url,
        action,
        builds_status[:-1],
    )


def get_repo_name(payload: WildValue) -> str:
    if "project" in payload:
        return payload["project"]["name"].tame(check_string)

    # Apparently, Job Hook payloads don't have a `project` section,
    # but the repository name is accessible from the `repository`
    # section.
    return payload["repository"]["name"].tame(check_string)


def get_user_name(payload: WildValue) -> str:
    return payload["user_name"].tame(check_string)


def get_issue_user_name(payload: WildValue) -> str:
    return payload["user"]["name"].tame(check_string)


def get_project_homepage(payload: WildValue) -> str:
    if "project" in payload:
        return payload["project"]["web_url"].tame(check_string)
    return payload["repository"]["homepage"].tame(check_string)


def get_branch_name(payload: WildValue) -> str:
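    # A ref like "refs/heads/main" yields the branch name "main".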
    return payload["ref"].tame(check_string).replace("refs/heads/", "")


def get_tag_name(payload: WildValue) -> str:
    return payload["ref"].tame(check_string).replace("refs/tags/", "")


def get_object_url(payload: WildValue) -> str:
    return payload["object_attributes"]["url"].tame(check_string)


class EventFunction(Protocol):
    def __call__(self, payload: WildValue, include_title: bool) -> str: ...
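

# Keys are the "X-GitLab-Event" HTTP header value, suffixed for Issue,
# Merge Request, Wiki Page, and Note hooks with the action or noteable
# type that get_event() extracts from the payload.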
EVENT_FUNCTION_MAPPER: dict[str, EventFunction] = {
    "Push Hook": get_push_event_body,
    "Tag Push Hook": get_tag_push_event_body,
    "Test Hook": get_test_event_body,
    "Issue Hook open": get_issue_created_event_body,
    "Issue Hook close": partial(get_issue_event_body, "closed"),
    "Issue Hook reopen": partial(get_issue_event_body, "reopened"),
    "Issue Hook update": partial(get_issue_event_body, "updated"),
    "Confidential Issue Hook open": get_issue_created_event_body,
    "Confidential Issue Hook close": partial(get_issue_event_body, "closed"),
    "Confidential Issue Hook reopen": partial(get_issue_event_body, "reopened"),
    "Confidential Issue Hook update": partial(get_issue_event_body, "updated"),
    "Note Hook Commit": get_commented_commit_event_body,
    "Note Hook MergeRequest": get_commented_merge_request_event_body,
    "Note Hook Issue": get_commented_issue_event_body,
    "Confidential Note Hook Issue": get_commented_issue_event_body,
    "Note Hook Snippet": get_commented_snippet_event_body,
    "Merge Request Hook approved": partial(get_merge_request_event_body, "approved"),
    "Merge Request Hook unapproved": partial(get_merge_request_event_body, "unapproved"),
    "Merge Request Hook open": partial(get_merge_request_open_or_updated_body, "created"),
    "Merge Request Hook update": get_merge_request_updated_event_body,
    "Merge Request Hook merge": partial(get_merge_request_event_body, "merged"),
    "Merge Request Hook close": partial(get_merge_request_event_body, "closed"),
    "Merge Request Hook reopen": partial(get_merge_request_event_body, "reopened"),
    "Wiki Page Hook create": partial(get_wiki_page_event_body, "created"),
    "Wiki Page Hook update": partial(get_wiki_page_event_body, "updated"),
    "Job Hook": get_build_hook_event_body,
    "Build Hook": get_build_hook_event_body,
    "Pipeline Hook": get_pipeline_event_body,
}

ALL_EVENT_TYPES = list(EVENT_FUNCTION_MAPPER.keys())


@webhook_view("GitLab", all_event_types=ALL_EVENT_TYPES)
@typed_endpoint
def api_gitlab_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    payload: JsonBodyPayload[WildValue],
    branches: Optional[str] = None,
    use_merge_request_title: Json[bool] = True,
    user_specified_topic: OptionalUserSpecifiedTopicStr = None,
) -> HttpResponse:
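    # When the push is to a branch excluded by the "branches" setting,
    # get_event() returns None and no message is sent.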
|
2017-04-03 04:46:41 +02:00
|
|
|
event = get_event(request, payload, branches)
|
|
|
|
if event is not None:
|
2018-07-20 00:49:05 +02:00
|
|
|
event_body_function = get_body_based_on_event(event)
|
2022-05-11 08:52:22 +02:00
|
|
|
body = event_body_function(
|
|
|
|
payload,
|
|
|
|
include_title=user_specified_topic is not None,
|
|
|
|
)
|
2018-07-20 00:49:05 +02:00
|
|
|
|
2020-04-22 15:28:34 +02:00
|
|
|
# Add a link to the project if a custom topic is set
|
|
|
|
if user_specified_topic:
|
|
|
|
project_url = f"[{get_repo_name(payload)}]({get_project_homepage(payload)})"
|
|
|
|
body = f"[{project_url}] {body}"
|
|
|
|
|
2024-01-17 15:53:30 +01:00
|
|
|
topic_name = get_topic_based_on_event(event, payload, use_merge_request_title)
|
|
|
|
check_send_webhook_message(request, user_profile, topic_name, body, event)
|
2022-01-31 13:44:02 +01:00
|
|
|
return json_success(request)
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-05-11 08:52:22 +02:00
|
|
|
def get_body_based_on_event(event: str) -> EventFunction:
|
2016-08-18 20:57:50 +02:00
|
|
|
return EVENT_FUNCTION_MAPPER[event]


def get_topic_based_on_event(event: str, payload: WildValue, use_merge_request_title: bool) -> str:
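    # Push, job, and pipeline events use a "repo / branch" topic; issue,
    # MR, and snippet events use TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE;
    # anything else falls back to the repo name.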
|
2021-02-12 08:20:45 +01:00
|
|
|
if event == "Push Hook":
|
2020-06-09 00:25:09 +02:00
|
|
|
return f"{get_repo_name(payload)} / {get_branch_name(payload)}"
|
2023-07-22 01:15:10 +02:00
|
|
|
elif event in ("Job Hook", "Build Hook"):
|
2022-04-06 17:37:10 +02:00
|
|
|
return "{} / {}".format(
|
2022-04-09 11:29:33 +02:00
|
|
|
payload["repository"]["name"].tame(check_string), get_branch_name(payload)
|
2022-04-06 17:37:10 +02:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
elif event == "Pipeline Hook":
|
2020-04-09 21:51:58 +02:00
|
|
|
return "{} / {}".format(
|
2016-09-28 23:06:04 +02:00
|
|
|
get_repo_name(payload),
|
2022-04-09 11:29:33 +02:00
|
|
|
payload["object_attributes"]["ref"].tame(check_string).replace("refs/heads/", ""),
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
elif event.startswith("Merge Request Hook"):
|
2018-11-09 20:59:15 +01:00
|
|
|
return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
|
2016-10-11 18:54:48 +02:00
|
|
|
repo=get_repo_name(payload),
|
2021-02-12 08:20:45 +01:00
|
|
|
type="MR",
|
2022-04-09 11:29:33 +02:00
|
|
|
id=payload["object_attributes"]["iid"].tame(check_int),
|
2024-01-29 00:32:21 +01:00
|
|
|
title=(
|
|
|
|
payload["object_attributes"]["title"].tame(check_string)
|
|
|
|
if use_merge_request_title
|
|
|
|
else ""
|
|
|
|
),
|
2016-10-19 23:49:23 +02:00
|
|
|
)
|
2023-02-09 00:16:37 +01:00
|
|
|
elif event.startswith(("Issue Hook", "Confidential Issue Hook")):
|
2018-11-09 20:59:15 +01:00
|
|
|
return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
|
2016-10-19 23:49:23 +02:00
|
|
|
repo=get_repo_name(payload),
|
2021-05-10 07:02:14 +02:00
|
|
|
type="issue",
|
2022-04-09 11:29:33 +02:00
|
|
|
id=payload["object_attributes"]["iid"].tame(check_int),
|
|
|
|
title=payload["object_attributes"]["title"].tame(check_string),
|
2016-10-11 18:54:48 +02:00
|
|
|
)
|
2023-07-22 01:15:10 +02:00
|
|
|
elif event in ("Note Hook Issue", "Confidential Note Hook Issue"):
|
2018-11-09 20:59:15 +01:00
|
|
|
return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
|
2016-10-21 16:01:15 +02:00
|
|
|
repo=get_repo_name(payload),
|
2021-05-10 07:02:14 +02:00
|
|
|
type="issue",
|
2022-04-09 11:29:33 +02:00
|
|
|
id=payload["issue"]["iid"].tame(check_int),
|
|
|
|
title=payload["issue"]["title"].tame(check_string),
|
2016-10-21 16:01:15 +02:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
elif event == "Note Hook MergeRequest":
|
2018-11-09 20:59:15 +01:00
|
|
|
return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
|
2016-10-21 16:01:15 +02:00
|
|
|
repo=get_repo_name(payload),
|
2021-02-12 08:20:45 +01:00
|
|
|
type="MR",
|
2022-04-09 11:29:33 +02:00
|
|
|
id=payload["merge_request"]["iid"].tame(check_int),
|
2024-01-29 00:32:21 +01:00
|
|
|
title=(
|
|
|
|
payload["merge_request"]["title"].tame(check_string)
|
|
|
|
if use_merge_request_title
|
|
|
|
else ""
|
|
|
|
),
|
2016-10-21 16:01:15 +02:00
|
|
|
)
|
2016-10-21 17:53:33 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
elif event == "Note Hook Snippet":
|
2018-11-09 20:59:15 +01:00
|
|
|
return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
|
2016-10-21 17:53:33 +02:00
|
|
|
repo=get_repo_name(payload),
|
2021-05-10 07:02:14 +02:00
|
|
|
type="snippet",
|
2022-04-09 11:29:33 +02:00
|
|
|
id=payload["snippet"]["id"].tame(check_int),
|
|
|
|
title=payload["snippet"]["title"].tame(check_string),
|
2016-10-21 17:53:33 +02:00
|
|
|
)
|
2016-09-26 08:12:24 +02:00
|
|
|
return get_repo_name(payload)
|
2016-09-22 20:15:06 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-04-06 17:37:10 +02:00
|
|
|
def get_event(request: HttpRequest, payload: WildValue, branches: Optional[str]) -> Optional[str]:
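    """Derive the EVENT_FUNCTION_MAPPER key for this request.

    Returns None when a push is to a branch excluded by the "branches"
    setting; raises UnsupportedWebhookEventTypeError for unknown events.
    """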
|
2022-05-12 06:54:12 +02:00
|
|
|
event = validate_extract_webhook_http_header(request, "X-GitLab-Event", "GitLab")
|
2020-04-25 11:09:30 +02:00
|
|
|
if event == "System Hook":
|
2020-10-23 02:43:28 +02:00
|
|
|
# Convert the event name to a GitLab event title
|
2022-06-02 20:48:14 +02:00
|
|
|
if "event_name" in payload:
|
|
|
|
event_name = payload["event_name"].tame(check_string)
|
|
|
|
else:
|
|
|
|
event_name = payload["object_kind"].tame(check_string)
|
2020-04-25 11:09:30 +02:00
|
|
|
event = event_name.split("__")[0].replace("_", " ").title()
|
|
|
|
event = f"{event} Hook"
|
2021-02-12 08:20:45 +01:00
|
|
|
if event in ["Confidential Issue Hook", "Issue Hook", "Merge Request Hook", "Wiki Page Hook"]:
|
2022-04-06 17:37:10 +02:00
|
|
|
action = payload["object_attributes"].get("action", "open").tame(check_string)
|
2020-04-25 11:09:30 +02:00
|
|
|
event = f"{event} {action}"
|
2021-02-12 08:20:45 +01:00
|
|
|
elif event in ["Confidential Note Hook", "Note Hook"]:
|
2022-04-09 11:29:33 +02:00
|
|
|
action = payload["object_attributes"]["noteable_type"].tame(check_string)
|
2020-04-25 11:09:30 +02:00
|
|
|
event = f"{event} {action}"
|
2023-01-18 02:59:37 +01:00
|
|
|
elif event == "Push Hook" and branches is not None:
|
|
|
|
branch = get_branch_name(payload)
|
|
|
|
if branches.find(branch) == -1:
|
|
|
|
return None
|
2016-08-18 20:57:50 +02:00
|
|
|
|
2023-09-12 23:19:57 +02:00
|
|
|
if event in EVENT_FUNCTION_MAPPER:
|
2016-08-18 20:57:50 +02:00
|
|
|
return event
|
2018-05-22 16:46:45 +02:00
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
raise UnsupportedWebhookEventTypeError(event)