2019-10-20 02:12:00 +02:00
|
|
|
# Webhooks for external integrations.
|
|
|
|
|
|
|
|
from django.db.models import Q
|
|
|
|
from django.http import HttpRequest, HttpResponse
|
|
|
|
|
2020-08-20 00:32:15 +02:00
|
|
|
from zerver.decorator import webhook_view
|
2022-11-17 09:30:48 +01:00
|
|
|
from zerver.lib.exceptions import UnsupportedWebhookEventTypeError
|
2019-10-20 02:12:00 +02:00
|
|
|
from zerver.lib.response import json_success
|
2023-09-27 19:01:31 +02:00
|
|
|
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
|
2023-08-12 09:34:31 +02:00
|
|
|
from zerver.lib.validator import WildValue, check_int, check_string
|
2020-08-19 22:14:40 +02:00
|
|
|
from zerver.lib.webhooks.common import check_send_webhook_message
|
2019-10-20 02:12:00 +02:00
|
|
|
from zerver.models import Realm, UserProfile
|
|
|
|
|
|
|
|
# Harbor event types we deliberately drop without notifying.  This
# constant is only ever used for membership tests (see
# api_harbor_webhook), so a frozenset gives O(1) lookups and makes the
# collection immutable.
IGNORED_EVENTS = frozenset(
    [
        "DOWNLOAD_CHART",
        "DELETE_CHART",
        "UPLOAD_CHART",
        "PULL_ARTIFACT",
        "DELETE_ARTIFACT",
        "SCANNING_FAILED",
    ]
)
|
|
|
|
|
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def guess_zulip_user_from_harbor(harbor_username: str, realm: Realm) -> UserProfile | None:
    """Best-effort mapping from a Harbor operator name to a Zulip user.

    Matches the user's full name (case-insensitive) or the beginning of
    their email address, restricted to active users in the given realm.
    Returns None when no user matches.
    """
    # QuerySet.first() returns None on an empty result set, replacing
    # the manual [0] + except IndexError pattern.  order_by("id") keeps
    # the choice deterministic when several users match.
    return (
        UserProfile.objects.filter(
            Q(full_name__iexact=harbor_username) | Q(email__istartswith=harbor_username),
            is_active=True,
            realm=realm,
        )
        .order_by("id")
        .first()
    )
|
|
|
|
|
|
|
|
|
2023-05-11 19:14:01 +02:00
|
|
|
def image_id(payload: WildValue) -> str:
    """Return a human-readable identifier for the artifact in the payload.

    Produces "repo:tag" when the first resource carries a tag, and
    "repo@digest" otherwise.
    """
    repo_name = payload["event_data"]["repository"]["repo_full_name"].tame(check_string)
    first_resource = payload["event_data"]["resources"][0]
    if "tag" in first_resource:
        suffix = ":" + first_resource["tag"].tame(check_string)
    else:
        suffix = "@" + first_resource["digest"].tame(check_string)
    return repo_name + suffix
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def handle_push_image_event(
    payload: WildValue, user_profile: UserProfile, operator_username: str
) -> str:
    """Render the notification text for a PUSH_ARTIFACT event."""
    pushed_image = image_id(payload)
    return f"{operator_username} pushed image `{pushed_image}`"
|
2019-10-20 02:12:00 +02:00
|
|
|
|
|
|
|
|
|
|
|
SCANNING_COMPLETED_TEMPLATE = """
|
2023-05-11 19:14:01 +02:00
|
|
|
Image scan completed for `{image_id}`. Vulnerabilities by severity:
|
2019-10-20 02:12:00 +02:00
|
|
|
|
|
|
|
{scan_results}
|
|
|
|
""".strip()
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def handle_scanning_completed_event(
    payload: WildValue, user_profile: UserProfile, operator_username: str
) -> str:
    """Render the notification text for a SCANNING_COMPLETED event.

    Raises UnsupportedWebhookEventTypeError when the payload does not
    contain the vulnerability report format we know how to parse.
    """
    report_mime_type = "application/vnd.security.vulnerability.report; version=1.1"
    scan_overview = payload["event_data"]["resources"][0]["scan_overview"]
    if report_mime_type not in scan_overview:
        raise UnsupportedWebhookEventTypeError(str(list(scan_overview.keys())))

    scan_summaries = scan_overview[report_mime_type]["summary"]["summary"]
    if len(scan_summaries) > 0:
        # One Markdown bullet per severity level, e.g. "* High: **2**".
        scan_results = "".join(
            f"* {severity}: **{count.tame(check_int)}**\n"
            for severity, count in scan_summaries.items()
        )
    else:
        scan_results = "None\n"

    return SCANNING_COMPLETED_TEMPLATE.format(
        image_id=image_id(payload),
        scan_results=scan_results,
    )
|
|
|
|
|
|
|
|
|
|
|
|
# Maps a Harbor webhook event "type" to the function that renders its
# notification text.  Events missing from both this dict and
# IGNORED_EVENTS cause api_harbor_webhook to raise
# UnsupportedWebhookEventTypeError.
EVENT_FUNCTION_MAPPER = {
    "PUSH_ARTIFACT": handle_push_image_event,
    "SCANNING_COMPLETED": handle_scanning_completed_event,
}

# Advertised to the webhook framework for event filtering/documentation.
ALL_EVENT_TYPES = list(EVENT_FUNCTION_MAPPER.keys())
|
2019-10-20 02:12:00 +02:00
|
|
|
|
2021-07-16 11:40:46 +02:00
|
|
|
|
|
|
|
@webhook_view("Harbor", all_event_types=ALL_EVENT_TYPES)
@typed_endpoint
def api_harbor_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    payload: JsonBodyPayload[WildValue],
) -> HttpResponse:
    """Process a Harbor webhook notification and post a Zulip message.

    Ignored event types return success silently; unknown event types
    raise UnsupportedWebhookEventTypeError.
    """
    operator = payload["operator"].tame(check_string)
    # Default display form: bold the raw Harbor username.
    operator_username = f"**{operator}**"

    # Harbor uses the operator "auto" for system-initiated events
    # (e.g. scheduled scans); only try to map real operators to Zulip
    # users.  Note: we must compare/look up the *raw* operator name —
    # previously the bolded "**name**" form was used here, so the
    # "auto" check always passed and the user lookup could never match.
    if operator != "auto":
        operator_profile = guess_zulip_user_from_harbor(operator, user_profile.realm)

        if operator_profile:
            # Upgrade to a Zulip @-mention when we found a matching user.
            operator_username = f"@**{operator_profile.full_name}**"  # nocoverage

    event = payload["type"].tame(check_string)
    topic_name = payload["event_data"]["repository"]["repo_full_name"].tame(check_string)

    if event in IGNORED_EVENTS:
        return json_success(request)

    content_func = EVENT_FUNCTION_MAPPER.get(event)

    if content_func is None:
        raise UnsupportedWebhookEventTypeError(event)

    content: str = content_func(payload, user_profile, operator_username)

    check_send_webhook_message(
        request, user_profile, topic_name, content, event, unquote_url_parameters=True
    )
    return json_success(request)
|