"""Taiga integration for Zulip.

Tips for notification output:

*Text formatting*: if a property has changed, the new value should
always be in bold; otherwise the subject of the US/task should be in
bold.
"""

from typing import Dict, List, Optional, Tuple, Union

from django.http import HttpRequest, HttpResponse
from typing_extensions import TypeAlias

from zerver.decorator import webhook_view
from zerver.lib.response import json_success
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
from zerver.lib.validator import WildValue, check_bool, check_none_or, check_string
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile

EventType: TypeAlias = Dict[str, Union[str, Dict[str, Optional[Union[str, bool]]]]]
ReturnType: TypeAlias = Tuple[WildValue, WildValue]
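# An EventType's "type" and "event" select a template from the `templates`
# table below; its "values" dict supplies the placeholders used to format
# that template. ReturnType pairs the untamed "from" and "to" values of a
# change diff.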


@webhook_view("Taiga")
@typed_endpoint
def api_taiga_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    message: JsonBodyPayload[WildValue],
) -> HttpResponse:
    parsed_events = parse_message(message)
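    # Sorting the rendered lines keeps the notification deterministic when
    # a single payload carries several changes.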
    content = "".join(sorted(generate_content(event) + "\n" for event in parsed_events))
    topic_name = "General"
    if message["data"].get("milestone") and "name" in message["data"]["milestone"]:
        topic_name = message["data"]["milestone"]["name"].tame(check_string)

    check_send_webhook_message(request, user_profile, topic_name, content)

    return json_success(request)
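

# Maps a Taiga object type and event name to the Markdown template used to
# render its notification line.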
templates = {
    "epic": {
        "create": "[{user}]({user_link}) created epic {subject}.",
        "set_assigned_to": "[{user}]({user_link}) assigned epic {subject} to {new}.",
        "unset_assigned_to": "[{user}]({user_link}) unassigned epic {subject}.",
        "changed_assigned_to": "[{user}]({user_link}) reassigned epic {subject}"
        " from {old} to {new}.",
        "blocked": "[{user}]({user_link}) blocked epic {subject}.",
        "unblocked": "[{user}]({user_link}) unblocked epic {subject}.",
        "changed_status": "[{user}]({user_link}) changed status of epic {subject}"
        " from {old} to {new}.",
        "renamed": "[{user}]({user_link}) renamed epic from **{old}** to **{new}**.",
        "description_diff": "[{user}]({user_link}) updated description of epic {subject}.",
        "commented": "[{user}]({user_link}) commented on epic {subject}.",
        "delete": "[{user}]({user_link}) deleted epic {subject}.",
    },
    "relateduserstory": {
        "create": (
            "[{user}]({user_link}) added a related user story"
            " {userstory_subject} to the epic {epic_subject}."
        ),
        "delete": (
            "[{user}]({user_link}) removed a related user story"
            " {userstory_subject} from the epic {epic_subject}."
        ),
    },
    "userstory": {
        "create": "[{user}]({user_link}) created user story {subject}.",
        "set_assigned_to": "[{user}]({user_link}) assigned user story {subject} to {new}.",
        "unset_assigned_to": "[{user}]({user_link}) unassigned user story {subject}.",
        "changed_assigned_to": "[{user}]({user_link}) reassigned user story {subject}"
        " from {old} to {new}.",
        "points": "[{user}]({user_link}) changed estimation of user story {subject}.",
        "blocked": "[{user}]({user_link}) blocked user story {subject}.",
        "unblocked": "[{user}]({user_link}) unblocked user story {subject}.",
        "set_milestone": "[{user}]({user_link}) added user story {subject} to sprint {new}.",
        "unset_milestone": "[{user}]({user_link}) removed user story {subject} from sprint {old}.",
        "changed_milestone": "[{user}]({user_link}) changed sprint of user story {subject} from {old}"
        " to {new}.",
        "changed_status": "[{user}]({user_link}) changed status of user story {subject}"
        " from {old} to {new}.",
        "closed": "[{user}]({user_link}) closed user story {subject}.",
        "reopened": "[{user}]({user_link}) reopened user story {subject}.",
        "renamed": "[{user}]({user_link}) renamed user story from {old} to **{new}**.",
        "description_diff": "[{user}]({user_link}) updated description of user story {subject}.",
        "commented": "[{user}]({user_link}) commented on user story {subject}.",
        "delete": "[{user}]({user_link}) deleted user story {subject}.",
        "due_date": "[{user}]({user_link}) changed due date of user story {subject}"
        " from {old} to {new}.",
        "set_due_date": "[{user}]({user_link}) set due date of user story {subject} to {new}.",
    },
    "milestone": {
        "create": "[{user}]({user_link}) created sprint {subject}.",
        "renamed": "[{user}]({user_link}) renamed sprint from {old} to **{new}**.",
        "estimated_start": "[{user}]({user_link}) changed estimated start of sprint {subject}"
        " from {old} to {new}.",
        "estimated_finish": "[{user}]({user_link}) changed estimated finish of sprint {subject}"
        " from {old} to {new}.",
        "set_estimated_start": "[{user}]({user_link}) set estimated start of sprint {subject}"
        " to {new}.",
        "set_estimated_finish": "[{user}]({user_link}) set estimated finish of sprint {subject}"
        " to {new}.",
        "delete": "[{user}]({user_link}) deleted sprint {subject}.",
    },
    "task": {
        "create": "[{user}]({user_link}) created task {subject}.",
        "set_assigned_to": "[{user}]({user_link}) assigned task {subject} to {new}.",
        "unset_assigned_to": "[{user}]({user_link}) unassigned task {subject}.",
        "changed_assigned_to": "[{user}]({user_link}) reassigned task {subject}"
        " from {old} to {new}.",
        "blocked": "[{user}]({user_link}) blocked task {subject}.",
        "unblocked": "[{user}]({user_link}) unblocked task {subject}.",
        "changed_status": "[{user}]({user_link}) changed status of task {subject}"
        " from {old} to {new}.",
        "renamed": "[{user}]({user_link}) renamed task {old} to **{new}**.",
        "description_diff": "[{user}]({user_link}) updated description of task {subject}.",
        "set_milestone": "[{user}]({user_link}) added task {subject} to sprint {new}.",
        "commented": "[{user}]({user_link}) commented on task {subject}.",
        "delete": "[{user}]({user_link}) deleted task {subject}.",
        "changed_us": "[{user}]({user_link}) moved task {subject} from user story {old} to {new}.",
        "due_date": "[{user}]({user_link}) changed due date of task {subject} from {old} to {new}.",
        "set_due_date": "[{user}]({user_link}) set due date of task {subject} to {new}.",
    },
    "issue": {
        "create": "[{user}]({user_link}) created issue {subject}.",
        "set_assigned_to": "[{user}]({user_link}) assigned issue {subject} to {new}.",
        "unset_assigned_to": "[{user}]({user_link}) unassigned issue {subject}.",
        "changed_assigned_to": "[{user}]({user_link}) reassigned issue {subject}"
        " from {old} to {new}.",
        "set_milestone": "[{user}]({user_link}) added issue {subject} to sprint {new}.",
        "unset_milestone": "[{user}]({user_link}) detached issue {subject} from sprint {old}.",
        "changed_priority": "[{user}]({user_link}) changed priority of issue "
        "{subject} from {old} to {new}.",
        "changed_severity": "[{user}]({user_link}) changed severity of issue "
        "{subject} from {old} to {new}.",
        "changed_status": "[{user}]({user_link}) changed status of issue {subject}"
        " from {old} to {new}.",
        "changed_type": "[{user}]({user_link}) changed type of issue {subject} from {old} to {new}.",
        "renamed": "[{user}]({user_link}) renamed issue {old} to **{new}**.",
        "description_diff": "[{user}]({user_link}) updated description of issue {subject}.",
        "commented": "[{user}]({user_link}) commented on issue {subject}.",
        "delete": "[{user}]({user_link}) deleted issue {subject}.",
        "due_date": "[{user}]({user_link}) changed due date of issue {subject}"
        " from {old} to {new}.",
        "set_due_date": "[{user}]({user_link}) set due date of issue {subject} to {new}.",
        "blocked": "[{user}]({user_link}) blocked issue {subject}.",
        "unblocked": "[{user}]({user_link}) unblocked issue {subject}.",
    },
    "webhook_test": {
        "test": "[{user}]({user_link}) triggered a test of the Taiga integration.",
    },
}


def get_old_and_new_values(change_type: str, message: WildValue) -> ReturnType:
    """Parses the payload and finds the previous and current values of change_type."""
    old = message["change"]["diff"][change_type].get("from")
    new = message["change"]["diff"][change_type].get("to")
    return old, new
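

# For illustration (hypothetical payload fragment), a status change arrives
# roughly as:
#   {"change": {"diff": {"status": {"from": "New", "to": "In progress"}}}}
# so get_old_and_new_values("status", message) returns the WildValues
# wrapping "New" and "In progress".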


def parse_comment(
    message: WildValue,
) -> EventType:
    """Parses a comment on an issue, task, or user story (US)."""
    return {
        "event": "commented",
        "type": message["type"].tame(check_string),
        "values": {
            "user": get_owner_name(message),
            "user_link": get_owner_link(message),
            "subject": get_subject(message),
        },
    }


def parse_create_or_delete(
    message: WildValue,
) -> EventType:
    """Parses a create or delete event."""
    if message["type"].tame(check_string) == "relateduserstory":
        return {
            "type": message["type"].tame(check_string),
            "event": message["action"].tame(check_string),
            "values": {
                "user": get_owner_name(message),
                "user_link": get_owner_link(message),
                "epic_subject": get_epic_subject(message),
                "userstory_subject": get_userstory_subject(message),
            },
        }

    return {
        "type": message["type"].tame(check_string),
        "event": message["action"].tame(check_string),
        "values": {
            "user": get_owner_name(message),
            "user_link": get_owner_link(message),
            "subject": get_subject(message),
        },
    }


def parse_change_event(change_type: str, message: WildValue) -> Optional[EventType]:
    """Parses a change event."""
    evt: EventType = {}
    values: Dict[str, Optional[Union[str, bool]]] = {
        "user": get_owner_name(message),
        "user_link": get_owner_link(message),
        "subject": get_subject(message),
    }

    if change_type in ["description_diff", "points"]:
        event_type = change_type

    elif change_type in ["milestone", "assigned_to"]:
        old, new = get_old_and_new_values(change_type, message)
        tamed_old = old.tame(check_none_or(check_string))
        tamed_new = new.tame(check_none_or(check_string))
        if not tamed_old:
            event_type = "set_" + change_type
            values["new"] = tamed_new
        elif not tamed_new:
            event_type = "unset_" + change_type
            values["old"] = tamed_old
        else:
            event_type = "changed_" + change_type
            values.update(old=tamed_old, new=tamed_new)

    elif change_type == "is_blocked":
        if message["change"]["diff"]["is_blocked"]["to"].tame(check_bool):
            event_type = "blocked"
        else:
            event_type = "unblocked"

    elif change_type == "is_closed":
        if message["change"]["diff"]["is_closed"]["to"].tame(check_bool):
            event_type = "closed"
        else:
            event_type = "reopened"

    elif change_type == "user_story":
        old, new = get_old_and_new_values(change_type, message)
        event_type = "changed_us"
        tamed_old = old.tame(check_none_or(check_string))
        tamed_new = new.tame(check_none_or(check_string))
        values.update(old=tamed_old, new=tamed_new)

    elif change_type in ["subject", "name"]:
        event_type = "renamed"
        old, new = get_old_and_new_values(change_type, message)
        tamed_old = old.tame(check_none_or(check_string))
        tamed_new = new.tame(check_none_or(check_string))
        values.update(old=tamed_old, new=tamed_new)

    elif change_type in ["estimated_finish", "estimated_start", "due_date"]:
        old, new = get_old_and_new_values(change_type, message)
        tamed_old = old.tame(check_none_or(check_string))
        tamed_new = new.tame(check_none_or(check_string))
        if not tamed_old:
            event_type = "set_" + change_type
            values["new"] = tamed_new
        elif tamed_old != tamed_new:
            event_type = change_type
            values.update(old=tamed_old, new=tamed_new)
        else:
            # The date hasn't changed.
            return None

    elif change_type in ["priority", "severity", "type", "status"]:
        event_type = "changed_" + change_type
        old, new = get_old_and_new_values(change_type, message)
        tamed_old = old.tame(check_none_or(check_string))
        tamed_new = new.tame(check_none_or(check_string))
        values.update(old=tamed_old, new=tamed_new)

    else:
        # This type of event is not supported.
        return None

    evt.update(type=message["type"].tame(check_string), event=event_type, values=values)
    return evt


def parse_webhook_test(
    message: WildValue,
) -> EventType:
    return {
        "type": "webhook_test",
        "event": "test",
        "values": {
            "user": get_owner_name(message),
            "user_link": get_owner_link(message),
            "end_type": "test",
        },
    }


def parse_message(
    message: WildValue,
) -> List[EventType]:
    """Parses the payload by delegating to specialized functions."""
    events: List[EventType] = []
    if message["action"].tame(check_string) in ["create", "delete"]:
        events.append(parse_create_or_delete(message))
    elif message["action"].tame(check_string) == "change":
        if message["change"]["diff"]:
            for value in message["change"]["diff"].keys():  # noqa: SIM118
                parsed_event = parse_change_event(value, message)
                if parsed_event:
                    events.append(parsed_event)
        if message["change"]["comment"].tame(check_string):
            events.append(parse_comment(message))
    elif message["action"].tame(check_string) == "test":
        events.append(parse_webhook_test(message))

    return events
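

# A single "change" payload can yield several events: editing the status and
# leaving a comment in one save produces both a change event and a comment
# event, each rendered on its own line.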


def generate_content(data: EventType) -> str:
    """Gets the template string and formats it with parsed data."""
    assert isinstance(data["type"], str) and isinstance(data["event"], str)
    template = templates[data["type"]][data["event"]]

    assert isinstance(data["values"], dict)
    content = template.format(**data["values"])
    return content
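

# For example (illustrative values):
#   generate_content({
#       "type": "task",
#       "event": "create",
#       "values": {"user": "Jan", "user_link": "https://…", "subject": "**Do it**"},
#   })
# returns "[Jan](https://…) created task **Do it**."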


def get_owner_name(message: WildValue) -> str:
    return message["by"]["full_name"].tame(check_string)


def get_owner_link(message: WildValue) -> str:
    return message["by"]["permalink"].tame(check_string)


def get_subject(message: WildValue) -> str:
    data = message["data"]

    subject = data.get("subject").tame(check_none_or(check_string))
    subject_to_use = subject if subject else data["name"].tame(check_string)

    if "permalink" in data:
        return "[" + subject_to_use + "](" + data["permalink"].tame(check_string) + ")"
    return "**" + subject_to_use + "**"


def get_epic_subject(message: WildValue) -> str:
    epic_data = message["data"]["epic"]
    if "permalink" in epic_data:
        return (
            "["
            + epic_data["subject"].tame(check_string)
            + "]("
            + epic_data["permalink"].tame(check_string)
            + ")"
        )
    return "**" + epic_data["subject"].tame(check_string) + "**"


def get_userstory_subject(message: WildValue) -> str:
    us_data = message["data"]["user_story"]
    if "permalink" in us_data:
        return (
            "["
            + us_data["subject"].tame(check_string)
            + "]("
            + us_data["permalink"].tame(check_string)
            + ")"
        )
    return "**" + us_data["subject"].tame(check_string) + "**"