2021-12-17 07:03:22 +01:00
|
|
|
from typing import Callable, Dict, Iterable, Iterator, List, Optional
|
2018-06-19 01:54:57 +02:00
|
|
|
|
|
|
|
from django.http import HttpRequest, HttpResponse
|
|
|
|
|
2020-08-20 00:32:15 +02:00
|
|
|
from zerver.decorator import webhook_view
|
2022-11-17 09:30:48 +01:00
|
|
|
from zerver.lib.exceptions import UnsupportedWebhookEventTypeError
|
2024-04-29 23:20:36 +02:00
|
|
|
from zerver.lib.partial import partial
|
2018-06-19 01:54:57 +02:00
|
|
|
from zerver.lib.response import json_success
|
2023-09-27 19:01:31 +02:00
|
|
|
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
|
2021-12-17 07:03:22 +01:00
|
|
|
from zerver.lib.validator import (
|
|
|
|
WildValue,
|
|
|
|
check_bool,
|
|
|
|
check_int,
|
|
|
|
check_list,
|
|
|
|
check_none_or,
|
|
|
|
check_string,
|
|
|
|
check_string_or_int,
|
|
|
|
)
|
2020-08-19 22:14:40 +02:00
|
|
|
from zerver.lib.webhooks.common import check_send_webhook_message
|
2018-06-19 01:54:57 +02:00
|
|
|
from zerver.models import UserProfile
|
|
|
|
|
|
|
|
# Message templates for the Clubhouse/Shortcut webhook.  Placeholders are
# filled via str.format; "name_template" slots receive one of the two
# name templates directly below.

# Entity name renderings: epics are bolded, stories are rendered as links.
EPIC_NAME_TEMPLATE = "**{name}**"
STORY_NAME_TEMPLATE = "[{name}]({app_url})"

# Comment and description change messages.
COMMENT_ADDED_TEMPLATE = (
    "New comment added to the {entity} {name_template}:\n``` quote\n{text}\n```"
)
NEW_DESC_ADDED_TEMPLATE = (
    "New description added to the {entity} {name_template}:\n``` quote\n{new}\n```"
)
DESC_CHANGED_TEMPLATE = (
    "Description for the {entity} {name_template} was changed from:\n"
    "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```"
)
DESC_REMOVED_TEMPLATE = "Description for the {entity} {name_template} was removed."

# State, name, and archive-status change messages.
STATE_CHANGED_TEMPLATE = (
    "State of the {entity} {name_template} was changed from **{old}** to **{new}**."
)
NAME_CHANGED_TEMPLATE = (
    "The name of the {entity} {name_template} was changed from:\n"
    "``` quote\n{old}\n```\nto\n``` quote\n{new}\n```"
)
ARCHIVED_TEMPLATE = "The {entity} {name_template} was {operation}."

# Story task messages.
STORY_TASK_TEMPLATE = "Task **{task_description}** was {operation} the story {name_template}."
STORY_TASK_COMPLETED_TEMPLATE = (
    "Task **{task_description}** ({name_template}) was completed. :tada:"
)

# Story/epic association and estimate messages.
STORY_ADDED_REMOVED_EPIC_TEMPLATE = (
    "The story {story_name_template} was {operation} the epic {epic_name_template}."
)
STORY_EPIC_CHANGED_TEMPLATE = "The story {story_name_template} was moved from {old_epic_name_template} to {new_epic_name_template}."
STORY_ESTIMATE_TEMPLATE = "The estimate for the story {story_name_template} was set to {estimate}."

# Attachment and label messages.
FILE_ATTACHMENT_TEMPLATE = (
    "A {type} attachment `{file_name}` was added to the story {name_template}."
)
LABEL_TEMPLATE = "**{name}**"
STORY_LABEL_TEMPLATE = "The label {labels} was added to the story {name_template}."
STORY_LABEL_PLURAL_TEMPLATE = "The labels {labels} were added to the story {name_template}."

# Project / story-type change messages.
STORY_UPDATE_PROJECT_TEMPLATE = (
    "The story {name_template} was moved from the **{old}** project to **{new}**."
)
STORY_UPDATE_TYPE_TEMPLATE = (
    "The type of the story {name_template} was changed from **{old_type}** to **{new_type}**."
)

# Deletion and ownership messages.
DELETE_TEMPLATE = "The {entity_type} **{name}** was deleted."
STORY_UPDATE_OWNER_TEMPLATE = "New owner added to the story {name_template}."

# GitHub integration messages; {workflow_state_template} is "" when the
# story's workflow state did not change alongside the GitHub event.
TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE = " ({old} -> {new})"
STORY_GITHUB_PR_TEMPLATE = (
    "New GitHub PR [#{name}]({url}) opened for story {name_template}{workflow_state_template}."
)
STORY_GITHUB_COMMENT_PR_TEMPLATE = "Existing GitHub PR [#{name}]({url}) associated with story {name_template}{workflow_state_template}."
STORY_GITHUB_BRANCH_TEMPLATE = "New GitHub branch [{name}]({url}) associated with story {name_template}{workflow_state_template}."

# Batch (multi-story) update messages.
STORY_UPDATE_BATCH_TEMPLATE = "The story {name_template} {templates}{workflow_state_template}."
STORY_UPDATE_BATCH_CHANGED_TEMPLATE = "{operation} from {sub_templates}"
STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE = "{entity_type} **{old}** to **{new}**"
STORY_UPDATE_BATCH_ADD_REMOVE_TEMPLATE = "{operation} with {entity}"
|
2018-06-19 01:54:57 +02:00
|
|
|
|
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_action_with_primary_id(payload: WildValue) -> WildValue:
    """Return the action in the payload whose "id" matches "primary_id".

    Raises AssertionError if no such action exists (the original
    implementation raised UnboundLocalError in that case, and also kept
    scanning after a match instead of returning early).
    """
    for action in payload["actions"]:
        if payload["primary_id"] == action["id"]:
            # Action ids are unique, so the first match is the match.
            return action
    raise AssertionError("payload has no action matching primary_id")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_event(payload: WildValue, action: WildValue) -> Optional[str]:
    """Derive the event name for this webhook action.

    The base name is "<entity_type>_<action>"; a suffix describing the
    changed attribute (if any) is appended.  Returns None for events we
    deliberately ignore.
    """
    event = "{}_{}".format(
        action["entity_type"].tame(check_string), action["action"].tame(check_string)
    )

    # We only consider the change to be a batch update if there are
    # multiple stories (thus there is no primary_id).
    if event == "story_update" and "primary_id" not in payload:
        return "{}_{}".format(event, "batch")

    if event in IGNORED_EVENTS:
        return None

    if "changes" in action:
        changes = action["changes"]
        # Ordered mapping from changed attribute to event-name suffix.
        # The first matching key wins, preserving the priority of the
        # original if/elif chain (note: both "state" and
        # "workflow_state_id" map to the "state" suffix).
        change_suffixes = (
            ("description", "description"),
            ("state", "state"),
            ("workflow_state_id", "state"),
            ("name", "name"),
            ("archived", "archived"),
            ("complete", "complete"),
            ("epic_id", "epic"),
            ("estimate", "estimate"),
            ("file_ids", "attachment"),
            ("label_ids", "label"),
            ("project_id", "project"),
            ("story_type", "type"),
            ("owner_ids", "owner"),
        )
        for changed_attr, suffix in change_suffixes:
            if changed_attr in changes:
                event = "{}_{}".format(event, suffix)
                break

    return event
|
2018-06-19 01:54:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_topic_function_based_on_type(
    payload: WildValue, action: WildValue
) -> Optional[Callable[[WildValue, WildValue], Optional[str]]]:
    """Look up the topic-builder function for this action's entity type,
    or None if the entity type has no registered handler."""
    return EVENT_TOPIC_FUNCTION_MAPPER.get(action["entity_type"].tame(check_string))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_delete_body(payload: WildValue, action: WildValue) -> str:
    """Build the message announcing a deleted entity."""
    entity_type = action["entity_type"].tame(check_string)
    name = action["name"].tame(check_string)
    return DELETE_TEMPLATE.format(entity_type=entity_type, name=name)
|
2018-12-18 00:00:25 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_create_body(payload: WildValue, action: WildValue) -> str:
    """Build the message announcing a newly created story.

    When the story was created inside an epic, the epic's name is
    resolved from payload["references"].
    """
    kwargs = {
        "name": action["name"].tame(check_string),
        "app_url": action["app_url"].tame(check_string),
    }

    if "epic_id" not in action:
        kwargs["story_type"] = action["story_type"].tame(check_string)
        message = "New story [{name}]({app_url}) of type **{story_type}** was created."
    else:
        message = "New story [{name}]({app_url}) was created and added to the epic **{epic_name}**."
        epic_id = action["epic_id"].tame(check_int)
        for ref in payload["references"]:
            if ref["id"].tame(check_string_or_int) == epic_id:
                kwargs["epic_name"] = ref["name"].tame(check_string)

    return message.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_epic_create_body(payload: WildValue, action: WildValue) -> str:
    """Build the message announcing a newly created epic."""
    epic_name = action["name"].tame(check_string)
    epic_state = action["state"].tame(check_string)
    return f"New epic **{epic_name}**({epic_state}) was created."
|
2018-06-19 01:54:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_comment_added_body(entity: str, payload: WildValue, action: WildValue) -> str:
    """Build the message for a comment added to a story or epic.

    The comment text lives on the primary action; the commented entity's
    name and URL live on a sibling action of the matching entity type.
    """
    kwargs = {"entity": entity}
    # Use a distinct loop name; the original shadowed the `action` parameter.
    for sibling in payload["actions"]:
        if sibling["id"] == payload["primary_id"]:
            kwargs["text"] = sibling["text"].tame(check_string)
        elif sibling["entity_type"] == entity:
            kwargs["name_template"] = get_name_template(entity).format(
                name=sibling["name"].tame(check_string),
                app_url=sibling.get("app_url").tame(check_none_or(check_string)),
            )
    return COMMENT_ADDED_TEMPLATE.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_update_description_body(entity: str, payload: WildValue, action: WildValue) -> str:
    """Build the message for a description change on a story or epic."""
    desc_change = action["changes"]["description"]
    new_desc = desc_change["new"].tame(check_string)
    old_desc = desc_change["old"].tame(check_string)

    kwargs = {
        "entity": entity,
        "new": new_desc,
        "old": old_desc,
        "name_template": get_name_template(entity).format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
    }

    # Pick the template based on whether the description was edited,
    # newly added, or removed entirely.
    if new_desc and old_desc:
        template = DESC_CHANGED_TEMPLATE
    elif new_desc:
        template = NEW_DESC_ADDED_TEMPLATE
    else:
        template = DESC_REMOVED_TEMPLATE

    return template.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_epic_update_state_body(payload: WildValue, action: WildValue) -> str:
    """Build the message for an epic's state change."""
    state_change = action["changes"]["state"]
    return STATE_CHANGED_TEMPLATE.format(
        entity="epic",
        new=state_change["new"].tame(check_string),
        old=state_change["old"].tame(check_string),
        name_template=EPIC_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
        ),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_state_body(payload: WildValue, action: WildValue) -> str:
    """Build the message for a story's workflow-state change.

    The old and new workflow-state ids are resolved to human-readable
    names via payload["references"].
    """
    workflow_state_id = action["changes"]["workflow_state_id"]
    # Hoist the taming out of the loop: the original re-tamed both ids
    # (and the reference id) on every iteration.
    new_state_id = workflow_state_id["new"].tame(check_int)
    old_state_id = workflow_state_id["old"].tame(check_int)

    state = {}
    for ref in payload["references"]:
        ref_id = ref["id"].tame(check_string_or_int)
        if ref_id == new_state_id:
            state["new"] = ref["name"].tame(check_string)
        if ref_id == old_state_id:
            state["old"] = ref["name"].tame(check_string)

    # As before, a missing reference raises KeyError here, surfacing the
    # malformed payload.
    return STATE_CHANGED_TEMPLATE.format(
        entity="story",
        new=state["new"],
        old=state["old"],
        name_template=STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_update_name_body(entity: str, payload: WildValue, action: WildValue) -> str:
    """Build the message for a rename of a story or epic."""
    name_change = action["changes"]["name"]
    name_template = get_name_template(entity).format(
        name=action["name"].tame(check_string),
        app_url=action.get("app_url").tame(check_none_or(check_string)),
    )
    return NAME_CHANGED_TEMPLATE.format(
        entity=entity,
        new=name_change["new"].tame(check_string),
        old=name_change["old"].tame(check_string),
        name_template=name_template,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_update_archived_body(entity: str, payload: WildValue, action: WildValue) -> str:
    """Build the message for archiving or unarchiving a story or epic."""
    # The new value of the "archived" flag decides the verb.
    operation = "archived" if action["changes"]["archived"]["new"] else "unarchived"

    return ARCHIVED_TEMPLATE.format(
        entity=entity,
        name_template=get_name_template(entity).format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
        operation=operation,
    )
|
2018-06-19 01:54:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_story_task_body(operation: str, payload: WildValue, action: WildValue) -> str:
    """Build the message for a task being added to / removed from a story.

    The parent story's name and URL are taken from the sibling "story"
    action in the payload.
    """
    kwargs = {
        "task_description": action["description"].tame(check_string),
        "operation": operation,
    }

    for sibling in payload["actions"]:
        if sibling["entity_type"].tame(check_string) == "story":
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=sibling["name"].tame(check_string),
                app_url=sibling["app_url"].tame(check_string),
            )

    return STORY_TASK_TEMPLATE.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_task_completed_body(payload: WildValue, action: WildValue) -> Optional[str]:
    """Build the message for a completed story task, or None when the
    "complete" flag flipped to False."""
    kwargs = {
        "task_description": action["description"].tame(check_string),
    }

    # Resolve the parent story's name/URL from the payload's references.
    story_id = action["story_id"].tame(check_int)
    for ref in payload["references"]:
        if ref["id"].tame(check_string_or_int) == story_id:
            kwargs["name_template"] = STORY_NAME_TEMPLATE.format(
                name=ref["name"].tame(check_string),
                app_url=ref["app_url"].tame(check_string),
            )

    if not action["changes"]["complete"]["new"].tame(check_bool):
        return None
    return STORY_TASK_COMPLETED_TEMPLATE.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_epic_body(payload: WildValue, action: WildValue) -> str:
    """Build the message for a story being added to, removed from, or
    moved between epics.

    Which template is used depends on whether both, only the new, or only
    the old epic id is present in the change.
    """
    kwargs = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
    }

    epic_id = action["changes"]["epic_id"]
    new_id = epic_id.get("new").tame(check_none_or(check_int))
    old_id = epic_id.get("old").tame(check_none_or(check_int))

    # Resolve epic names via the shared reference-lookup helper instead of
    # the inline scan the original used (consistency with the rest of the
    # module; behavior is identical, including no-match cases).
    new_ref = get_reference_by_id(payload, new_id)
    if new_ref is not None:
        kwargs["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
            name=new_ref["name"].tame(check_string),
        )
    old_ref = get_reference_by_id(payload, old_id)
    if old_ref is not None:
        kwargs["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
            name=old_ref["name"].tame(check_string),
        )

    if new_id and old_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**kwargs)
    elif new_id:
        kwargs["epic_name_template"] = kwargs["new_epic_name_template"]
        kwargs["operation"] = "added to"
    else:
        kwargs["epic_name_template"] = kwargs["old_epic_name_template"]
        kwargs["operation"] = "removed from"

    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_estimate_body(payload: WildValue, action: WildValue) -> str:
    """Build the message for a story's point-estimate change."""
    story_name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"].tame(check_string),
        app_url=action["app_url"].tame(check_string),
    )

    estimate_change = action["changes"]["estimate"]
    # A missing "new" key means the estimate was cleared.
    if "new" in estimate_change:
        new_points = estimate_change["new"].tame(check_int)
        estimate = f"{new_points} points"
    else:
        estimate = "*Unestimated*"

    return STORY_ESTIMATE_TEMPLATE.format(
        story_name_template=story_name_template, estimate=estimate
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_reference_by_id(payload: WildValue, ref_id: Optional[int]) -> Optional[WildValue]:
    """Return the entry in payload["references"] whose id equals ref_id,
    or None if no reference matches.

    Keeps scanning after a match (last match wins), exactly like the
    original implementation.
    """
    match: Optional[WildValue] = None
    for candidate in payload["references"]:
        if candidate["id"].tame(check_string_or_int) == ref_id:
            match = candidate
    return match
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-04-11 03:23:18 +02:00
|
|
|
def get_secondary_actions_with_param(
    entity: str, changed_attr: str, payload: WildValue
) -> Iterator[WildValue]:
    """Yield the payload's secondary actions of the given entity type
    whose "changes" include changed_attr — e.g. a "story" whose
    "pull-request_ids" changed."""
    for secondary_action in payload["actions"]:
        # Guard on entity type first so "changes" is only consulted for
        # matching actions (same short-circuit as the original `and`).
        if secondary_action["entity_type"].tame(check_string) != entity:
            continue
        if changed_attr in secondary_action["changes"]:
            yield secondary_action
|
|
|
|
|
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_story_create_github_entity_body(entity: str, payload: WildValue, action: WildValue) -> str:
    """Build the message for a GitHub PR, PR comment, or branch being
    associated with a story.

    `entity` is one of "pull-request", "pull-request-comment", or (by
    fallthrough) a branch; the GitHub details come from the payload's
    primary action, while `action` carries the story itself.
    """
    pull_request_action: WildValue = get_action_with_primary_id(payload)

    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
        # PRs are identified by number; branches by name.
        "name": (
            pull_request_action["number"].tame(check_int)
            if entity in ("pull-request", "pull-request-comment")
            else pull_request_action["name"].tame(check_string)
        ),
        "url": pull_request_action["url"].tame(check_string),
        # Empty unless the story's workflow state also changed (below).
        "workflow_state_template": "",
    }

    # Sometimes the workflow state of the story will not be changed when linking to a PR.
    if "workflow_state_id" in action["changes"]:
        workflow_state_id = action["changes"]["workflow_state_id"]
        new_state_id = workflow_state_id["new"].tame(check_int)
        old_state_id = workflow_state_id["old"].tame(check_int)
        # Both state ids must resolve via the payload's references; a
        # missing reference indicates a malformed payload.
        new_reference = get_reference_by_id(payload, new_state_id)
        assert new_reference is not None
        new_state = new_reference["name"].tame(check_string)
        old_reference = get_reference_by_id(payload, old_state_id)
        assert old_reference is not None
        old_state = old_reference["name"].tame(check_string)
        kwargs["workflow_state_template"] = TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE.format(
            new=new_state, old=old_state
        )

    # Choose the template for the specific GitHub entity type.
    if entity == "pull-request":
        template = STORY_GITHUB_PR_TEMPLATE
    elif entity == "pull-request-comment":
        template = STORY_GITHUB_COMMENT_PR_TEMPLATE
    else:
        template = STORY_GITHUB_BRANCH_TEMPLATE
    return template.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_attachment_body(payload: WildValue, action: WildValue) -> Optional[str]:
    """Build the message for a file attached to a story, or None when the
    payload describes an attachment removal."""
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
    }
    file_ids = action["changes"]["file_ids"]

    # If this is a payload for when an attachment is removed, ignore it
    if "adds" not in file_ids:
        return None

    file_ids_added = file_ids["adds"].tame(check_list(check_int))
    # Only the first added file is announced.
    file_id = file_ids_added[0]
    # Use the shared reference-lookup helper instead of an inline scan,
    # for consistency with the rest of this module (same last-match
    # semantics as the original loop).
    ref = get_reference_by_id(payload, file_id)
    if ref is not None:
        kwargs.update(
            type=ref["entity_type"].tame(check_string),
            file_name=ref["name"].tame(check_string),
        )

    return FILE_ATTACHMENT_TEMPLATE.format(**kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-04-11 19:44:35 +02:00
|
|
|
def get_story_joined_label_list(
    payload: WildValue, action: WildValue, label_ids_added: List[int]
) -> str:
    """Return a comma-joined list of bolded label names for the added
    label ids.

    Each id is resolved first against payload["actions"] and, failing
    that, against payload["references"]; an unresolvable id renders as an
    empty bold name, matching the original behavior.
    """
    labels = []

    for label_id in label_ids_added:
        label_name = ""

        # The original inner loop variable shadowed the `action`
        # parameter; use a distinct name to avoid that footgun.
        for label_action in payload["actions"]:
            if label_action["id"].tame(check_int) == label_id:
                label_name = label_action.get("name", "").tame(check_string)

        if label_name == "":
            reference = get_reference_by_id(payload, label_id)
            label_name = "" if reference is None else reference["name"].tame(check_string)

        labels.append(LABEL_TEMPLATE.format(name=label_name))

    return ", ".join(labels)
|
|
|
|
|
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_label_body(payload: WildValue, action: WildValue) -> Optional[str]:
    """Build the message for labels added to a story, or None when the
    payload did not add any label."""
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
    }
    label_ids = action["changes"]["label_ids"]

    # If this is a payload for when no label is added, ignore it.
    if "adds" not in label_ids:
        return None

    label_ids_added = label_ids["adds"].tame(check_list(check_int))
    kwargs["labels"] = get_story_joined_label_list(payload, action, label_ids_added)

    # Singular vs. plural phrasing depends on how many labels were added.
    if len(label_ids_added) == 1:
        return STORY_LABEL_TEMPLATE.format(**kwargs)
    return STORY_LABEL_PLURAL_TEMPLATE.format(**kwargs)
|
2018-06-19 01:54:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_project_body(payload: WildValue, action: WildValue) -> str:
    """Build the message body for a story that moved between projects.

    The action's changes only carry the old/new project ids; the matching
    display names are recovered from the payload's "references" section.
    """
    project_change = action["changes"]["project_id"]
    new_project_id = project_change["new"].tame(check_int)
    old_project_id = project_change["old"].tame(check_int)

    format_kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
    }
    # Scan the references for the entries whose ids match the change.
    for reference in payload["references"]:
        reference_id = reference["id"].tame(check_string_or_int)
        if reference_id == new_project_id:
            format_kwargs["new"] = reference["name"].tame(check_string)
        if reference_id == old_project_id:
            format_kwargs["old"] = reference["name"].tame(check_string)

    return STORY_UPDATE_PROJECT_TEMPLATE.format(**format_kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_type_body(payload: WildValue, action: WildValue) -> str:
    """Build the message body for a change to a story's type."""
    type_change = action["changes"]["story_type"]
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"].tame(check_string),
        app_url=action["app_url"].tame(check_string),
    )
    return STORY_UPDATE_TYPE_TEMPLATE.format(
        name_template=name_template,
        new_type=type_change["new"].tame(check_string),
        old_type=type_change["old"].tame(check_string),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_owner_body(payload: WildValue, action: WildValue) -> str:
    """Build the message body for a change to a story's owner.

    Only the story's linked name is interpolated into the template; the
    owner values themselves are not part of the rendered message.
    """
    name_template = STORY_NAME_TEMPLATE.format(
        name=action["name"].tame(check_string),
        app_url=action["app_url"].tame(check_string),
    )
    return STORY_UPDATE_OWNER_TEMPLATE.format(name_template=name_template)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
def get_story_update_batch_body(payload: WildValue, action: WildValue) -> Optional[str]:
    """Build the message body for a batch update to a story.

    When the user selects one or more stories with the checkbox, they can
    perform a batch update on multiple stories while changing multiple
    attributes at the same time.  The epic/project/type/label changes are
    rendered as comma-joined fragments, with a workflow-state change (if
    any) appended as a trailing fragment.  When at most one fragment was
    produced, the update is delegated to the dedicated single-change body
    function instead.

    Returns None when the (single) change has no renderable body function.
    """
    changes = action["changes"]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
        # Filled in below only if the workflow state also changed.
        "workflow_state_template": "",
    }

    templates = []
    # Kind of the most recently handled change; used to pick the dedicated
    # single-change body function when only one change is rendered.
    last_change = "other"

    # Epic and project moves are grouped into a single "was moved" fragment.
    move_sub_templates = []
    if "epic_id" in changes:
        last_change = "epic"
        epic_id = changes["epic_id"]
        old_reference = get_reference_by_id(
            payload, epic_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, epic_id.get("new").tame(check_none_or(check_int))
        )
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Epic",
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )
    if "project_id" in changes:
        last_change = "project"
        project_id = changes["project_id"]
        old_reference = get_reference_by_id(
            payload, project_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, project_id.get("new").tame(check_none_or(check_int))
        )
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Project",
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )
    if len(move_sub_templates) > 0:
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="was moved",
                sub_templates=", ".join(move_sub_templates),
            )
        )

    if "story_type" in changes:
        last_change = "type"
        story_type = changes["story_type"]
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                # "was changed" for the first fragment, "and changed" after it.
                operation="{} changed".format("was" if len(templates) == 0 else "and"),
                sub_templates=STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                    entity_type="type",
                    old=story_type.get("old").tame(check_none_or(check_string)),
                    new=story_type.get("new").tame(check_none_or(check_string)),
                ),
            )
        )

    if "label_ids" in changes:
        label_ids = changes["label_ids"]
        # If this is a payload for when no label is added, ignore it
        if "adds" in label_ids:
            label_ids_added = label_ids["adds"].tame(check_list(check_int))
            last_change = "label"
            labels = get_story_joined_label_list(payload, action, label_ids_added)
            templates.append(
                STORY_UPDATE_BATCH_ADD_REMOVE_TEMPLATE.format(
                    operation="{} added".format("was" if len(templates) == 0 else "and"),
                    entity="the new label{plural} {labels}".format(
                        # Pluralize on the number of labels actually added,
                        # not on how many kinds of label changes (e.g.
                        # "adds"/"removes" keys) appear in the diff.
                        plural="s" if len(label_ids_added) > 1 else "", labels=labels
                    ),
                )
            )

    if "workflow_state_id" in changes:
        last_change = "state"
        workflow_state_id = changes["workflow_state_id"]
        old_reference = get_reference_by_id(
            payload, workflow_state_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, workflow_state_id.get("new").tame(check_none_or(check_int))
        )
        kwargs.update(
            workflow_state_template=TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE.format(
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )

    # Use the default template for state change if it is the only one change.
    # NOTE(review): the second clause is subsumed by the first (len == 0
    # implies len <= 1); presumably it was meant to special-case state-only
    # updates — confirm the intended condition.
    if len(templates) <= 1 or (len(templates) == 0 and last_change == "state"):
        event: str = "{}_{}".format("story_update", last_change)
        alternative_body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)
        # If last_change is not one of "epic", "project", "type", "label" and "state"
        # we should ignore the action as there is no way for us to render the changes.
        if alternative_body_func is None:
            return None
        return alternative_body_func(payload, action)

    kwargs.update(templates=", ".join(templates))
    return STORY_UPDATE_BATCH_TEMPLATE.format(**kwargs)
|
|
|
|
|
|
|
|
|
2024-04-29 23:52:26 +02:00
|
|
|
def get_entity_name(entity: str, payload: WildValue, action: WildValue) -> Optional[str]:
    """Find a display name for the given entity type (callers pass "story" or "epic").

    Prefers the name on the triggering action, falling back to scanning the
    payload's other actions (also used when the action is a branch), and
    finally the payload's references.  Returns None when nothing matches.
    """
    name = action["name"].tame(check_string) if "name" in action else None

    if name is None or action["entity_type"] == "branch":
        # Look through the sibling actions for one of the requested type.
        for candidate_action in payload["actions"]:
            if candidate_action["entity_type"].tame(check_string) == entity:
                name = candidate_action["name"].tame(check_string)

    if name is None:
        # Last resort: the references section may carry the name.
        for reference in payload["references"]:
            if reference["entity_type"].tame(check_string) == entity:
                name = reference["name"].tame(check_string)

    return name
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-19 01:54:57 +02:00
|
|
|
def get_name_template(entity: str) -> str:
    """Return the name template: a Markdown link for stories, bold text otherwise."""
    return STORY_NAME_TEMPLATE if entity == "story" else EPIC_NAME_TEMPLATE
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-05-04 22:26:20 +02:00
|
|
|
def send_channel_messages_for_actions(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: WildValue,
    action: WildValue,
    event: str,
) -> None:
    """Render one webhook action into a topic and body and deliver it.

    Raises UnsupportedWebhookEventTypeError when no body or topic function
    is registered for the event; silently sends nothing when either the
    rendered topic or body turns out empty.
    """
    body_builder = EVENT_BODY_FUNCTION_MAPPER.get(event)
    topic_builder = get_topic_function_based_on_type(payload, action)
    if body_builder is None or topic_builder is None:
        raise UnsupportedWebhookEventTypeError(event)

    body = body_builder(payload, action)
    topic_name = topic_builder(payload, action)

    if not topic_name or not body:
        return

    check_send_webhook_message(request, user_profile, topic_name, body, event)
|
2021-04-11 03:20:17 +02:00
|
|
|
|
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
# Maps the computed event name ("<entity-type>_<event-type>[_<change>]") to
# the function that renders the Markdown message body for that event.  A
# body function may return None, in which case no message is sent.
EVENT_BODY_FUNCTION_MAPPER: Dict[str, Callable[[WildValue, WildValue], Optional[str]]] = {
    "story_update_archived": partial(get_update_archived_body, "story"),
    "epic_update_archived": partial(get_update_archived_body, "epic"),
    "story_create": get_story_create_body,
    "pull-request_create": partial(get_story_create_github_entity_body, "pull-request"),
    "pull-request_comment": partial(get_story_create_github_entity_body, "pull-request-comment"),
    "branch_create": partial(get_story_create_github_entity_body, "branch"),
    "story_delete": get_delete_body,
    "epic_delete": get_delete_body,
    "story-task_create": partial(get_story_task_body, "added to"),
    "story-task_delete": partial(get_story_task_body, "removed from"),
    "story-task_update_complete": get_story_task_completed_body,
    "story_update_epic": get_story_update_epic_body,
    "story_update_estimate": get_story_update_estimate_body,
    "story_update_attachment": get_story_update_attachment_body,
    "story_update_label": get_story_label_body,
    "story_update_owner": get_story_update_owner_body,
    "story_update_project": get_story_update_project_body,
    "story_update_type": get_story_update_type_body,
    "epic_create": get_epic_create_body,
    "epic-comment_create": partial(get_comment_added_body, "epic"),
    "story-comment_create": partial(get_comment_added_body, "story"),
    "epic_update_description": partial(get_update_description_body, "epic"),
    "story_update_description": partial(get_update_description_body, "story"),
    "epic_update_state": get_epic_update_state_body,
    "story_update_state": get_story_update_state_body,
    "epic_update_name": partial(get_update_name_body, "epic"),
    "story_update_name": partial(get_update_name_body, "story"),
    "story_update_batch": get_story_update_batch_body,
}

# Every event name this webhook can produce; registered with @webhook_view.
ALL_EVENT_TYPES = list(EVENT_BODY_FUNCTION_MAPPER.keys())
|
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
# Maps an entity type to the function that derives the message topic from
# the payload/action.  GitHub-related entities (pull-request, branch) and
# story sub-entities all resolve to the owning story's name; epic
# sub-entities resolve to the epic's name.
EVENT_TOPIC_FUNCTION_MAPPER: Dict[str, Callable[[WildValue, WildValue], Optional[str]]] = {
    "story": partial(get_entity_name, "story"),
    "pull-request": partial(get_entity_name, "story"),
    "branch": partial(get_entity_name, "story"),
    "story-comment": partial(get_entity_name, "story"),
    "story-task": partial(get_entity_name, "story"),
    "epic": partial(get_entity_name, "epic"),
    "epic-comment": partial(get_entity_name, "epic"),
}
|
|
|
|
|
2019-02-20 23:21:14 +01:00
|
|
|
# Event names that are recognized but deliberately never turned into
# messages.
IGNORED_EVENTS = {
    "story-comment_update",
}
|
|
|
|
|
2021-12-17 07:03:22 +01:00
|
|
|
# For these events the message is produced per associated story rather than
# from the primary action: each value extracts the secondary story actions
# referenced by the named id-list field of the primary action.
EVENTS_SECONDARY_ACTIONS_FUNCTION_MAPPER: Dict[str, Callable[[WildValue], Iterator[WildValue]]] = {
    "pull-request_create": partial(get_secondary_actions_with_param, "story", "pull_request_ids"),
    "branch_create": partial(get_secondary_actions_with_param, "story", "branch_ids"),
    "pull-request_comment": partial(get_secondary_actions_with_param, "story", "pull_request_ids"),
}
|
|
|
|
|
|
|
|
|
2021-12-16 21:23:48 +01:00
|
|
|
@webhook_view("Clubhouse", all_event_types=ALL_EVENT_TYPES)
@typed_endpoint
def api_clubhouse_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    payload: JsonBodyPayload[WildValue],
) -> HttpResponse:
    """Entry point for Clubhouse webhook deliveries."""
    # Clubhouse has a tendency to send empty POST requests to
    # third-party endpoints. It is unclear as to which event type
    # such requests correspond to. So, it is best to ignore such
    # requests for now.
    if payload.value is None:
        return json_success(request)

    # A payload carrying a "primary_id" singles out one action; otherwise
    # every action in the payload is treated as a primary action.
    if "primary_id" in payload:
        primary_actions: Iterable[WildValue] = [get_action_with_primary_id(payload)]
    else:
        primary_actions = payload["actions"]

    for primary_action in primary_actions:
        event = get_event(payload, primary_action)
        if event is None:
            # Nothing to send for this action.
            continue

        # Some events (e.g. pull-request/branch ones) fan out over
        # secondary story actions derived from the payload instead of the
        # primary action itself.
        secondary_actions_func = EVENTS_SECONDARY_ACTIONS_FUNCTION_MAPPER.get(event)
        if secondary_actions_func is not None:
            actions_to_send: Iterable[WildValue] = secondary_actions_func(payload)
        else:
            actions_to_send = [primary_action]
        for action_to_send in actions_to_send:
            send_channel_messages_for_actions(
                request, user_profile, payload, action_to_send, event
            )

    return json_success(request)
|