#!/usr/bin/env python3
"""Create or update a webhook integration screenshot using a test fixture."""

# check for the venv
from lib import sanity_check

sanity_check.check_venv(__file__)

import os
import sys

TOOLS_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = os.path.dirname(TOOLS_DIR)

sys.path.insert(0, ROOT_DIR)
from scripts.lib.setup_path import setup_path

setup_path()

os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django

django.setup()

import argparse
import base64
import subprocess
from typing import Any, Dict, Optional
from urllib.parse import parse_qsl, urlencode

import requests
import ujson

from scripts.lib.zulip_tools import BOLDRED, ENDC
from tools.lib.test_script import prepare_puppeteer_run
from zerver.lib.actions import (
    bulk_add_subscriptions,
    do_change_avatar_fields,
    do_create_user,
    notify_created_bot,
)
from zerver.lib.integrations import (
    DOC_SCREENSHOT_CONFIG,
    INTEGRATIONS,
    ScreenshotConfig,
    WebhookIntegration,
    get_fixture_and_image_paths,
    split_fixture_path,
)
from zerver.lib.storage import static_path
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.upload import upload_avatar_image
from zerver.lib.webhooks.common import get_fixture_http_headers
from zerver.models import Message, UserProfile, get_realm, get_user_by_delivery_email


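# Create (or reuse) an incoming-webhook bot owned by Iago in the development
# "zulip" realm, and upload the integration's logo as the bot's avatar when
# one is available.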
def create_integration_bot(integration: WebhookIntegration, bot_name: Optional[str] = None) -> UserProfile:
    realm = get_realm('zulip')
    owner = get_user_by_delivery_email("iago@zulip.com", realm)
    bot_email = f"{integration.name}-bot@example.com"
    if bot_name is None:
        bot_name = f"{integration.name.capitalize()} Bot"
    try:
        bot = UserProfile.objects.get(email=bot_email)
    except UserProfile.DoesNotExist:
        bot = do_create_user(
            email=bot_email,
            password="123",
            realm=owner.realm,
            full_name=bot_name,
            short_name=bot_name,
            bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
            bot_owner=owner,
            acting_user=owner,
        )
        notify_created_bot(bot)

    bot_avatar_path = integration.get_bot_avatar_path()
    if bot_avatar_path is not None:
        bot_avatar_path = static_path(bot_avatar_path)
        if os.path.isfile(bot_avatar_path):
            with open(bot_avatar_path, "rb") as f:
                upload_avatar_image(f, owner, bot)
                do_change_avatar_fields(bot, UserProfile.AVATAR_FROM_USER, acting_user=owner)

    return bot


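# Ensure the stream the integration posts to exists, and subscribe both the
# bot and its owner to it.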
def create_integration_stream(integration: WebhookIntegration, bot: UserProfile) -> None:
    assert isinstance(bot.bot_owner, UserProfile)
    stream, created = create_stream_if_needed(bot.bot_owner.realm, integration.stream_name)
    bulk_add_subscriptions([stream], [bot, bot.bot_owner], from_stream_creation=created)


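# Look up an integration by name and make sure it is one of the webhook
# integrations this tool can drive.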
def get_integration(integration_name: str) -> WebhookIntegration:
    integration = INTEGRATIONS[integration_name]
    assert isinstance(integration, WebhookIntegration), "Not a WebhookIntegration"
    return integration


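# Fixture headers come back in Django's WSGI-style format (e.g.
# HTTP_X_CUSTOM_HEADER); convert them to the names `requests` expects
# (e.g. X-CUSTOM-HEADER).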
def get_requests_headers(integration_name: str, fixture_name: str) -> Dict[str, Any]:
    headers = get_fixture_http_headers(integration_name, fixture_name)

    def fix_name(header: str) -> str:
        header = header if not header.startswith('HTTP_') else header[len('HTTP_'):]
        return header.replace('_', '-')

    return {fix_name(k): v for k, v in headers.items()}


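# argparse `type=` callback for --custom-headers: parse the JSON string and
# turn parse failures into a proper argparse error.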
def custom_headers(headers_json: str) -> Dict[str, str]:
    if not headers_json:
        return {}
    try:
        return ujson.loads(headers_json)
    except ValueError as ve:
        raise argparse.ArgumentTypeError(
            'Encountered an error while attempting to parse custom headers: {}\n'
            'Note: all strings must be enclosed within "" instead of \'\''.format(ve))


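# Replay the fixture against the integration's webhook URL on the local dev
# server, posting as the bot.  Returns True if the webhook was triggered
# successfully.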
def send_bot_payload_message(bot: UserProfile, integration: WebhookIntegration, fixture_path: str,
                             config: ScreenshotConfig) -> bool:
    # Delete all messages, so the new message is the only one in its message group.
    Message.objects.filter(sender=bot).delete()
    json_fixture = fixture_path.endswith('.json')
    _, fixture_name = split_fixture_path(fixture_path)

    if fixture_name:
        with open(fixture_path) as f:
            if json_fixture:
                data = ujson.load(f)
            else:
                data = f.read().strip()
    else:
        data = ''

    headers = get_requests_headers(integration.name, fixture_name)
    headers.update(config.custom_headers)
    if config.use_basic_auth:
        credentials = base64.b64encode(f'{bot.email}:{bot.api_key}'.encode('utf8')).decode('utf8')
        auth = f'basic {credentials}'
        headers.update(dict(Authorization=auth))

    assert isinstance(bot.bot_owner, UserProfile)
    stream = integration.stream_name or 'devel'
    url = f"{bot.bot_owner.realm.uri}/{integration.url}"
    params = {'api_key': bot.api_key, 'stream': stream}
    params.update(config.extra_params)

    extra_args = {}
    if not json_fixture and data:
        # We overwrite any params in the fixture with our params; the stream
        # name, for example, may be defined in the fixture.
        parsed_params = dict(parse_qsl(data))
        parsed_params.update(params)
        params = parsed_params

    elif config.payload_as_query_param:
        params[config.payload_param_name] = ujson.dumps(data)

    else:
        extra_args = {'json': data}

    url = f'{url}?{urlencode(params)}'

    try:
        response = requests.post(url=url, headers=headers, **extra_args)
    except requests.exceptions.ConnectionError:
        print('This tool needs the local dev server to be running. '
              'Please start it using tools/run-dev.py before running this tool.')
        sys.exit(1)
    if response.status_code != 200:
        print(response.json())
        print('Failed to trigger webhook')
        return False
    else:
        print(f'Triggered {integration.name} webhook')
        return True


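# Screenshot the bot's most recent message by shelling out to the Puppeteer
# helper in tools/message-screenshot.js.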
def capture_last_message_screenshot(bot: UserProfile, image_path: str) -> None:
    message = Message.objects.filter(sender=bot).last()
    if message is None:
        print(f'No message found for {bot.full_name}')
        return
    message_id = str(message.id)
    screenshot_script = os.path.join(TOOLS_DIR, 'message-screenshot.js')
    subprocess.check_call(['node', screenshot_script, message_id, image_path])


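# End-to-end flow for one screenshot: resolve the integration, create its bot
# and stream, send the fixture payload, and capture the resulting message.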
def generate_screenshot_from_config(integration_name: str, screenshot_config: ScreenshotConfig) -> None:
    integration = get_integration(integration_name)
    fixture_path, image_path = get_fixture_and_image_paths(integration, screenshot_config)
    bot = create_integration_bot(integration, screenshot_config.bot_name)
    create_integration_stream(integration, bot)
    message_sent = send_bot_payload_message(bot, integration, fixture_path, screenshot_config)
    if message_sent:
        capture_last_message_screenshot(bot, image_path)
        print(f'Screenshot captured to: {BOLDRED}{image_path}{ENDC}')


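# Command-line interface: exactly one of --all or --integration is required;
# the remaining flags customize a single-integration run.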
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--all', default=False, action='store_true')
group.add_argument('--integration', type=str, help='Name of the integration')
parser.add_argument('--fixture', type=str, help='Name of the fixture file to use')
parser.add_argument('--image-name', type=str, help='Name for the screenshot image')
parser.add_argument('--image-dir', type=str, help='Directory name where to save the screenshot image')
parser.add_argument('--bot-name', type=str, help='Name to use for the bot')
parser.add_argument('-A', '--use-basic-auth', action='store_true',
                    help='Add basic auth headers to the request')
parser.add_argument('-Q', '--payload-as-query-param', action='store_true',
                    help='Send payload as query param instead of body')
parser.add_argument('-P', '--payload-param-name', type=str,
                    help='Param name to use for the payload')
parser.add_argument('-H', '--custom-headers',
                    type=custom_headers,
                    help='Any additional headers to be sent with the request.')
options = parser.parse_args()
prepare_puppeteer_run()
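# Dispatch: --all regenerates every screenshot in DOC_SCREENSHOT_CONFIG;
# --integration with --fixture builds a one-off ScreenshotConfig from the
# command-line flags; --integration alone uses the integration's configured
# screenshots.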
if options.all:
    for key, value in vars(options).items():
        if key != 'all' and value:
            print('Generating screenshots for all integrations. Ignoring all command-line options')
    for integration_name, screenshot_configs in DOC_SCREENSHOT_CONFIG.items():
        for screenshot_config in screenshot_configs:
            generate_screenshot_from_config(integration_name, screenshot_config)
elif options.fixture:
    config = dict(fixture_name=options.fixture, use_basic_auth=options.use_basic_auth,
                  custom_headers=options.custom_headers,
                  payload_as_query_param=options.payload_as_query_param)
    if options.image_name:
        config['image_name'] = options.image_name
    if options.image_dir:
        config['image_dir'] = options.image_dir
    if options.bot_name:
        config['bot_name'] = options.bot_name
    if options.payload_param_name:
        config['payload_param_name'] = options.payload_param_name
    screenshot_config = ScreenshotConfig(**config)
    generate_screenshot_from_config(options.integration, screenshot_config)
elif options.integration in DOC_SCREENSHOT_CONFIG:
    configs = DOC_SCREENSHOT_CONFIG[options.integration]
    for screenshot_config in configs:
        generate_screenshot_from_config(options.integration, screenshot_config)
else:
    parser.error(
        "Could not find configuration for integration. "
        "You can specify a fixture file to use with the --fixture flag. "
"Or add a configuration to zerver.lib.integrations.DOC_SCREENSHOT_CONFIG",
|
2020-05-15 09:13:56 +02:00
|
|
|
)
|