#!/usr/bin/env python3
"""Create or update a webhook integration screenshot using a test fixture."""
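# Typical invocations (flags are defined by the argparse parser at the bottom
# of this file):
#   --all                              regenerate every screenshot in DOC_SCREENSHOT_CONFIG
#   --skip-until NAME                  like --all, but start from integration NAME
#   --integration NAME [NAME ...]      regenerate the configured screenshots for NAME(s)
#   --integration NAME --fixture FILE  use a specific fixture for a single integration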
import argparse
import base64
import os
import subprocess
import sys
from typing import Any, Dict, Optional, Tuple
from urllib.parse import parse_qsl, urlencode

TOOLS_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = os.path.dirname(TOOLS_DIR)
sys.path.insert(0, ROOT_DIR)

# check for the venv
from tools.lib import sanity_check

sanity_check.check_venv(__file__)

from scripts.lib.setup_path import setup_path

setup_path()

os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django

django.setup()

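# Django must be configured above before the zerver imports below, since they
# load Django models at import time.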
import orjson
import requests
import zulip

from scripts.lib.zulip_tools import BOLDRED, ENDC
from tools.lib.test_script import prepare_puppeteer_run
from zerver.actions.create_user import do_create_user, notify_created_bot
from zerver.actions.streams import bulk_add_subscriptions
from zerver.actions.user_settings import do_change_avatar_fields
from zerver.lib.integrations import (
    DOC_SCREENSHOT_CONFIG,
    INTEGRATIONS,
    BaseScreenshotConfig,
    Integration,
    ScreenshotConfig,
    WebhookIntegration,
    get_fixture_and_image_paths,
    split_fixture_path,
)
from zerver.lib.storage import static_path
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.upload import upload_avatar_image
from zerver.lib.webhooks.common import get_fixture_http_headers
from zerver.models import Message, UserProfile
from zerver.models.realms import get_realm
from zerver.models.users import get_user_by_delivery_email

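# Reuse the integration's "<name>-bot@example.com" bot if it already exists;
# otherwise create an incoming-webhook bot owned by Iago, and use the
# integration's bundled logo as the bot's avatar when one is available.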
def create_integration_bot(integration: Integration, bot_name: Optional[str] = None) -> UserProfile:
    realm = get_realm("zulip")
    owner = get_user_by_delivery_email("iago@zulip.com", realm)
    bot_email = f"{integration.name}-bot@example.com"
    if bot_name is None:
        bot_name = f"{integration.name.capitalize()} Bot"
    try:
        bot = UserProfile.objects.get(email=bot_email)
    except UserProfile.DoesNotExist:
        bot = do_create_user(
            email=bot_email,
            password="123",
            realm=owner.realm,
            full_name=bot_name,
            bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
            bot_owner=owner,
            acting_user=owner,
        )
        notify_created_bot(bot)

    bot_avatar_path = integration.get_bot_avatar_path()
    if bot_avatar_path is not None:
        bot_avatar_path = static_path(bot_avatar_path)
        if os.path.isfile(bot_avatar_path):
            with open(bot_avatar_path, "rb") as f:
                upload_avatar_image(f, owner, bot)
                do_change_avatar_fields(bot, UserProfile.AVATAR_FROM_USER, acting_user=owner)

    return bot

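# Create the integration's stream if needed and subscribe both the bot and
# its owner to it.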
def create_integration_stream(integration: Integration, bot: UserProfile) -> None:
    assert isinstance(bot.bot_owner, UserProfile)
    realm = bot.bot_owner.realm
    stream, created = create_stream_if_needed(realm, integration.stream_name)
    bulk_add_subscriptions(realm, [stream], [bot, bot.bot_owner], acting_user=bot)

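# Load the fixture: JSON fixtures are parsed into Python objects, other
# fixtures are returned as stripped raw text, and an empty fixture name
# yields empty data.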
def get_fixture_info(fixture_path: str) -> Tuple[Any, bool, str]:
    json_fixture = fixture_path.endswith(".json")
    _, fixture_name = split_fixture_path(fixture_path)

    if fixture_name:
        if json_fixture:
            with open(fixture_path, "rb") as fb:
                data = orjson.loads(fb.read())
        else:
            with open(fixture_path) as f:
                data = f.read().strip()
    else:
        data = ""

    return data, json_fixture, fixture_name

def get_integration(integration_name: str) -> Integration:
    integration = INTEGRATIONS[integration_name]
    return integration

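# Convert fixture header names from Django's HTTP_FOO_BAR form into FOO-BAR,
# as expected in a real HTTP request.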
def get_requests_headers(integration_name: str, fixture_name: str) -> Dict[str, Any]:
    headers = get_fixture_http_headers(integration_name, fixture_name)

    def fix_name(header: str) -> str:
        header = header if not header.startswith("HTTP_") else header[len("HTTP_") :]
        return header.replace("_", "-")

    return {fix_name(k): v for k, v in headers.items()}

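# argparse type= callback for --custom-headers: parses the given JSON string
# into a headers dict, or raises an ArgumentTypeError with a helpful message.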
def custom_headers(headers_json: str) -> Dict[str, str]:
    if not headers_json:
        return {}
    try:
        return orjson.loads(headers_json)
    except orjson.JSONDecodeError as ve:
        raise argparse.ArgumentTypeError(
            f"Encountered an error while attempting to parse custom headers: {ve}\n"
            "Note: all strings must be enclosed within \"\" instead of ''"
        )

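# For non-webhook integrations: send the fixture's "subject" and "body"
# directly to the integration's stream via the Zulip API, acting as the bot.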
def send_bot_mock_message(
    bot: UserProfile, integration: Integration, fixture_path: str, config: BaseScreenshotConfig
) -> None:
    # Delete all messages, so that the new message is the only one in its message group.
    Message.objects.filter(realm_id=bot.realm_id, sender=bot).delete()
    data, _, _ = get_fixture_info(fixture_path)

    assert bot.bot_owner is not None
    url = f"{bot.bot_owner.realm.uri}"
    client = zulip.Client(email=bot.email, api_key=bot.api_key, site=url)

    try:
        request = {
            "type": "stream",
            "to": integration.stream_name,
            "topic": data["subject"],
            "content": data["body"],
        }
        client.send_message(request)
    except KeyError:
        print(
            f"{fixture_path} contains invalid configuration. "
            'Fields "subject" and "body" are required for non-webhook integrations.'
        )
        sys.exit(1)

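# For webhook integrations: POST the fixture payload to the integration's
# webhook URL on the local dev server, authenticating with the bot's API key
# and targeting its stream. Returns True if the webhook was triggered.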
def send_bot_payload_message(
    bot: UserProfile, integration: WebhookIntegration, fixture_path: str, config: ScreenshotConfig
) -> bool:
    # Delete all messages, so that the new message is the only one in its message group.
    Message.objects.filter(realm_id=bot.realm_id, sender=bot).delete()
    data, json_fixture, fixture_name = get_fixture_info(fixture_path)

    headers = get_requests_headers(integration.name, fixture_name)
    headers.update(config.custom_headers)
    if config.use_basic_auth:
        credentials = base64.b64encode(f"{bot.email}:{bot.api_key}".encode()).decode()
        auth = f"basic {credentials}"
        headers.update(dict(Authorization=auth))

    assert isinstance(bot.bot_owner, UserProfile)
    stream = integration.stream_name or "devel"
    url = f"{bot.bot_owner.realm.uri}/{integration.url}"
    params = {"api_key": bot.api_key, "stream": stream}
    params.update(config.extra_params)

    extra_args = {}
    if not json_fixture and data:
        # We overwrite any params in the fixture with our own; the stream name,
        # for example, may be defined in the fixture.
        assert isinstance(data, str)
        parsed_params = dict(parse_qsl(data))
        parsed_params.update(params)
        params = parsed_params
    elif config.payload_as_query_param:
        params[config.payload_param_name] = orjson.dumps(data).decode()
    else:
        extra_args = {"json": data}

    url = f"{url}?{urlencode(params)}"

    try:
        response = requests.post(url=url, headers=headers, **extra_args)
    except requests.exceptions.ConnectionError:
        print(
            "This tool needs the local dev server to be running. "
            "Please start it using tools/run-dev before running this tool."
        )
        sys.exit(1)
    if response.status_code != 200:
        print(response.json())
        print("Failed to trigger webhook")
        return False
    else:
        print(f"Triggered {integration.name} webhook")
        return True

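# Screenshot the bot's most recent message by running the
# message-screenshot.js helper with node.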
def capture_last_message_screenshot(bot: UserProfile, image_path: str) -> None:
    message = Message.objects.filter(realm_id=bot.realm_id, sender=bot).last()
    realm = get_realm("zulip")
    if message is None:
        print(f"No message found for {bot.full_name}")
        return
    message_id = str(message.id)
    screenshot_script = os.path.join(TOOLS_DIR, "message-screenshot.js")
    subprocess.check_call(["node", screenshot_script, message_id, image_path, realm.uri])

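# End-to-end flow for one screenshot: look up the integration, resolve the
# fixture and image paths, create the bot and stream, send the message
# (webhook or mock), and capture the screenshot.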
def generate_screenshot_from_config(
    integration_name: str, screenshot_config: BaseScreenshotConfig
) -> None:
    integration = get_integration(integration_name)
    fixture_path, image_path = get_fixture_and_image_paths(integration, screenshot_config)
    bot = create_integration_bot(integration, screenshot_config.bot_name)
    create_integration_stream(integration, bot)
    if isinstance(integration, WebhookIntegration):
        assert isinstance(
            screenshot_config, ScreenshotConfig
        ), "Webhook integrations require ScreenshotConfig"
        message_sent = send_bot_payload_message(bot, integration, fixture_path, screenshot_config)
    else:
        send_bot_mock_message(bot, integration, fixture_path, screenshot_config)
        message_sent = True
    if message_sent:
        capture_last_message_screenshot(bot, image_path)
        print(f"Screenshot captured to: {BOLDRED}{image_path}{ENDC}")

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--all", action="store_true")
group.add_argument(
    "--skip-until", help="Name of the integration whose predecessors are skipped. Similar to --all"
)
group.add_argument("--integration", nargs="+", help="Names of the integrations")
fixture_group = parser.add_argument_group("integration")
parser.add_argument("--fixture", help="Name of the fixture file to use")
parser.add_argument("--image-name", help="Name for the screenshot image")
parser.add_argument("--image-dir", help="Directory name where to save the screenshot image")
parser.add_argument("--bot-name", help="Name to use for the bot")
parser.add_argument(
    "-A", "--use-basic-auth", action="store_true", help="Add basic auth headers to the request"
)
parser.add_argument(
    "-Q",
    "--payload-as-query-param",
    action="store_true",
    help="Send payload as query param instead of body",
)
parser.add_argument("-P", "--payload-param-name", help="Param name to use for the payload")
parser.add_argument(
    "-H",
    "--custom-headers",
    type=custom_headers,
    help="Any additional headers to be sent with the request.",
)

options = parser.parse_args()
prepare_puppeteer_run()

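# Dispatch on the selected mode: --all, --skip-until, a single --integration
# with --fixture, or one or more --integration names.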
if options.all:
    for key, value in vars(options).items():
        if key != "all" and value:
            print("Generating screenshots for all integrations. Ignoring all command-line options")
            break
    for integration_name, screenshot_configs in DOC_SCREENSHOT_CONFIG.items():
        for screenshot_config in screenshot_configs:
            generate_screenshot_from_config(integration_name, screenshot_config)

elif options.skip_until:
    for key, value in vars(options).items():
        if key != "skip_until" and value:
            print(
                f"Generating screenshots for all integrations, skipping those before {options.skip_until}. Ignoring all other command-line options"
            )
            break
    skip = True
    for integration_name, screenshot_configs in DOC_SCREENSHOT_CONFIG.items():
        if integration_name == options.skip_until:
            skip = False
        if skip:
            continue
        for screenshot_config in screenshot_configs:
            generate_screenshot_from_config(integration_name, screenshot_config)

elif options.fixture:
    if len(options.integration) != 1:
        parser.error(
            "Exactly one integration should be specified for --integration when --fixture is provided"
        )
    config = dict(
        fixture_name=options.fixture,
        use_basic_auth=options.use_basic_auth,
        custom_headers=options.custom_headers,
        payload_as_query_param=options.payload_as_query_param,
    )
    if options.image_name:
        config["image_name"] = options.image_name
    if options.image_dir:
        config["image_dir"] = options.image_dir
    if options.bot_name:
        config["bot_name"] = options.bot_name
    if options.payload_param_name:
        config["payload_param_name"] = options.payload_param_name
    screenshot_config = ScreenshotConfig(**config)
    generate_screenshot_from_config(options.integration[0], screenshot_config)

elif options.integration:
    for integration in options.integration:
        assert integration in DOC_SCREENSHOT_CONFIG
        configs = DOC_SCREENSHOT_CONFIG[integration]
        for screenshot_config in configs:
            generate_screenshot_from_config(integration, screenshot_config)

else:
    parser.error(
        "Could not find configuration for integration. "
        "You can specify a fixture file to use via the --fixture flag. "
        "Or add a configuration to zerver.lib.integrations.DOC_SCREENSHOT_CONFIG",
    )