kandra: Use vector to plumb SES logs into S3.

Alex Vandiver, 2024-09-24 18:00:22 -04:00 (committed by Tim Abbott)
parent 60759ab5fb
commit 0c7d83f7da
3 changed files with 40 additions and 0 deletions

puppet/kandra/manifests/profile/prometheus_server.pp

@@ -12,6 +12,9 @@ class kandra::profile::prometheus_server inherits kandra::profile::base {
  # Ditto the Akamai logs
  include kandra::prometheus::akamai
  # The SES log ETL (writing to S3) runs on vector
  include kandra::ses_logs
  # Export prometheus stats to status.zulip.com
  include kandra::statuspage

puppet/kandra/manifests/ses_logs.pp

@@ -0,0 +1,12 @@
class kandra::ses_logs {
  include kandra::vector

  $ses_logs_sqs_url = zulipsecret('secrets', 'ses_logs_sqs_url', '')
  $ses_logs_s3_bucket = zulipsecret('secrets', 'ses_logs_s3_bucket', '')

  concat::fragment { 'vector_ses_logs':
    target  => $kandra::vector::conf,
    order   => '50',
    content => template('kandra/vector_ses.toml.template.erb'),
  }
}
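For reference, the zulipsecret calls above read from the host's /etc/zulip/zulip-secrets.conf. A minimal sketch of the entries this class expects, with placeholder values:

[secrets]
ses_logs_sqs_url = https://sqs.us-east-1.amazonaws.com/123456789012/ses-logs
ses_logs_s3_bucket = example-ses-logs

Because both lookups default to the empty string, the fragment still renders when the secrets are unset; the queue URL and bucket in the generated vector config are then simply empty.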

puppet/kandra/templates/vector_ses.toml.template.erb

@@ -0,0 +1,25 @@
# SES writes all of its logs to a single SQS queue; we consume them,
# batch them, and write them to files in S3 for later analysis.
[sources.ses_logs_sqs]
type = "aws_sqs"
queue_url = "<%= @ses_logs_sqs_url %>"
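# (Assumed, not configured by this commit: the AWS credentials vector runs
# with need at least sqs:ReceiveMessage and sqs:DeleteMessage on this queue,
# since vector deletes messages once it has consumed them.)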

[transforms.extract_ses_message]
type = "remap"
inputs = ["ses_logs_sqs"]
# SES puts its JSON event into the string .Message field of the SQS
# message body, which itself arrives serialized as JSON.
source = '''
. = parse_json!(string!(.message)).Message
'''
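# To illustrate the double encoding (hypothetical values): the raw SQS body
# that lands in .message is an SNS-style envelope, e.g.
#   {"Type": "Notification", "Message": "{\"eventType\": \"Delivery\", ...}"}
# and after the remap the event payload is just the inner JSON string:
#   {"eventType": "Delivery", ...}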

[sinks.ses_logs_s3]
type = "aws_s3"
inputs = ["extract_ses_message"]
bucket = "<%= @ses_logs_s3_bucket %>"
compression = "gzip"
batch.max_bytes = 10000000 # 10MB, before compression
batch.timeout_secs = 300   # ...or 5 minutes, whichever comes first
encoding.codec = "text"
key_prefix = "%F/"
storage_class = "STANDARD_IA"
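Since key_prefix is strftime-expanded, %F groups each day's batches under a YYYY-MM-DD/ prefix. Assuming vector's default object naming for the aws_s3 sink (a Unix-timestamp filename with a UUID appended, and an extension matching the codec and compression), a batch flushed on 2024-09-24 would be written to a key shaped roughly like:

2024-09-24/1727215512-1b7e34b2-6a3c-4d9e-8f01-2c3d4e5f6a7b.log.gz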