puppet: Add vector pipelines for other Akamai SQS queues.

Alex Vandiver, 2024-01-25 15:42:21 -05:00 (committed by Tim Abbott)
parent dcae35196c
commit cd565058cd
2 changed files with 31 additions and 24 deletions


@@ -7,7 +7,10 @@ class zulip_ops::prometheus::akamai {
   $bin = $zulip_ops::vector::bin
   $conf = '/etc/vector.toml'
-  $sqs_url = zulipsecret('secrets', 'akamai_sqs_url', '')
+  $pipelines = {
+    'static' => zulipsecret('secrets', 'akamai_static_sqs_url', ''),
+    'realm'  => zulipsecret('secrets', 'akamai_realm_sqs_url', ''),
+  }
   file { $conf:
     ensure  => file,
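
The hunk cuts off before the file resource's remaining attributes, but the shape of the change is clear: one secret becomes a hash of per-pipeline queue URLs. As a minimal sketch of how such a hash typically reaches the template below, Puppet's template() function renders ERB against manifest scope, so $pipelines becomes @pipelines inside the template; the template path and service refresh here are assumptions, not shown in this diff:

file { $conf:
  ensure  => file,
  # template() exposes manifest-scope variables to the ERB template,
  # which is how the loop below can read @pipelines. This path is
  # hypothetical; the diff does not show the real one.
  content => template('zulip_ops/vector.toml.erb'),
  # Assumption: vector runs as a service and reloads on config change.
  notify  => Service['vector'],
}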


@@ -1,65 +1,69 @@
 [sources.vector_metrics]
   type = "internal_metrics"
-[sources.s3_akamai_static]
-  # Akamai Datastream2 logs all accesses into AWS S3:
-  # https://techdocs.akamai.com/datastream2/docs/stream-amazon-s3
-  #
-  # The S3 bucket is configured to send event notifications to the SQS
-  # queue, which this host is allowed to read from. This consumer
-  # deletes the messages from the queue, and the S3 bucket is
-  # configured to purge old logs.
-  # https://vector.dev/docs/reference/configuration/sources/aws_s3/
+# Akamai Datastream2 logs all accesses into AWS S3:
+# https://techdocs.akamai.com/datastream2/docs/stream-amazon-s3
+#
+# The S3 bucket is configured to send event notifications to the SQS
+# queue, which this host is allowed to read from. This consumer
+# deletes the messages from the queue, and the S3 bucket is
+# configured to purge old logs.
+# https://vector.dev/docs/reference/configuration/sources/aws_s3/
+<% @pipelines.each do |key,sqs_url| %>
+[sources.s3_akamai_<%= key %>]
   type = "aws_s3"
   region = "us-east-1"
   compression = "gzip"
   sqs.delete_message = true
   sqs.poll_secs = 15
-  sqs.queue_url = "<%= @sqs_url %>"
+  sqs.queue_url = "<%= sqs_url %>"
-[transforms.parsing]
+[transforms.parse_<%= key %>]
   type = "remap"
-  inputs = ["s3_akamai_static"]
+  inputs = ["s3_akamai_<%= key %>"]
   source = '''
   . = parse_json!(string!(.message))
   .turnAroundTimeSec = to_int!(.turnAroundTimeMSec) / 1000.0
   '''
-[transforms.logs2metrics-requests]
+[transforms.logs2metrics_<%= key %>]
   type = "log_to_metric"
-  inputs = ["parsing"]
+  inputs = ["parse_<%= key %>"]
-  [[transforms.logs2metrics-requests.metrics]]
+  [[transforms.logs2metrics_<%= key %>.metrics]]
     field = "cacheStatus"
     name = "requests_cache_count"
-    namespace = "akamai_static"
+    namespace = "akamai_<%= key %>"
     type = "counter"
-    [transforms.logs2metrics-requests.metrics.tags]
+    [transforms.logs2metrics_<%= key %>.metrics.tags]
       status_code = "{{statusCode}}"
       cached = "{{cacheStatus}}"
       host = "{{reqHost}}"
-  [[transforms.logs2metrics-requests.metrics]]
+  [[transforms.logs2metrics_<%= key %>.metrics]]
     field = "bytes"
     name = "requests_bytes"
-    namespace = "akamai_static"
+    namespace = "akamai_<%= key %>"
     type = "counter"
     increment_by_value = true
-    [transforms.logs2metrics-requests.metrics.tags]
+    [transforms.logs2metrics_<%= key %>.metrics.tags]
       status_code = "{{statusCode}}"
       cached = "{{cacheStatus}}"
      host = "{{reqHost}}"
-  [[transforms.logs2metrics-requests.metrics]]
+  [[transforms.logs2metrics_<%= key %>.metrics]]
     field = "turnAroundTimeSec"
     name = "turnaround_time_sec"
-    namespace = "akamai_static"
+    namespace = "akamai_<%= key %>"
    type = "histogram"
-    [transforms.logs2metrics-requests.metrics.tags]
+    [transforms.logs2metrics_<%= key %>.metrics.tags]
       status_code = "{{statusCode}}"
       cached = "{{cacheStatus}}"
       host = "{{reqHost}}"
+<% end %>
 [sinks.prometheus_exporter]
   type = "prometheus_exporter"
   inputs = ["vector_metrics", "logs2metrics*"]