puppet: Add vector pipelines for other Akamai SQS queues.

This commit is contained in:
Alex Vandiver 2024-01-25 15:42:21 -05:00 committed by Tim Abbott
parent dcae35196c
commit cd565058cd
2 changed files with 31 additions and 24 deletions

View File

@@ -7,7 +7,10 @@ class zulip_ops::prometheus::akamai {
$bin = $zulip_ops::vector::bin $bin = $zulip_ops::vector::bin
$conf = '/etc/vector.toml' $conf = '/etc/vector.toml'
$sqs_url = zulipsecret('secrets', 'akamai_sqs_url', '') $pipelines = {
'static' => zulipsecret('secrets', 'akamai_static_sqs_url', ''),
'realm' => zulipsecret('secrets', 'akamai_realm_sqs_url', ''),
}
file { $conf: file { $conf:
ensure => file, ensure => file,

View File

@@ -1,65 +1,69 @@
[sources.vector_metrics] [sources.vector_metrics]
type = "internal_metrics" type = "internal_metrics"
[sources.s3_akamai_static] # Akamai Datastream2 logs all accesses into AWS S3:
# Akamai Datastream2 logs all accesses into AWS S3: # https://techdocs.akamai.com/datastream2/docs/stream-amazon-s3
# https://techdocs.akamai.com/datastream2/docs/stream-amazon-s3 #
# # The S3 bucket is configured to send event notifications to the SQS
# The S3 bucket is configured to send event notifications to the SQS # queue, which this host is allowed to read from. This consumer
# queue, which this host is allowed to read from. This consumer # deletes the messages from the queue, and the S3 bucket is
# deletes the messages from the queue, and the S3 bucket is # configured to purge old logs.
# configured to purge old logs. # https://vector.dev/docs/reference/configuration/sources/aws_s3/
# https://vector.dev/docs/reference/configuration/sources/aws_s3/
<% @pipelines.each do |key,sqs_url| %>
[sources.s3_akamai_<%= key %>]
type = "aws_s3" type = "aws_s3"
region = "us-east-1" region = "us-east-1"
compression = "gzip" compression = "gzip"
sqs.delete_message = true sqs.delete_message = true
sqs.poll_secs = 15 sqs.poll_secs = 15
sqs.queue_url = "<%= @sqs_url %>" sqs.queue_url = "<%= sqs_url %>"
[transforms.parsing] [transforms.parse_<%= key %>]
type = "remap" type = "remap"
inputs = ["s3_akamai_static"] inputs = ["s3_akamai_<%= key %>"]
source = ''' source = '''
. = parse_json!(string!(.message)) . = parse_json!(string!(.message))
.turnAroundTimeSec = to_int!(.turnAroundTimeMSec) / 1000.0 .turnAroundTimeSec = to_int!(.turnAroundTimeMSec) / 1000.0
''' '''
[transforms.logs2metrics-requests] [transforms.logs2metrics_<%= key %>]
type = "log_to_metric" type = "log_to_metric"
inputs = ["parsing"] inputs = ["parse_<%= key %>"]
[[transforms.logs2metrics-requests.metrics]] [[transforms.logs2metrics_<%= key %>.metrics]]
field = "cacheStatus" field = "cacheStatus"
name = "requests_cache_count" name = "requests_cache_count"
namespace = "akamai_static" namespace = "akamai_<%= key %>"
type = "counter" type = "counter"
[transforms.logs2metrics-requests.metrics.tags] [transforms.logs2metrics_<%= key %>.metrics.tags]
status_code = "{{statusCode}}" status_code = "{{statusCode}}"
cached = "{{cacheStatus}}" cached = "{{cacheStatus}}"
host = "{{reqHost}}" host = "{{reqHost}}"
[[transforms.logs2metrics-requests.metrics]] [[transforms.logs2metrics_<%= key %>.metrics]]
field = "bytes" field = "bytes"
name = "requests_bytes" name = "requests_bytes"
namespace = "akamai_static" namespace = "akamai_<%= key %>"
type = "counter" type = "counter"
increment_by_value = true increment_by_value = true
[transforms.logs2metrics-requests.metrics.tags] [transforms.logs2metrics_<%= key %>.metrics.tags]
status_code = "{{statusCode}}" status_code = "{{statusCode}}"
cached = "{{cacheStatus}}" cached = "{{cacheStatus}}"
host = "{{reqHost}}" host = "{{reqHost}}"
[[transforms.logs2metrics-requests.metrics]] [[transforms.logs2metrics_<%= key %>.metrics]]
field = "turnAroundTimeSec" field = "turnAroundTimeSec"
name = "turnaround_time_sec" name = "turnaround_time_sec"
namespace = "akamai_static" namespace = "akamai_<%= key %>"
type = "histogram" type = "histogram"
[transforms.logs2metrics-requests.metrics.tags] [transforms.logs2metrics_<%= key %>.metrics.tags]
status_code = "{{statusCode}}" status_code = "{{statusCode}}"
cached = "{{cacheStatus}}" cached = "{{cacheStatus}}"
host = "{{reqHost}}" host = "{{reqHost}}"
<% end %>
[sinks.prometheus_exporter] [sinks.prometheus_exporter]
type = "prometheus_exporter" type = "prometheus_exporter"
inputs = ["vector_metrics", "logs2metrics*"] inputs = ["vector_metrics", "logs2metrics*"]