-
Notifications
You must be signed in to change notification settings - Fork 22
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
use base otel with config via env (#285)
this allows us to avoid building a new image to include the otel config file. seeing metrics in my dev env ![Screenshot 2024-04-18 at 9 36 09 PM](https://github.com/chainguard-dev/terraform-infra-common/assets/16194785/088eaa54-3b60-4281-9d49-9a0599d0196e) Signed-off-by: Kenny Leung <kleung@chainguard.dev>
- Loading branch information
Showing 4 changed files with 113 additions and 10 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,83 @@ | ||
receivers:
  prometheus:
    config:
      scrape_configs:
        - job_name: "localhost"
          scrape_interval: 10s
          static_configs:
            # TODO: make this configurable
            - targets: ["localhost:2112"]
          # Do not relabel job and instance labels if they already exist.
          honor_labels: true
          metric_relabel_configs:
            # Drop collector/process/Go-runtime internals that are not
            # useful for application monitoring and inflate cardinality.
            - source_labels: [__name__]
              regex: '^prometheus_.*'
              action: drop
            - source_labels: [__name__]
              regex: '^process_.*'
              action: drop
            - source_labels: [__name__]
              regex: '^go_.*'
              action: drop

processors:
  batch:
    # Batch metrics before sending to reduce API usage.
    send_batch_max_size: 200
    send_batch_size: 200
    timeout: 5s

  memory_limiter:
    # Drop metrics if memory usage gets too high.
    check_interval: 1s
    limit_percentage: 65
    spike_limit_percentage: 20

  # Automatically detect Cloud Run resource metadata.
  resourcedetection:
    detectors: [env, gcp]

  resource:
    attributes:
      # Add instance_id as a resource attribute to avoid race conditions
      # between multiple otel sidecar instances uploading overlapping time
      # series to the same buckets.
      - key: service.instance.id
        from_attribute: faas.id
        action: upsert
      # The `gcp` resourcedetection processor sets `faas.name` to the name
      # of the Cloud Run service or the Cloud Run job.
      - from_attribute: faas.name
        # The googlemanagedprometheus exporter consumes the `service.name`
        # attribute and sets the `job` resource label to this value. (See
        # https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/pull/764)
        key: "service.name"
        action: upsert

exporters:
  googlemanagedprometheus:
    sending_queue:
      enabled: true
      # We are handling metrics for a single pod; no need to have too many
      # senders. This also avoids out-of-order data.
      num_consumers: 1

extensions:
  health_check:

service:
  telemetry:
    logs:
      # We don't want to see scraper startup logging on every cold start.
      level: "error"
      # Stack traces are less useful and break log lines.
      disable_stacktrace: true
      encoding: json

  extensions: [health_check]
  pipelines:
    metrics:
      receivers: [prometheus]
      # NOTE: memory_limiter must be the first processor in the pipeline so
      # memory checks (and any resulting drops/backpressure) happen before
      # batching and other processing, per the OTel Collector docs.
      processors: [memory_limiter, batch, resourcedetection, resource]
      exporters: [googlemanagedprometheus]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters