chore: mirror k8s-monitoring-2.0.12
upstream_repo:
@@ -0,0 +1,26 @@
{{/* Returns an alloy-formatted array of destination targets, given a list of destination names */}}
{{/* Inputs: destinations (array of destination definitions), names ([]string), type (string), ecosystem (string) */}}
{{- define "destinations.alloy.targets" -}}
{{- range $destination := .destinations }}
{{- if (has $destination.name $.names ) }}
{{- if eq (include (printf "destinations.%s.supports_%s" $destination.type $.type) $destination) "true" }}
{{ include (printf "destinations.%s.alloy.%s.%s.target" $destination.type $.ecosystem $.type) $destination | trim }},
{{- end }}
{{- end }}
{{- end }}
{{- end }}
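{{/* Usage sketch (added for illustration, not part of the upstream chart): for a destination
     named "my-loki" of type "loki", a call such as
       include "destinations.alloy.targets" (dict "destinations" $.Values.destinations "names" (list "my-loki") "type" "logs" "ecosystem" "loki")
     is expected to render something like "loki.write.my_loki.receiver," (the exact identifier
     depends on how "helper.alloy_name" normalizes the destination name). */}}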

{{/* Adds the Alloy components for destinations */}}
{{/* Inputs: destinations (array of destination definitions), names ([]string), clusterName (string), Release (Release object), Chart (Chart object), Files (Files object) */}}
{{- define "destinations.alloy.config" }}
{{- range $destination := .Values.destinations }}
{{- if (has $destination.name $.names ) }}
// Destination: {{ $destination.name }} ({{ $destination.type }})
{{- include (printf "destinations.%s.alloy" $destination.type) (deepCopy $ | merge (dict "destination" $destination)) | indent 0 }}

{{- if eq (include "secrets.usesKubernetesSecret" $destination) "true" }}
{{- include "secret.alloy" (deepCopy $ | merge (dict "object" $destination)) | nindent 0 }}
{{- end }}
{{- end }}
{{- end }}
{{- end }}
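{{/* Usage sketch (illustrative only): a caller that already has the chart root context might
     invoke this as
       include "destinations.alloy.config" (deepCopy $ | merge (dict "names" (list "my-loki")))
     which emits a "// Destination: my-loki (loki)" header followed by that destination's
     Alloy components, plus a Kubernetes Secret reader when the destination references one. */}}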
@@ -0,0 +1,40 @@
{{/* Inputs: destinations (array of destination names), type (string), feature (string) */}}
{{- define "destinations.validate_destination_list" -}}
{{- if empty .destinations }}
{{- $msg := list "" (printf "No destinations found that can accept %s from %s" .type .feature) }}
{{- $msg = append $msg (printf "Please add a destination with %s support." .type) }}
{{- $msg = append $msg "See https://github.com/grafana/k8s-monitoring-helm/blob/main/charts/k8s-monitoring/docs/destinations/README.md for more details." }}
{{- fail (join "\n" $msg) }}
{{- end }}
{{- end }}
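{{/* Usage sketch (illustrative only): a feature with no compatible destination, e.g.
       include "destinations.validate_destination_list" (dict "destinations" (list) "type" "profiles" "feature" "Profiling")
     aborts rendering with "No destinations found that can accept profiles from Profiling"
     plus the pointer to the destinations documentation. */}}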

{{/* Inputs: destinations (array of destination definitions), type (string), ecosystem (string), filter (list of destination names) */}}
{{- define "destinations.get" -}}
{{- $destinations := list }}
{{- $backupDestinations := list }}
{{- range $destination := .destinations }}
{{- /* Does this destination support the telemetry data type? */}}
{{- if eq (include (printf "destinations.%s.supports_%s" $destination.type $.type) $destination) "true" }}
{{- if empty $.filter }}
{{- /* Is this destination in the ecosystem? */}}
{{- if eq $.ecosystem (include (printf "destinations.%s.ecosystem" $destination.type) .) }}
{{- $destinations = append $destinations $destination.name }}
{{- else }}
{{- $backupDestinations = append $backupDestinations $destination.name }}
{{- end }}

{{- /* Did the data source choose this destination? */}}
{{- else if has $destination.name $.filter }}
{{- $destinations = append $destinations $destination.name }}
{{- end }}
{{- end }}
{{- end }}
{{- if not (empty $destinations) }}
{{- $destinations | toYaml | indent 0 }}
{{- end }}
{{- /* Output non-ecosystem matching destinations if no ecosystem destinations are found */}}
{{- if and (empty $destinations) (not (empty $backupDestinations)) }}
{{- $backupDestinations | toYaml | indent 0 }}
{{- end }}

{{- end }}
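{{/* Usage sketch (illustrative only): with one "loki" and one "otlp" destination that both
     accept logs, a call like
       include "destinations.get" (dict "destinations" $.Values.destinations "type" "logs" "ecosystem" "loki" "filter" (list))
     should return a YAML array holding only the loki destination's name; the otlp destination
     would be returned instead only if no loki-ecosystem destination accepted logs, and an
     explicit "filter" list bypasses the ecosystem preference entirely. */}}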
@@ -0,0 +1,148 @@
{{- define "destinations.loki.alloy" }}
{{- $defaultValues := "destinations/loki-values.yaml" | .Files.Get | fromYaml }}
{{- with merge .destination $defaultValues }}
otelcol.exporter.loki {{ include "helper.alloy_name" .name | quote }} {
forward_to = [{{ include "destinations.loki.alloy.loki.logs.target" . }}]
}
{{- if .logProcessingStages }}

loki.process {{ include "helper.alloy_name" .name | quote }} {
{{ .logProcessingStages | indent 2 }}
forward_to = [loki.write.{{ include "helper.alloy_name" .name }}.receiver]
}
{{- end }}

loki.write {{ include "helper.alloy_name" .name | quote }} {
endpoint {
{{- if .urlFrom }}
url = {{ .urlFrom }}
{{- else }}
url = {{ .url | quote }}
{{- end }}
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "tenantId")) "true" }}
tenant_id = {{ include "secrets.read" (dict "object" . "key" "tenantId" "nonsensitive" true) }}
{{- end }}
{{- if or .extraHeaders .extraHeadersFrom }}
headers = {
{{- range $key, $value := .extraHeaders }}
{{ $key | quote }} = {{ $value | quote }},
{{- end }}
{{- range $key, $value := .extraHeadersFrom }}
{{ $key | quote }} = {{ $value }},
{{- end }}
}
{{- end }}
{{- if .proxyURL }}
proxy_url = {{ .proxyURL | quote }}
{{- end }}
{{- if eq (include "secrets.authType" .) "basic" }}
basic_auth {
username = {{ include "secrets.read" (dict "object" . "key" "auth.username" "nonsensitive" true) }}
password = {{ include "secrets.read" (dict "object" . "key" "auth.password") }}
}
{{- else if eq (include "secrets.authType" .) "bearerToken" }}
{{- if .auth.bearerTokenFile }}
bearer_token_file = {{ .auth.bearerTokenFile | quote }}
{{- else }}
bearer_token = {{ include "secrets.read" (dict "object" . "key" "auth.bearerToken") }}
{{- end }}
{{- else if eq (include "secrets.authType" .) "oauth2" }}
oauth2 {
client_id = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientId" "nonsensitive" true) }}
{{- if eq .auth.oauth2.clientSecretFile "" }}
client_secret = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientSecret") }}
{{- else }}
client_secret_file = {{ .auth.oauth2.clientSecretFile | quote }}
{{- end }}
{{- if .auth.oauth2.endpointParams }}
endpoint_params = {
{{- range $k, $v := .auth.oauth2.endpointParams }}
{{ $k }} = {{ $v | toJson }},
{{- end }}
}
{{- end }}
{{- if .auth.oauth2.proxyURL }}
proxy_url = {{ .auth.oauth2.proxyURL | quote }}
{{- end }}
{{- if .auth.oauth2.noProxy }}
no_proxy = {{ .auth.oauth2.noProxy | quote }}
{{- end }}
{{- if .auth.oauth2.proxyFromEnvironment }}
proxy_from_environment = {{ .auth.oauth2.proxyFromEnvironment }}
{{- end }}
{{- if .auth.oauth2.proxyConnectHeader }}
proxy_connect_header = {{ .auth.oauth2.proxyConnectHeader | toJson }}
{{- end }}
{{- if .auth.oauth2.scopes }}
scopes = {{ .auth.oauth2.scopes | toJson }}
{{- end }}
{{- if .auth.oauth2.tokenURL }}
token_url = {{ .auth.oauth2.tokenURL | quote }}
{{- end }}
}
{{- end }}
{{- if .tls }}
tls_config {
insecure_skip_verify = {{ .tls.insecureSkipVerify | default false }}
{{- if .tls.caFile }}
ca_file = {{ .tls.caFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.ca")) "true" }}
ca_pem = {{ include "secrets.read" (dict "object" . "key" "tls.ca" "nonsensitive" true) }}
{{- end }}
{{- if .tls.certFile }}
cert_file = {{ .tls.certFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.cert")) "true" }}
cert_pem = {{ include "secrets.read" (dict "object" . "key" "tls.cert" "nonsensitive" true) }}
{{- end }}
{{- if .tls.keyFile }}
key_file = {{ .tls.keyFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.key")) "true" }}
key_pem = {{ include "secrets.read" (dict "object" . "key" "tls.key") }}
{{- end }}
}
{{- end }}
}
external_labels = {
cluster = {{ $.Values.cluster.name | quote }},
"k8s_cluster_name" = {{ $.Values.cluster.name | quote }},
{{- if .extraLabels }}
{{- range $k, $v := .extraLabels }}
{{ $k }} = {{ $v | quote }},
{{- end }}
{{- end }}
{{- if .extraLabelsFrom }}
{{- range $k, $v := .extraLabelsFrom }}
{{ $k }} = {{ $v }},
{{- end }}
{{- end }}
}
}
{{- end }}
{{- end }}
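{{/* Example values (illustrative sketch; the hostname and credentials are placeholders):
     destinations:
       - name: local-loki
         type: loki
         url: http://loki.logging.svc:3100/loki/api/v1/push
         auth:
           type: basic
           username: my-username
           password: my-password
     This is expected to render a loki.write component (named via "helper.alloy_name") with a
     basic_auth block, plus a loki.process stage when logProcessingStages is set. */}}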

{{- define "secrets.list.loki" -}}
- tenantId
- auth.username
- auth.password
- auth.bearerToken
- auth.oauth2.clientId
- auth.oauth2.clientSecret
- tls.ca
- tls.cert
- tls.key
{{- end -}}

{{- define "destinations.loki.alloy.loki.logs.target" }}
{{- if .logProcessingStages -}}
loki.process.{{ include "helper.alloy_name" .name }}.receiver
{{- else -}}
loki.write.{{ include "helper.alloy_name" .name }}.receiver
{{- end -}}
{{- end -}}
{{- define "destinations.loki.alloy.otlp.logs.target" }}otelcol.exporter.loki.{{ include "helper.alloy_name" .name }}.input{{ end -}}

{{- define "destinations.loki.supports_metrics" }}false{{ end -}}
{{- define "destinations.loki.supports_logs" }}true{{ end -}}
{{- define "destinations.loki.supports_traces" }}false{{ end -}}
{{- define "destinations.loki.supports_profiles" }}false{{ end -}}
{{- define "destinations.loki.ecosystem" }}loki{{ end -}}
@@ -0,0 +1,422 @@
{{- define "destinations.otlp.alloy" }}
{{- $defaultValues := "destinations/otlp-values.yaml" | .Files.Get | fromYaml }}
{{- with merge .destination $defaultValues }}
{{- if eq (include "destinations.otlp.supports_metrics" .) "true" }}
otelcol.receiver.prometheus {{ include "helper.alloy_name" .name | quote }} {
output {
metrics = [{{ include "destinations.otlp.alloy.otlp.metrics.target" . | trim }}]
}
}
{{- end }}
{{- if eq (include "destinations.otlp.supports_logs" .) "true" }}
otelcol.receiver.loki {{ include "helper.alloy_name" .name | quote }} {
output {
logs = [{{ include "destinations.otlp.alloy.otlp.logs.target" . | trim }}]
}
}
{{- end }}
{{- if eq (include "secrets.authType" .) "basic" }}
otelcol.auth.basic {{ include "helper.alloy_name" .name | quote }} {
username = {{ include "secrets.read" (dict "object" . "key" "auth.username" "nonsensitive" true) }}
password = {{ include "secrets.read" (dict "object" . "key" "auth.password") }}
}
{{- else if eq (include "secrets.authType" .) "bearerToken" }}
{{- if .auth.bearerTokenFile }}
local.file {{ include "helper.alloy_name" .name | quote }} {
filename = {{ .auth.bearerTokenFile | quote }}
}
otelcol.auth.bearer {{ include "helper.alloy_name" .name | quote }} {
token = local.file.{{ include "helper.alloy_name" .name }}.content
}
{{- else }}
otelcol.auth.bearer {{ include "helper.alloy_name" .name | quote }} {
token = {{ include "secrets.read" (dict "object" . "key" "auth.bearerToken") }}
}
{{- end }}
{{- else if eq (include "secrets.authType" .) "oauth2" }}
otelcol.auth.oauth2 {{ include "helper.alloy_name" .name | quote }} {
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "auth.oauth2.clientId")) "true" }}
client_id = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientId" "nonsensitive" true) }}
{{- end }}
{{- if .auth.oauth2.clientSecretFile }}
client_secret_file = {{ .auth.oauth2.clientSecretFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "auth.oauth2.clientSecret")) "true" }}
client_secret = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientSecret") }}
{{- end }}
{{- if .auth.oauth2.endpointParams }}
endpoint_params = {
{{- range $k, $v := .auth.oauth2.endpointParams }}
{{ $k }} = {{ $v | toJson }},
{{- end }}
}
{{- end }}
{{- if .auth.oauth2.scopes }}
scopes = {{ .auth.oauth2.scopes | toJson }}
{{- end }}
{{- if .auth.oauth2.tokenURL }}
token_url = {{ .auth.oauth2.tokenURL | quote }}
{{- end }}
}
{{- end }}

otelcol.processor.attributes {{ include "helper.alloy_name" .name | quote }} {
{{- range $action := .processors.attributes.actions }}
action {
key = {{ $action.key | quote }}
action = {{ $action.action | quote }}
{{- if $action.value }}
value = {{ $action.value | quote }}
{{- else if $action.valueFrom }}
value = {{ $action.valueFrom }}
{{- end }}
{{- if $action.pattern }}
pattern = {{ $action.pattern | quote }}
{{- end }}
{{- if $action.fromAttribute }}
from_attribute = {{ $action.fromAttribute | quote }}
{{- end }}
{{- if $action.fromContext }}
from_context = {{ $action.fromContext | quote }}
{{- end }}
{{- if $action.convertedType }}
converted_type = {{ $action.convertedType | quote }}
{{- end }}
}
{{- end }}
output {
metrics = [otelcol.processor.transform.{{ include "helper.alloy_name" .name }}.input]
logs = [otelcol.processor.transform.{{ include "helper.alloy_name" .name }}.input]
traces = [otelcol.processor.transform.{{ include "helper.alloy_name" .name }}.input]
}
}

otelcol.processor.transform {{ include "helper.alloy_name" .name | quote }} {
error_mode = "ignore"
{{- if ne .metrics.enabled false }}
metric_statements {
context = "resource"
statements = [
`set(attributes["cluster"], {{ $.Values.cluster.name | quote }})`,
`set(attributes["k8s.cluster.name"], {{ $.Values.cluster.name | quote }})`,
{{- if .processors.transform.metrics.resource }}
{{- range $transform := .processors.transform.metrics.resource }}
{{ $transform | quote | indent 6 }},
{{- end }}
{{- end }}
]
}

{{- if .processors.transform.metrics.metric }}
metric_statements {
context = "metric"
statements = [

{{- range $transform := .processors.transform.metrics.metric }}
{{ $transform | quote | indent 6 }},
{{- end }}
]
}
{{- end }}

metric_statements {
context = "datapoint"
statements = [
`set(attributes["cluster"], {{ $.Values.cluster.name | quote }})`,
`set(attributes["k8s.cluster.name"], {{ $.Values.cluster.name | quote }})`,
{{- if .processors.transform.metrics.datapoint }}
{{- range $transform := .processors.transform.metrics.datapoint }}
{{ $transform | quote | indent 6 }},
{{- end }}
{{- end }}
]
}

{{- end }}
{{- if ne .logs.enabled false }}
log_statements {
context = "resource"
statements = [
`set(attributes["cluster"], {{ $.Values.cluster.name | quote }})`,
`set(attributes["k8s.cluster.name"], {{ $.Values.cluster.name | quote }})`,
{{- if .processors.transform.logs.resource }}
{{- range $transform := .processors.transform.logs.resource }}
{{ $transform | quote | indent 6 }},
{{- end }}
{{- end }}
]
}

log_statements {
context = "log"
statements = [
`delete_key(attributes, "loki.attribute.labels")`,
`set(resource.attributes["service.name"], attributes["service_name"]) where resource.attributes["service.name"] == nil and attributes["service_name"] != nil`,
`delete_key(attributes, "service_name") where attributes["service_name"] != nil`,
`set(resource.attributes["service.namespace"], attributes["service_namespace"] ) where resource.attributes["service.namespace"] == nil and attributes["service_namespace"] != nil`,
`delete_key(attributes, "service_namespace") where attributes["service_namespace"] != nil`,
`set(resource.attributes["deployment.environment.name"], attributes["deployment_environment_name"] ) where resource.attributes["deployment.environment.name"] == nil and attributes["deployment_environment_name"] != nil`,
`delete_key(attributes, "deployment_environment_name") where attributes["deployment_environment_name"] != nil`,
`set(resource.attributes["deployment.environment"], attributes["deployment_environment"] ) where resource.attributes["deployment.environment"] == nil and attributes["deployment_environment"] != nil`,
`delete_key(attributes, "deployment_environment") where attributes["deployment_environment"] != nil`,
{{- if .processors.transform.logs.log }}
{{- range $transform := .processors.transform.logs.log }}
{{ $transform | quote | indent 6 }},
{{- end }}
{{- end }}
]
}
{{- if .processors.transform.logs.scope }}
log_statements {
context = "scope"
statements = [
{{- range $transform := .processors.transform.logs.scope }}
{{ $transform | quote | indent 6 }},
{{- end }}
]
}
{{- end }}
{{- end }}
{{- if ne .traces.enabled false }}

trace_statements {
context = "resource"
statements = [
`set(attributes["cluster"], {{ $.Values.cluster.name | quote }})`,
`set(attributes["k8s.cluster.name"], {{ $.Values.cluster.name | quote }})`,
{{- if .processors.transform.traces.resource }}
{{- range $transform := .processors.transform.traces.resource }}
{{ $transform | quote | indent 6 }},
{{- end }}
{{- end }}
]
}
{{- if .processors.transform.traces.span }}
trace_statements {
context = "span"
statements = [
{{- range $transform := .processors.transform.traces.span }}
{{ $transform | quote | indent 6 }},
{{- end }}
]
}
{{- end }}
{{- if .processors.transform.traces.spanevent }}
trace_statements {
context = "spanevent"
statements = [
{{- range $transform := .processors.transform.traces.spanevent }}
{{ $transform | quote | indent 6 }},
{{- end }}
]
}
{{- end }}
{{- end }}
{{- if .processors.filters.enabled }}

output {
{{- if ne .metrics.enabled false }}
metrics = [otelcol.processor.filter.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
{{- if ne .logs.enabled false }}
logs = [otelcol.processor.filter.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
{{- if ne .traces.enabled false }}
traces = [otelcol.processor.filter.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
}
}

otelcol.processor.filter {{ include "helper.alloy_name" .name | quote }} {
{{- if and .metrics.enabled (or .processors.filters.metrics.metric .processors.filters.metrics.datapoint) }}
metrics {
{{- if .processors.filters.metrics.metric }}
metric = [
{{- range $filter := .processors.filters.metrics.metric }}
{{ $filter | quote | indent 6 }},
{{- end }}
]
{{- end }}
{{- if .processors.filters.metrics.datapoint }}
datapoint = [
{{- range $filter := .processors.filters.metrics.datapoint }}
{{ $filter | quote | indent 6 }},
{{- end }}
]
{{- end }}
}
{{- end }}
{{- if and .logs.enabled .processors.filters.logs.log_record }}
logs {
log_record = [
{{- range $filter := .processors.filters.logs.log_record }}
{{ $filter | quote | indent 6 }},
{{- end }}
]
}
{{- end }}
{{- if and .traces.enabled (or .processors.filters.traces.span .processors.filters.traces.spanevent) }}
traces {
{{- if .processors.filters.traces.span }}
span = [
{{- range $filter := .processors.filters.traces.span }}
{{ $filter | quote | indent 6 }},
{{- end }}
]
{{- end }}
{{- if .processors.filters.traces.spanevent }}
spanevent = [
{{- range $filter := .processors.filters.traces.spanevent }}
{{ $filter | quote | indent 6 }},
{{- end }}
]
{{- end }}
}
{{- end }}
{{- end }}
{{- if .processors.batch.enabled }}

output {
{{- if ne .metrics.enabled false }}
metrics = [otelcol.processor.batch.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
{{- if ne .logs.enabled false }}
logs = [otelcol.processor.batch.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
{{- if ne .traces.enabled false }}
traces = [otelcol.processor.batch.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
}
}

otelcol.processor.batch {{ include "helper.alloy_name" .name | quote }} {
timeout = {{ .processors.batch.timeout | quote }}
send_batch_size = {{ .processors.batch.size | int }}
send_batch_max_size = {{ .processors.batch.maxSize | int }}
{{- end }}
{{- if .processors.memoryLimiter.enabled }}

output {
{{- if ne .metrics.enabled false }}
metrics = [otelcol.processor.memory_limiter.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
{{- if ne .logs.enabled false }}
logs = [otelcol.processor.memory_limiter.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
{{- if ne .traces.enabled false }}
traces = [otelcol.processor.memory_limiter.{{ include "helper.alloy_name" .name }}.input]
{{- end }}
}
}

otelcol.processor.memory_limiter {{ include "helper.alloy_name" .name | quote }} {
check_interval = {{ .processors.memoryLimiter.checkInterval | quote }}
limit = {{ .processors.memoryLimiter.limit | quote }}
{{- end }}

output {
{{- $target := "" }}
{{- if eq .protocol "grpc" }}
{{- $target = printf "otelcol.exporter.otlp.%s.input" (include "helper.alloy_name" .name) }}
{{- else if eq .protocol "http" }}
{{- $target = printf "otelcol.exporter.otlphttp.%s.input" (include "helper.alloy_name" .name) }}
{{- end }}
{{- if ne .metrics.enabled false }}
metrics = [{{ $target }}]
{{- end }}
{{- if ne .logs.enabled false }}
logs = [{{ $target }}]
{{- end }}
{{- if ne .traces.enabled false }}
traces = [{{ $target }}]
{{- end }}
}
}

{{- if eq .protocol "grpc" }}
otelcol.exporter.otlp {{ include "helper.alloy_name" .name | quote }} {
{{- else if eq .protocol "http" }}
otelcol.exporter.otlphttp {{ include "helper.alloy_name" .name | quote }} {
{{- end }}
client {
{{- if .urlFrom }}
endpoint = {{ .urlFrom }}
{{- else }}
endpoint = {{ .url | quote }}
{{- end }}
{{- if eq .auth.type "basic" }}
auth = otelcol.auth.basic.{{ include "helper.alloy_name" .name }}.handler
{{- else if eq .auth.type "bearerToken" }}
auth = otelcol.auth.bearer.{{ include "helper.alloy_name" .name }}.handler
{{- else if eq .auth.type "oauth2" }}
auth = otelcol.auth.oauth2.{{ include "helper.alloy_name" .name }}.handler
{{- end }}
{{- if or (eq (include "secrets.usesSecret" (dict "object" . "key" "tenantId")) "true") .extraHeaders .extraHeadersFrom }}
headers = {
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "tenantId")) "true" }}
"X-Scope-OrgID" = {{ include "secrets.read" (dict "object" . "key" "tenantId" "nonsensitive" true) }},
{{- end }}
{{- range $key, $value := .extraHeaders }}
{{ $key | quote }} = {{ $value | quote }},
{{- end }}
{{- range $key, $value := .extraHeadersFrom }}
{{ $key | quote }} = {{ $value }},
{{- end }}
}
{{- end }}
{{- if .readBufferSize }}
read_buffer_size = {{ .readBufferSize | quote }}
{{- end }}
{{- if .writeBufferSize }}
write_buffer_size = {{ .writeBufferSize | quote }}
{{- end }}

{{- if .tls }}
tls {
insecure = {{ .tls.insecure | default false }}
insecure_skip_verify = {{ .tls.insecureSkipVerify | default false }}
{{- if .tls.caFile }}
ca_file = {{ .tls.caFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.ca")) "true" }}
ca_pem = {{ include "secrets.read" (dict "object" . "key" "tls.ca" "nonsensitive" true) }}
{{- end }}
{{- if .tls.certFile }}
cert_file = {{ .tls.certFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.cert")) "true" }}
cert_pem = {{ include "secrets.read" (dict "object" . "key" "tls.cert" "nonsensitive" true) }}
{{- end }}
{{- if .tls.keyFile }}
key_file = {{ .tls.keyFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.key")) "true" }}
key_pem = {{ include "secrets.read" (dict "object" . "key" "tls.key") }}
{{- end }}
}
{{- end }}
}
}
{{- end }}
{{- end }}
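{{/* Example values (illustrative sketch; the endpoint is a placeholder):
     destinations:
       - name: otel-collector
         type: otlp
         url: my-collector.example.svc:4317
         protocol: grpc
     With protocol "grpc" this renders an otelcol.exporter.otlp component, and with "http" an
     otelcol.exporter.otlphttp component, fed by the attributes/transform processors above and
     by the optional filter, batch, and memory_limiter processors when they are enabled. */}}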

{{- define "secrets.list.otlp" -}}
- tenantId
- auth.username
- auth.password
- auth.bearerToken
- auth.oauth2.clientId
- auth.oauth2.clientSecret
- tls.ca
- tls.cert
- tls.key
{{- end -}}

{{- define "destinations.otlp.alloy.prometheus.metrics.target" }}otelcol.receiver.prometheus.{{ include "helper.alloy_name" .name }}.receiver{{ end }}
{{- define "destinations.otlp.alloy.loki.logs.target" }}otelcol.receiver.loki.{{ include "helper.alloy_name" .name }}.receiver{{ end }}
{{- define "destinations.otlp.alloy.otlp.target" }}otelcol.processor.attributes.{{ include "helper.alloy_name" .name }}.input{{ end }}
{{- define "destinations.otlp.alloy.otlp.metrics.target" }}{{ include "destinations.otlp.alloy.otlp.target" . }}{{- end }}
{{- define "destinations.otlp.alloy.otlp.logs.target" }}{{ include "destinations.otlp.alloy.otlp.target" . }}{{- end }}
{{- define "destinations.otlp.alloy.otlp.traces.target" }}{{ include "destinations.otlp.alloy.otlp.target" . }}{{- end }}

{{- define "destinations.otlp.supports_metrics" }}{{ dig "metrics" "enabled" "false" . }}{{ end -}}
{{- define "destinations.otlp.supports_logs" }}{{ dig "logs" "enabled" "false" . }}{{ end -}}
{{- define "destinations.otlp.supports_traces" }}{{ dig "traces" "enabled" "true" . }}{{ end -}}
{{- define "destinations.otlp.supports_profiles" }}false{{ end -}}
{{- define "destinations.otlp.ecosystem" }}otlp{{ end -}}
@@ -0,0 +1,188 @@
{{- define "destinations.prometheus.alloy" }}
{{- $defaultValues := "destinations/prometheus-values.yaml" | .Files.Get | fromYaml }}
{{- with merge .destination $defaultValues }}
otelcol.exporter.prometheus {{ include "helper.alloy_name" .name | quote }} {
add_metric_suffixes = {{ .openTelemetryConversion.addMetricSuffixes }}
forward_to = [prometheus.remote_write.{{ include "helper.alloy_name" .name }}.receiver]
}

prometheus.remote_write {{ include "helper.alloy_name" .name | quote }} {
endpoint {
{{- if .urlFrom }}
url = {{ .urlFrom }}
{{- else }}
url = {{ .url | quote }}
{{- end }}
headers = {
{{- if ne (include "secrets.authType" .) "sigv4" }}
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "tenantId")) "true" }}
"X-Scope-OrgID" = {{ include "secrets.read" (dict "object" . "key" "tenantId" "nonsensitive" true) }},
{{- end }}
{{- end }}
{{- range $key, $value := .extraHeaders }}
{{ $key | quote }} = {{ $value | quote }},
{{- end }}
{{- range $key, $value := .extraHeadersFrom }}
{{ $key | quote }} = {{ $value }},
{{- end }}
}
{{- if .proxyURL }}
proxy_url = {{ .proxyURL | quote }}
{{- end }}
{{- if eq (include "secrets.authType" .) "basic" }}
basic_auth {
username = {{ include "secrets.read" (dict "object" . "key" "auth.username" "nonsensitive" true) }}
password = {{ include "secrets.read" (dict "object" . "key" "auth.password") }}
}
{{- else if eq (include "secrets.authType" .) "bearerToken" }}
{{- if .auth.bearerTokenFile }}
bearer_token_file = {{ .auth.bearerTokenFile | quote }}
{{- else }}
bearer_token = {{ include "secrets.read" (dict "object" . "key" "auth.bearerToken") }}
{{- end }}
{{- else if eq (include "secrets.authType" .) "oauth2" }}
oauth2 {
client_id = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientId" "nonsensitive" true) }}
{{- if eq .auth.oauth2.clientSecretFile "" }}
client_secret = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientSecret") }}
{{- else }}
client_secret_file = {{ .auth.oauth2.clientSecretFile | quote }}
{{- end }}
{{- if .auth.oauth2.endpointParams }}
endpoint_params = {
{{- range $k, $v := .auth.oauth2.endpointParams }}
{{ $k }} = {{ $v | toJson }},
{{- end }}
}
{{- end }}
{{- if .auth.oauth2.proxyURL }}
proxy_url = {{ .auth.oauth2.proxyURL | quote }}
{{- end }}
{{- if .auth.oauth2.noProxy }}
no_proxy = {{ .auth.oauth2.noProxy | quote }}
{{- end }}
{{- if .auth.oauth2.proxyFromEnvironment }}
proxy_from_environment = {{ .auth.oauth2.proxyFromEnvironment }}
{{- end }}
{{- if .auth.oauth2.proxyConnectHeader }}
proxy_connect_header = {{ .auth.oauth2.proxyConnectHeader | toJson }}
{{- end }}
{{- if .auth.oauth2.scopes }}
scopes = {{ .auth.oauth2.scopes | toJson }}
{{- end }}
{{- if .auth.oauth2.tokenURL }}
token_url = {{ .auth.oauth2.tokenURL | quote }}
{{- end }}
}
{{- else if eq (include "secrets.authType" .) "sigv4" }}
sigv4 {
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "auth.sigv4.accessKey")) "true" }}
access_key = {{ include "secrets.read" (dict "object" . "key" "auth.sigv4.accessKey" "nonsensitive" true) }}
{{- end }}
{{- if .auth.sigv4.profile }}
profile = {{ .auth.sigv4.profile | quote }}
{{- end }}
{{- if .auth.sigv4.region }}
region = {{ .auth.sigv4.region | quote }}
{{- end }}
{{- if .auth.sigv4.roleArn }}
role_arn = {{ .auth.sigv4.roleArn | quote }}
{{- end }}
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "auth.sigv4.secretKey")) "true" }}
secret_key = {{ include "secrets.read" (dict "object" . "key" "auth.sigv4.secretKey") }}
{{- end }}
}
{{- end }}

{{- if .tls }}
tls_config {
insecure_skip_verify = {{ .tls.insecureSkipVerify | default false }}
{{- if .tls.caFile }}
ca_file = {{ .tls.caFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.ca")) "true" }}
ca_pem = {{ include "secrets.read" (dict "object" . "key" "tls.ca" "nonsensitive" true) }}
{{- end }}
{{- if .tls.certFile }}
cert_file = {{ .tls.certFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.cert")) "true" }}
cert_pem = {{ include "secrets.read" (dict "object" . "key" "tls.cert" "nonsensitive" true) }}
{{- end }}
{{- if .tls.keyFile }}
key_file = {{ .tls.keyFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.key")) "true" }}
key_pem = {{ include "secrets.read" (dict "object" . "key" "tls.key") }}
{{- end }}
}
{{- end }}
send_native_histograms = {{ .sendNativeHistograms | default false }}

queue_config {
capacity = {{ .queueConfig.capacity | default 10000 }}
min_shards = {{ .queueConfig.minShards | default 1 }}
max_shards = {{ .queueConfig.maxShards | default 50 }}
max_samples_per_send = {{ .queueConfig.maxSamplesPerSend | default 2000 }}
batch_send_deadline = {{ .queueConfig.batchSendDeadline | default "5s" | quote }}
min_backoff = {{ .queueConfig.minBackoff | default "30ms" | quote }}
max_backoff = {{ .queueConfig.maxBackoff | default "5s" | quote }}
retry_on_http_429 = {{ .queueConfig.retryOnHttp429 | default true }}
sample_age_limit = {{ .queueConfig.sampleAgeLimit | default "0s" | quote }}
}

write_relabel_config {
source_labels = ["cluster"]
regex = ""
replacement = {{ $.Values.cluster.name | quote }}
target_label = "cluster"
}
write_relabel_config {
source_labels = ["k8s.cluster.name"]
regex = ""
replacement = {{ $.Values.cluster.name | quote }}
target_label = "cluster"
}
{{- if .metricProcessingRules }}
{{ .metricProcessingRules | indent 4 }}
{{- end }}
}

wal {
truncate_frequency = {{ .writeAheadLog.truncateFrequency | quote }}
min_keepalive_time = {{ .writeAheadLog.minKeepaliveTime | quote }}
max_keepalive_time = {{ .writeAheadLog.maxKeepaliveTime | quote }}
}
{{- if or .extraLabels .extraLabelsFrom }}
external_labels = {
{{- range $key, $value := .extraLabels }}
{{ $key }} = {{ $value | quote }},
{{- end }}
{{- range $key, $value := .extraLabelsFrom }}
{{ $key }} = {{ $value }},
{{- end }}
}
{{- end }}
}
{{- end }}
{{- end }}
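{{/* Example values (illustrative sketch; the URL and tenant are placeholders):
     destinations:
       - name: local-mimir
         type: prometheus
         url: http://mimir.monitoring.svc:8080/api/v1/push
         tenantId: my-tenant
     This renders a prometheus.remote_write component whose endpoint sends the tenant as the
     X-Scope-OrgID header and relabels every sample's "cluster" label to .Values.cluster.name. */}}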

{{- define "secrets.list.prometheus" -}}
- tenantId
- auth.username
- auth.password
- auth.bearerToken
- auth.oauth2.clientId
- auth.oauth2.clientSecret
- auth.sigv4.accessKey
- auth.sigv4.secretKey
- tls.ca
- tls.cert
- tls.key
{{- end -}}

{{- define "destinations.prometheus.alloy.prometheus.metrics.target" }}prometheus.remote_write.{{ include "helper.alloy_name" .name }}.receiver{{ end -}}
{{- define "destinations.prometheus.alloy.otlp.metrics.target" }}otelcol.exporter.prometheus.{{ include "helper.alloy_name" .name }}.input{{ end -}}

{{- define "destinations.prometheus.supports_metrics" }}true{{ end -}}
{{- define "destinations.prometheus.supports_logs" }}false{{ end -}}
{{- define "destinations.prometheus.supports_traces" }}false{{ end -}}
{{- define "destinations.prometheus.supports_profiles" }}false{{ end -}}
{{- define "destinations.prometheus.ecosystem" }}prometheus{{ end -}}
@@ -0,0 +1,120 @@
{{- define "destinations.pyroscope.alloy" }}
{{- $defaultValues := "destinations/pyroscope-values.yaml" | .Files.Get | fromYaml }}
{{- with merge .destination $defaultValues }}
pyroscope.write {{ include "helper.alloy_name" .name | quote }} {
endpoint {
{{- if .urlFrom }}
url = {{ .urlFrom }}
{{- else }}
url = {{ .url | quote }}
{{- end }}
headers = {
{{- if eq (include "secrets.usesSecret" (dict "object" . "key" "tenantId")) "true" }}
"X-Scope-OrgID" = {{ include "secrets.read" (dict "object" . "key" "tenantId" "nonsensitive" true) }},
{{- end }}
{{- range $key, $value := .extraHeaders }}
{{ $key | quote }} = {{ $value | quote }},
{{- end }}
{{- range $key, $value := .extraHeadersFrom }}
{{ $key | quote }} = {{ $value }},
{{- end }}
}

{{- if eq (include "secrets.authType" .) "basic" }}
basic_auth {
username = {{ include "secrets.read" (dict "object" . "key" "auth.username" "nonsensitive" true) }}
password = {{ include "secrets.read" (dict "object" . "key" "auth.password") }}
}
{{- else if eq (include "secrets.authType" .) "bearerToken" }}
bearer_token = {{ include "secrets.read" (dict "object" . "key" "auth.bearerToken") }}
{{- else if eq (include "secrets.authType" .) "oauth2" }}
oauth2 {
client_id = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientId" "nonsensitive" true) }}
{{- if eq .auth.oauth2.clientSecretFile "" }}
client_secret = {{ include "secrets.read" (dict "object" . "key" "auth.oauth2.clientSecret") }}
{{- else }}
client_secret_file = {{ .auth.oauth2.clientSecretFile | quote }}
{{- end }}
{{- if .auth.oauth2.endpointParams }}
endpoint_params = {
{{- range $k, $v := .auth.oauth2.endpointParams }}
{{ $k }} = {{ $v | toJson }},
{{- end }}
}
{{- end }}
{{- if .auth.oauth2.proxyURL }}
proxy_url = {{ .auth.oauth2.proxyURL | quote }}
{{- end }}
{{- if .auth.oauth2.noProxy }}
no_proxy = {{ .auth.oauth2.noProxy | quote }}
{{- end }}
{{- if .auth.oauth2.proxyFromEnvironment }}
proxy_from_environment = {{ .auth.oauth2.proxyFromEnvironment }}
{{- end }}
{{- if .auth.oauth2.proxyConnectHeader }}
proxy_connect_header = {{ .auth.oauth2.proxyConnectHeader | toJson }}
{{- end }}
{{- if .auth.oauth2.scopes }}
scopes = {{ .auth.oauth2.scopes | toJson }}
{{- end }}
{{- if .auth.oauth2.tokenURL }}
token_url = {{ .auth.oauth2.tokenURL | quote }}
{{- end }}
}
{{- end }}

{{- if .tls }}
tls_config {
insecure_skip_verify = {{ .tls.insecureSkipVerify | default false }}
{{- if .tls.caFile }}
ca_file = {{ .tls.caFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.ca")) "true" }}
ca_pem = {{ include "secrets.read" (dict "object" . "key" "tls.ca" "nonsensitive" true) }}
{{- end }}
{{- if .tls.certFile }}
cert_file = {{ .tls.certFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.cert")) "true" }}
cert_pem = {{ include "secrets.read" (dict "object" . "key" "tls.cert" "nonsensitive" true) }}
{{- end }}
{{- if .tls.keyFile }}
key_file = {{ .tls.keyFile | quote }}
{{- else if eq (include "secrets.usesSecret" (dict "object" . "key" "tls.key")) "true" }}
key_pem = {{ include "secrets.read" (dict "object" . "key" "tls.key") }}
{{- end }}
}
{{- end }}
}

external_labels = {
cluster = {{ $.Values.cluster.name | quote }},
k8s_cluster_name = {{ $.Values.cluster.name | quote }},
{{- range $key, $value := .extraLabels }}
{{ $key }} = {{ $value | quote }},
{{- end }}
{{- range $key, $value := .extraLabelsFrom }}
{{ $key }} = {{ $value }},
{{- end }}
}
}
{{- end }}
{{- end }}
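{{/* Example values (illustrative sketch; the URL is a placeholder):
     destinations:
       - name: local-pyroscope
         type: pyroscope
         url: http://pyroscope.profiling.svc:4040
     This renders a pyroscope.write component whose external_labels include "cluster" and
     "k8s_cluster_name", both taken from .Values.cluster.name. */}}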

{{- define "secrets.list.pyroscope" -}}
- tenantId
- auth.username
- auth.password
- auth.bearerToken
- auth.oauth2.clientId
- auth.oauth2.clientSecret
- tls.ca
- tls.cert
- tls.key
{{- end -}}

{{- define "destinations.pyroscope.alloy.pyroscope.profiles.target" }}pyroscope.write.{{ include "helper.alloy_name" .name }}.receiver{{ end -}}

{{- define "destinations.pyroscope.supports_metrics" }}false{{ end -}}
{{- define "destinations.pyroscope.supports_logs" }}false{{ end -}}
{{- define "destinations.pyroscope.supports_traces" }}false{{ end -}}
{{- define "destinations.pyroscope.supports_profiles" }}true{{ end -}}
{{- define "destinations.pyroscope.ecosystem" }}pyroscope{{ end -}}
@@ -0,0 +1,7 @@
{{/* Do not edit this file. It is generated by the Makefile */}}
{{- define "destinations.types" -}}
- loki
- otlp
- prometheus
- pyroscope
{{- end -}}
@@ -0,0 +1,69 @@
{{/* Does some basic destination validation */}}
{{/* Inputs: . (Values) */}}
{{- define "destinations.validate" }}
{{- range $i, $destination := .Values.destinations }}
{{- if not $destination.name }}
{{- $msg := list "" (printf "Destination #%d does not have a name." $i) }}
{{- $msg = append $msg "Please set:" }}
{{- $msg = append $msg "destinations:" }}
{{- $msg = append $msg " - name: my-destination-name" }}
{{- fail (join "\n" $msg) }}
{{- end }}

{{- if (regexFind "[^-_ a-zA-Z0-9]" $destination.name) }}
{{- $msg := list "" (printf "Destination #%d (%s) has invalid characters in its name." $i $destination.name) }}
{{- $msg = append $msg "Please only use alphanumeric, underscores, dashes, or spaces." }}
{{- fail (join "\n" $msg) }}
{{- end }}

{{- $types := (include "destinations.types" . ) | fromYamlArray }}
{{- if not $destination.type }}
{{ fail (printf "\nDestination #%d (%s) does not have a type.\nPlease set:\ndestinations:\n - name: %s\n type: %s" $i $destination.name $destination.name (include "english_list_or" $types)) }}
{{- end }}

{{- if not (has $destination.type $types) }}
{{ fail (printf "\nDestination #%d (%s) is using an unknown type (%s).\nPlease set:\ndestinations:\n - name: %s\n type: \"[%s]\"" $i $destination.name $destination.type $destination.name (include "english_list_or" $types)) }}
{{- end }}

{{- if and (eq $destination.type "otlp") (not (has ($destination.protocol | default "grpc") (list "grpc" "http"))) }}
{{- $msg := list "" (printf "Destination #%d (%s) has an unsupported protocol: %s." $i $destination.name $destination.protocol) }}
{{- $msg = append $msg "The protocol must be either \"grpc\" or \"http\"" }}
{{- $msg = append $msg "Please set:" }}
{{- $msg = append $msg "destinations:" }}
{{- $msg = append $msg (printf " - name: %s" $destination.name) }}
{{- $msg = append $msg " type: otlp" }}
{{- $msg = append $msg " protocol: grpc / http" }}
{{- fail (join "\n" $msg) }}
{{- end }}

{{/* Check that an OTLP destination pointed at the Grafana Cloud OTLP gateway has its protocol set to "http" */}}
{{- if and (eq $destination.type "otlp") (ne $destination.protocol "http") (regexMatch "otlp-gateway-.+grafana\\.net" $destination.url) }}
{{ fail (printf "\nDestination #%d (%s) is using Grafana Cloud OTLP gateway but has incorrect protocol '%s'. The gateway requires 'http'.\nPlease set:\ndestinations:\n - name: %s\n type: otlp\n url: %s\n protocol: http" $i $destination.name ($destination.protocol | default "grpc (default)") $destination.name $destination.url) }}
{{- end }}

{{/* Checks for OTLP destinations that point at Grafana Cloud Traces (Tempo) */}}
{{- if and (eq $destination.type "otlp") (regexMatch "tempo-.+grafana\\.net" $destination.url) }}
{{- if ne $destination.protocol "grpc" }}
{{ fail (printf "\nDestination #%d (%s) is using Grafana Cloud Traces but has incorrect protocol '%s'. Grafana Cloud Traces requires 'grpc'.\nPlease set:\ndestinations:\n - name: %s\n type: otlp\n url: %s\n protocol: grpc" $i $destination.name ($destination.protocol | default "grpc (default)") $destination.name $destination.url) }}
{{- end }}
{{- if eq (dig "metrics" "enabled" true $destination) true }}
{{ fail (printf "\nDestination #%d (%s) is using Grafana Cloud Traces but has metrics enabled. Tempo only supports traces.\nPlease set:\ndestinations:\n - name: %s\n type: otlp\n url: %s\n metrics:\n enabled: false" $i $destination.name $destination.name $destination.url) }}
{{- end }}
{{- if eq (dig "logs" "enabled" true $destination) true }}
{{ fail (printf "\nDestination #%d (%s) is using Grafana Cloud Traces but has logs enabled. Tempo only supports traces.\nPlease set:\ndestinations:\n - name: %s\n type: otlp\n url: %s\n logs:\n enabled: false" $i $destination.name $destination.name $destination.url) }}
{{- end }}
{{- if eq (dig "traces" "enabled" true $destination) false }}
{{ fail (printf "\nDestination #%d (%s) is using Grafana Cloud Traces but has traces disabled.\nPlease set:\ndestinations:\n - name: %s\n type: otlp\n url: %s\n traces:\n enabled: true" $i $destination.name $destination.name $destination.url) }}
{{- end }}
{{- end }}

{{- if eq (include "secrets.authType" $destination) "basic" }}
{{- if eq (include "secrets.usesSecret" (dict "object" $destination "key" "auth.username")) "false" }}
{{ fail (printf "\nDestination #%d (%s) is using basic auth but does not have a username.\nPlease set:\ndestinations:\n - name: %s\n auth:\n type: basic\n username: my-username\n password: my-password" $i $destination.name $destination.name) }}
{{- end }}
{{- if eq (include "secrets.usesSecret" (dict "object" $destination "key" "auth.password")) "false" }}
{{ fail (printf "\nDestination #%d (%s) is using basic auth but does not have a password.\nPlease set:\ndestinations:\n - name: %s\n auth:\n type: basic\n username: my-username\n password: my-password" $i $destination.name $destination.name) }}
{{- end }}
{{- end }}
{{- end }}
{{- end }}
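{{/* Illustrative example of a destination that passes these checks (the URL is a placeholder):
     destinations:
       - name: grafana-cloud-otlp
         type: otlp
         url: https://otlp-gateway-<region>.grafana.net/otlp
         protocol: http
     A Grafana Cloud Traces endpoint (tempo-*.grafana.net) would instead require protocol grpc
     with metrics and logs disabled. */}}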