# infrastructure/clusters/cl01tl/manifests/loki/loki.yaml
---
# Source: loki/templates/namespace.yaml
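# The namespace is labeled with the "privileged" Pod Security Standard so that
# the promtail DaemonSet below can mount hostPath volumes (/var/log/pods,
# /var/lib/docker/containers) and run as root; the baseline/restricted
# profiles would reject those pods.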
apiVersion: v1
kind: Namespace
metadata:
name: loki
labels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/part-of: loki
pod-security.kubernetes.io/audit: privileged
pod-security.kubernetes.io/enforce: privileged
pod-security.kubernetes.io/warn: privileged
---
# Source: loki/charts/loki/templates/loki-canary/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: loki-canary
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: canary
automountServiceAccountToken: true
---
# Source: loki/charts/loki/templates/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: loki
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
automountServiceAccountToken: true
---
# Source: loki/charts/promtail/templates/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: loki-promtail
namespace: loki
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
automountServiceAccountToken: true
---
# Source: loki/charts/promtail/templates/secret.yaml
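# Rendered promtail configuration. Promtail serves its own metrics on 3101,
# discovers pods through the Kubernetes API (kubernetes_sd_configs), and tails
# /var/log/pods/<pod-uid>/<container>/*.log via the __path__ label built by the
# relabel rules below. Lines are parsed with the CRI stage and pushed to
# http://loki-gateway.loki.svc.cluster.local:80/loki/api/v1/push.
# The relabeling yields the labels app, instance, component, node_name,
# namespace, job (<namespace>/<app>), pod and container, so a selector such as
# {namespace="loki", app="loki"} (illustrative example) matches Loki's own logs.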
apiVersion: v1
kind: Secret
metadata:
name: loki-promtail
namespace: loki
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
stringData:
promtail.yaml: |
server:
log_level: info
log_format: logfmt
http_listen_port: 3101
clients:
- tenant_id: "1"
url: http://loki-gateway.loki.svc.cluster.local:80/loki/api/v1/push
positions:
filename: /run/promtail/positions.yaml
scrape_configs:
# See also https://github.com/grafana/loki/blob/master/production/ksonnet/promtail/scrape_config.libsonnet for reference
- job_name: kubernetes-pods
pipeline_stages:
- cri: {}
kubernetes_sd_configs:
- role: pod
relabel_configs:
- source_labels:
- __meta_kubernetes_pod_controller_name
regex: ([0-9a-z-.]+?)(-[0-9a-f]{8,10})?
action: replace
target_label: __tmp_controller_name
- source_labels:
- __meta_kubernetes_pod_label_app_kubernetes_io_name
- __meta_kubernetes_pod_label_app
- __tmp_controller_name
- __meta_kubernetes_pod_name
regex: ^;*([^;]+)(;.*)?$
action: replace
target_label: app
- source_labels:
- __meta_kubernetes_pod_label_app_kubernetes_io_instance
- __meta_kubernetes_pod_label_instance
regex: ^;*([^;]+)(;.*)?$
action: replace
target_label: instance
- source_labels:
- __meta_kubernetes_pod_label_app_kubernetes_io_component
- __meta_kubernetes_pod_label_component
regex: ^;*([^;]+)(;.*)?$
action: replace
target_label: component
- action: replace
source_labels:
- __meta_kubernetes_pod_node_name
target_label: node_name
- action: replace
source_labels:
- __meta_kubernetes_namespace
target_label: namespace
- action: replace
replacement: $1
separator: /
source_labels:
- namespace
- app
target_label: job
- action: replace
source_labels:
- __meta_kubernetes_pod_name
target_label: pod
- action: replace
source_labels:
- __meta_kubernetes_pod_container_name
target_label: container
- action: replace
replacement: /var/log/pods/*$1/*.log
separator: /
source_labels:
- __meta_kubernetes_pod_uid
- __meta_kubernetes_pod_container_name
target_label: __path__
- action: replace
regex: true/(.*)
replacement: /var/log/pods/*$1/*.log
separator: /
source_labels:
- __meta_kubernetes_pod_annotationpresent_kubernetes_io_config_hash
- __meta_kubernetes_pod_annotation_kubernetes_io_config_hash
- __meta_kubernetes_pod_container_name
target_label: __path__
limits_config:
tracing:
enabled: false
---
# Source: loki/charts/loki/templates/config.yaml
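# Main Loki configuration, mounted into the single-binary StatefulSet.
# Highlights: auth_enabled=false (single tenant, no X-Scope-OrgID header
# required), filesystem object storage under /var/loki with a
# boltdb-shipper/v13 schema, memcached-backed chunk and query-result caches
# discovered via DNS SRV records, a 7d retention period enforced by the
# compactor, and memberlist gossip over loki-memberlist:7946 for the rings.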
apiVersion: v1
kind: ConfigMap
metadata:
name: loki
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
data:
config.yaml: |
auth_enabled: false
bloom_build:
builder:
planner_address: ""
enabled: false
bloom_gateway:
client:
addresses: ""
enabled: false
chunk_store_config:
chunk_cache_config:
background:
writeback_buffer: 500000
writeback_goroutines: 1
writeback_size_limit: 500MB
default_validity: 0s
memcached:
batch_size: 4
parallelism: 5
memcached_client:
addresses: dnssrvnoa+_memcached-client._tcp.loki-chunks-cache.loki.svc.cluster.local
consistent_hash: true
max_idle_conns: 72
timeout: 2000ms
common:
compactor_grpc_address: 'loki.loki.svc.cluster.local:9095'
path_prefix: /var/loki
replication_factor: 1
storage:
filesystem:
chunks_directory: /var/loki/chunks
rules_directory: /var/loki/rules
compactor:
compaction_interval: 10m
delete_request_store: filesystem
retention_delete_delay: 2h
retention_delete_worker_count: 150
retention_enabled: true
working_directory: /var/loki/compactor
frontend:
scheduler_address: ""
tail_proxy_url: ""
frontend_worker:
scheduler_address: ""
index_gateway:
mode: simple
ingester_client:
pool_config:
remote_timeout: 10s
remote_timeout: 10s
limits_config:
allow_structured_metadata: false
ingestion_burst_size_mb: 1024
ingestion_rate_mb: 1024
max_cache_freshness_per_query: 10m
max_streams_per_user: 100000
query_timeout: 300s
reject_old_samples: true
reject_old_samples_max_age: 168h
retention_period: 7d
split_queries_by_interval: 15m
volume_enabled: true
memberlist:
join_members:
- loki-memberlist.loki.svc.cluster.local
pattern_ingester:
enabled: false
query_range:
align_queries_with_step: true
cache_results: true
results_cache:
cache:
background:
writeback_buffer: 500000
writeback_goroutines: 1
writeback_size_limit: 500MB
default_validity: 12h
memcached_client:
addresses: dnssrvnoa+_memcached-client._tcp.loki-results-cache.loki.svc.cluster.local
consistent_hash: true
timeout: 500ms
update_interval: 1m
ruler:
storage:
type: local
wal:
dir: /var/loki/ruler-wal
runtime_config:
file: /etc/loki/runtime-config/runtime-config.yaml
schema_config:
configs:
- from: "2024-01-11"
index:
period: 24h
object_store: filesystem
schema: v13
store: boltdb-shipper
server:
grpc_listen_port: 9095
http_listen_port: 3100
http_server_read_timeout: 600s
http_server_write_timeout: 600s
storage_config:
bloom_shipper:
working_directory: /var/loki/data/bloomshipper
boltdb_shipper:
index_gateway_client:
server_address: ""
hedging:
at: 250ms
max_per_second: 20
up_to: 3
tsdb_shipper:
index_gateway_client:
server_address: ""
use_thanos_objstore: false
tracing:
enabled: false
---
# Source: loki/charts/loki/templates/gateway/configmap-gateway.yaml
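# Nginx configuration for the loki-gateway Deployment. The gateway is the
# single HTTP entry point (port 8080 in the pod, port 80 on the Service):
# every Loki API route (push, query, rules, ring/compactor/scheduler pages,
# tail over WebSocket) is proxied to the single-binary Service at
# loki.loki.svc.cluster.local:3100, and Grafana-set X-* headers are folded
# into X-Query-Tags for the query endpoints.
# Illustrative in-cluster check (not part of the chart output):
#   curl http://loki-gateway.loki.svc.cluster.local/loki/api/v1/labels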
apiVersion: v1
kind: ConfigMap
metadata:
name: loki-gateway
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: gateway
data:
nginx.conf: |
worker_processes 5; ## Default: 1
error_log /dev/stderr;
pid /tmp/nginx.pid;
worker_rlimit_nofile 8192;
events {
worker_connections 4096; ## Default: 1024
}
http {
client_body_temp_path /tmp/client_temp;
proxy_temp_path /tmp/proxy_temp_path;
fastcgi_temp_path /tmp/fastcgi_temp;
uwsgi_temp_path /tmp/uwsgi_temp;
scgi_temp_path /tmp/scgi_temp;
client_max_body_size 4M;
proxy_read_timeout 600; ## 10 minutes
proxy_send_timeout 600;
proxy_connect_timeout 600;
proxy_http_version 1.1;
default_type application/octet-stream;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /dev/stderr main;
sendfile on;
tcp_nopush on;
resolver kube-dns.kube-system.svc.cluster.local.;
# if the X-Query-Tags header is empty, set a noop= without a value as empty values are not logged
map $http_x_query_tags $query_tags {
"" "noop="; # When header is empty, set noop=
default $http_x_query_tags; # Otherwise, preserve the original value
}
server {
listen 8080;
listen [::]:8080;
location = / {
return 200 'OK';
auth_basic off;
}
########################################################
# Configure backend targets
location ^~ /ui {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# Distributor
location = /api/prom/push {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /loki/api/v1/push {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /distributor/ring {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /otlp/v1/logs {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# Ingester
location = /flush {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location ^~ /ingester/ {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /ingester {
internal; # to suppress 301
}
# Ring
location = /ring {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# MemberListKV
location = /memberlist {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# Ruler
location = /ruler/ring {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /api/prom/rules {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location ^~ /api/prom/rules/ {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /loki/api/v1/rules {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location ^~ /loki/api/v1/rules/ {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /prometheus/api/v1/alerts {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /prometheus/api/v1/rules {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# Compactor
location = /compactor/ring {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /loki/api/v1/delete {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /loki/api/v1/cache/generation_numbers {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# IndexGateway
location = /indexgateway/ring {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# QueryScheduler
location = /scheduler/ring {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# Config
location = /config {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
# QueryFrontend, Querier
location = /api/prom/tail {
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /loki/api/v1/tail {
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location ^~ /api/prom/ {
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /api/prom {
internal; # to suppress 301
}
location ^~ /loki/api/v1/ {
# pass custom headers set by Grafana as X-Query-Tags which are logged as key/value pairs in metrics.go log messages
proxy_set_header X-Query-Tags "${query_tags},user=${http_x_grafana_user},dashboard_id=${http_x_dashboard_uid},dashboard_title=${http_x_dashboard_title},panel_id=${http_x_panel_id},panel_title=${http_x_panel_title},source_rule_uid=${http_x_rule_uid},rule_name=${http_x_rule_name},rule_folder=${http_x_rule_folder},rule_version=${http_x_rule_version},rule_source=${http_x_rule_source},rule_type=${http_x_rule_type}";
proxy_pass http://loki.loki.svc.cluster.local:3100$request_uri;
}
location = /loki/api/v1 {
internal; # to suppress 301
}
}
}
---
# Source: loki/charts/loki/templates/runtime-configmap.yaml
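# Runtime (overrides) configuration, reloaded by Loki without a restart
# (runtime_config.file in the main config points here). It is empty in this
# render; per-tenant limits could be added under an "overrides:" key
# (illustrative only, nothing is configured here).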
apiVersion: v1
kind: ConfigMap
metadata:
name: loki-runtime
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
data:
runtime-config.yaml: |
{}
---
# Source: loki/charts/loki/templates/backend/clusterrole.yaml
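# Cluster-wide read access to ConfigMaps and Secrets. This is what the
# loki-sc-rules sidecar (k8s-sidecar, see the single-binary StatefulSet)
# relies on to collect ruler rule files from resources labeled "loki_rule".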
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1
metadata:
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
name: loki-clusterrole
rules:
- apiGroups: [""] # "" indicates the core API group
resources: ["configmaps", "secrets"]
verbs: ["get", "watch", "list"]
---
# Source: loki/charts/promtail/templates/clusterrole.yaml
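# Promtail needs get/list/watch on pods and related core resources for the
# kubernetes_sd_configs pod discovery used in its scrape config.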
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: loki-promtail
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
rules:
- apiGroups:
- ""
resources:
- nodes
- nodes/proxy
- services
- endpoints
- pods
verbs:
- get
- watch
- list
---
# Source: loki/charts/loki/templates/backend/clusterrolebinding.yaml
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: loki-clusterrolebinding
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
subjects:
- kind: ServiceAccount
name: loki
namespace: loki
roleRef:
kind: ClusterRole
name: loki-clusterrole
apiGroup: rbac.authorization.k8s.io
---
# Source: loki/charts/promtail/templates/clusterrolebinding.yaml
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: loki-promtail
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
subjects:
- kind: ServiceAccount
name: loki-promtail
namespace: loki
roleRef:
kind: ClusterRole
name: loki-promtail
apiGroup: rbac.authorization.k8s.io
---
# Source: loki/charts/loki/templates/chunks-cache/service-chunks-cache-headless.yaml
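# Headless Service for the chunks memcached. Loki's memcached_client resolves
# the SRV record dnssrvnoa+_memcached-client._tcp.loki-chunks-cache.loki.svc.cluster.local
# (see chunk_store_config above), so clusterIP is intentionally None.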
apiVersion: v1
kind: Service
metadata:
name: loki-chunks-cache
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: "memcached-chunks-cache"
annotations:
{}
namespace: "loki"
spec:
type: ClusterIP
clusterIP: None
ports:
- name: memcached-client
port: 11211
targetPort: client
- name: http-metrics
port: 9150
targetPort: http-metrics
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: "memcached-chunks-cache"
---
# Source: loki/charts/loki/templates/gateway/service-gateway.yaml
apiVersion: v1
kind: Service
metadata:
name: loki-gateway
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: gateway
prometheus.io/service-monitor: "false"
annotations:
spec:
type: ClusterIP
ports:
- name: http-metrics
port: 80
targetPort: http-metrics
protocol: TCP
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: gateway
---
# Source: loki/charts/loki/templates/loki-canary/service.yaml
apiVersion: v1
kind: Service
metadata:
name: loki-canary
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: canary
annotations:
spec:
type: ClusterIP
ports:
- name: http-metrics
port: 3500
targetPort: http-metrics
protocol: TCP
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: canary
---
# Source: loki/charts/loki/templates/results-cache/service-results-cache-headless.yaml
apiVersion: v1
kind: Service
metadata:
name: loki-results-cache
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: "memcached-results-cache"
annotations:
{}
namespace: "loki"
spec:
type: ClusterIP
clusterIP: None
ports:
- name: memcached-client
port: 11211
targetPort: client
- name: http-metrics
port: 9150
targetPort: http-metrics
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: "memcached-results-cache"
---
# Source: loki/charts/loki/templates/service-memberlist.yaml
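# Headless Service backing memberlist gossip on 7946. It selects every pod
# labeled app.kubernetes.io/part-of=memberlist and is the address listed under
# memberlist.join_members in the Loki config.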
apiVersion: v1
kind: Service
metadata:
name: loki-memberlist
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
annotations:
spec:
type: ClusterIP
clusterIP: None
ports:
- name: tcp
port: 7946
targetPort: http-memberlist
protocol: TCP
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/part-of: memberlist
---
# Source: loki/charts/loki/templates/single-binary/service-headless.yaml
apiVersion: v1
kind: Service
metadata:
name: loki-headless
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
variant: headless
prometheus.io/service-monitor: "false"
annotations:
spec:
clusterIP: None
ports:
- name: http-metrics
port: 3100
targetPort: http-metrics
protocol: TCP
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
---
# Source: loki/charts/loki/templates/single-binary/service.yaml
apiVersion: v1
kind: Service
metadata:
name: loki
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
annotations:
spec:
type: ClusterIP
ports:
- name: http-metrics
port: 3100
targetPort: http-metrics
protocol: TCP
- name: grpc
port: 9095
targetPort: grpc
protocol: TCP
selector:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: single-binary
---
# Source: loki/charts/promtail/templates/service-metrics.yaml
apiVersion: v1
kind: Service
metadata:
name: loki-promtail-metrics
namespace: loki
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
promtail: 3.0.0
spec:
clusterIP: None
ports:
- name: http-metrics
port: 3101
targetPort: http-metrics
protocol: TCP
selector:
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
---
# Source: loki/charts/loki/templates/loki-canary/daemonset.yaml
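# loki-canary runs on every node, emits synthetic log entries (pushed directly
# to the gateway because -push=true is set) and queries them back to verify
# end-to-end write/read behaviour; results are exposed as Prometheus metrics
# on port 3500.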
apiVersion: apps/v1
kind: DaemonSet
metadata:
name: loki-canary
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: canary
spec:
selector:
matchLabels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: canary
updateStrategy:
rollingUpdate:
maxUnavailable: 1
type: RollingUpdate
template:
metadata:
labels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: canary
spec:
serviceAccountName: loki-canary
securityContext:
fsGroup: 10001
runAsGroup: 10001
runAsNonRoot: true
runAsUser: 10001
containers:
- name: loki-canary
image: docker.io/grafana/loki-canary:3.5.7
imagePullPolicy: IfNotPresent
args:
- -addr=loki-gateway.loki.svc.cluster.local.:80
- -labelname=pod
- -labelvalue=$(POD_NAME)
- -push=true
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
volumeMounts:
ports:
- name: http-metrics
containerPort: 3500
protocol: TCP
env:
- name: POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
readinessProbe:
httpGet:
path: /metrics
port: http-metrics
initialDelaySeconds: 15
timeoutSeconds: 1
volumes:
---
# Source: loki/charts/promtail/templates/daemonset.yaml
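# promtail runs on every node, including control-plane nodes (see the
# tolerations), as root so it can read the host log directories, which are
# mounted read-only (/var/log/pods, /var/lib/docker/containers). Positions are
# persisted on the host under /run/promtail so a restarted pod resumes where
# it left off.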
apiVersion: apps/v1
kind: DaemonSet
metadata:
name: loki-promtail
namespace: loki
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
spec:
selector:
matchLabels:
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
updateStrategy:
{}
template:
metadata:
labels:
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
annotations:
checksum/config: c63810d2a03283062a5987b913985abc93a7e5cb90fde608a9f3ef77cb4e3412
spec:
serviceAccountName: loki-promtail
automountServiceAccountToken: true
enableServiceLinks: true
securityContext:
runAsGroup: 0
runAsUser: 0
containers:
- name: promtail
image: "docker.io/grafana/promtail:3.5.1"
imagePullPolicy: IfNotPresent
args:
- "-config.file=/etc/promtail/promtail.yaml"
volumeMounts:
- name: config
mountPath: /etc/promtail
- mountPath: /run/promtail
name: run
- mountPath: /var/lib/docker/containers
name: containers
readOnly: true
- mountPath: /var/log/pods
name: pods
readOnly: true
env:
- name: HOSTNAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
ports:
- name: http-metrics
containerPort: 3101
protocol: TCP
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
readinessProbe:
failureThreshold: 5
httpGet:
path: '/ready'
port: http-metrics
initialDelaySeconds: 10
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
tolerations:
- effect: NoSchedule
key: node-role.kubernetes.io/master
operator: Exists
- effect: NoSchedule
key: node-role.kubernetes.io/control-plane
operator: Exists
volumes:
- name: config
secret:
secretName: loki-promtail
- hostPath:
path: /run/promtail
name: run
- hostPath:
path: /var/lib/docker/containers
name: containers
- hostPath:
path: /var/log/pods
name: pods
---
# Source: loki/charts/loki/templates/gateway/deployment-gateway-nginx.yaml
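# The gateway itself: an unprivileged nginx (container port 8080) serving the
# configuration from the loki-gateway ConfigMap above. The required pod
# anti-affinity keeps at most one gateway replica per node; with replicas: 1
# this is a single, non-HA entry point.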
apiVersion: apps/v1
kind: Deployment
metadata:
name: loki-gateway
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: gateway
spec:
replicas: 1
strategy:
type: RollingUpdate
revisionHistoryLimit: 10
selector:
matchLabels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: gateway
template:
metadata:
annotations:
checksum/config: d76bd0b627b1549dddc6ce5304d9322ebdeb13e5b813234d8067357925630015
labels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: gateway
spec:
serviceAccountName: loki
enableServiceLinks: true
securityContext:
fsGroup: 101
runAsGroup: 101
runAsNonRoot: true
runAsUser: 101
terminationGracePeriodSeconds: 30
containers:
- name: nginx
image: docker.io/nginxinc/nginx-unprivileged:1.29-alpine
imagePullPolicy: IfNotPresent
ports:
- name: http-metrics
containerPort: 8080
protocol: TCP
readinessProbe:
httpGet:
path: /
port: http-metrics
initialDelaySeconds: 15
timeoutSeconds: 1
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
volumeMounts:
- name: config
mountPath: /etc/nginx
- name: tmp
mountPath: /tmp
- name: docker-entrypoint-d-override
mountPath: /docker-entrypoint.d
resources:
{}
affinity:
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchLabels:
app.kubernetes.io/component: gateway
app.kubernetes.io/instance: 'loki'
app.kubernetes.io/name: 'loki'
topologyKey: kubernetes.io/hostname
volumes:
- name: config
configMap:
name: loki-gateway
- name: tmp
emptyDir: {}
- name: docker-entrypoint-d-override
emptyDir: {}
---
# Source: loki/charts/loki/templates/chunks-cache/statefulset-chunks-cache.yaml
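# Chunks cache: memcached with 8 GiB of cache memory (-m 8192), a 5 MB max
# item size (-I 5m) and the "modern" preset, fronted by the headless Service
# above; the 9830Mi memory limit leaves headroom over the cache size. A
# memcached-exporter sidecar publishes metrics on 9150.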
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: loki-chunks-cache
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: "memcached-chunks-cache"
name: "memcached-chunks-cache"
annotations:
{}
namespace: "loki"
spec:
podManagementPolicy: Parallel
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: "memcached-chunks-cache"
name: "memcached-chunks-cache"
updateStrategy:
type: RollingUpdate
serviceName: loki-chunks-cache
template:
metadata:
labels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: "memcached-chunks-cache"
name: "memcached-chunks-cache"
annotations:
spec:
serviceAccountName: loki
securityContext:
fsGroup: 11211
runAsGroup: 11211
runAsNonRoot: true
runAsUser: 11211
initContainers:
[]
nodeSelector:
{}
affinity:
{}
topologySpreadConstraints:
[]
tolerations:
[]
terminationGracePeriodSeconds: 60
containers:
- name: memcached
image: memcached:1.6.39-alpine
imagePullPolicy: IfNotPresent
resources:
limits:
memory: 9830Mi
requests:
cpu: 500m
memory: 9830Mi
ports:
- containerPort: 11211
name: client
args:
- -m 8192
- --extended=modern,track_sizes
- -I 5m
- -c 16384
- -v
- -u 11211
env:
envFrom:
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
readinessProbe:
failureThreshold: 6
initialDelaySeconds: 5
periodSeconds: 5
tcpSocket:
port: client
timeoutSeconds: 3
livenessProbe:
failureThreshold: 3
initialDelaySeconds: 30
periodSeconds: 10
tcpSocket:
port: client
timeoutSeconds: 5
- name: exporter
image: prom/memcached-exporter:v0.15.3
imagePullPolicy: IfNotPresent
ports:
- containerPort: 9150
name: http-metrics
args:
- "--memcached.address=localhost:11211"
- "--web.listen-address=0.0.0.0:9150"
resources:
limits: {}
requests: {}
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
readinessProbe:
failureThreshold: 3
httpGet:
path: /metrics
port: http-metrics
initialDelaySeconds: 5
periodSeconds: 5
timeoutSeconds: 3
livenessProbe:
failureThreshold: 3
httpGet:
path: /metrics
port: http-metrics
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
---
# Source: loki/charts/loki/templates/results-cache/statefulset-results-cache.yaml
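# Results cache: same layout as the chunks cache, sized down to 1 GiB of cache
# memory (-m 1024) with a 1229Mi limit; used by the query_range results
# caching in the Loki config.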
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: loki-results-cache
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: "memcached-results-cache"
name: "memcached-results-cache"
annotations:
{}
namespace: "loki"
spec:
podManagementPolicy: Parallel
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: "memcached-results-cache"
name: "memcached-results-cache"
updateStrategy:
type: RollingUpdate
serviceName: loki-results-cache
template:
metadata:
labels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: "memcached-results-cache"
name: "memcached-results-cache"
annotations:
spec:
serviceAccountName: loki
securityContext:
fsGroup: 11211
runAsGroup: 11211
runAsNonRoot: true
runAsUser: 11211
initContainers:
[]
nodeSelector:
{}
affinity:
{}
topologySpreadConstraints:
[]
tolerations:
[]
terminationGracePeriodSeconds: 60
containers:
- name: memcached
image: memcached:1.6.39-alpine
imagePullPolicy: IfNotPresent
resources:
limits:
memory: 1229Mi
requests:
cpu: 500m
memory: 1229Mi
ports:
- containerPort: 11211
name: client
args:
- -m 1024
- --extended=modern,track_sizes
- -I 5m
- -c 16384
- -v
- -u 11211
env:
envFrom:
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
readinessProbe:
failureThreshold: 6
initialDelaySeconds: 5
periodSeconds: 5
tcpSocket:
port: client
timeoutSeconds: 3
livenessProbe:
failureThreshold: 3
initialDelaySeconds: 30
periodSeconds: 10
tcpSocket:
port: client
timeoutSeconds: 5
- name: exporter
image: prom/memcached-exporter:v0.15.3
imagePullPolicy: IfNotPresent
ports:
- containerPort: 9150
name: http-metrics
args:
- "--memcached.address=localhost:11211"
- "--web.listen-address=0.0.0.0:9150"
resources:
limits: {}
requests: {}
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
readinessProbe:
failureThreshold: 3
httpGet:
path: /metrics
port: http-metrics
initialDelaySeconds: 5
periodSeconds: 5
timeoutSeconds: 3
livenessProbe:
failureThreshold: 3
httpGet:
path: /metrics
port: http-metrics
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
---
# Source: loki/charts/loki/templates/single-binary/statefulset.yaml
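# The Loki instance itself, run as a single binary (-target=all) with one
# replica, so all components (distributor, ingester, querier, compactor,
# ruler, ...) share one pod and the 150Gi PVC (synology-iscsi-delete storage
# class) mounted at /var/loki. The loki-sc-rules sidecar watches
# ConfigMaps/Secrets labeled "loki_rule" and drops them into /rules for the ruler.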
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: loki
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: single-binary
app.kubernetes.io/part-of: memberlist
spec:
replicas: 1
podManagementPolicy: Parallel
updateStrategy:
rollingUpdate:
partition: 0
serviceName: loki-headless
revisionHistoryLimit: 10
persistentVolumeClaimRetentionPolicy:
whenDeleted: Delete
whenScaled: Delete
selector:
matchLabels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: single-binary
template:
metadata:
annotations:
checksum/config: 9cded33d7ba292eb76711b451f5ecd9bade13c7fb5ffb5622229f5706f8f90dd
storage/size: "150Gi"
kubectl.kubernetes.io/default-container: "loki"
labels:
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/component: single-binary
app.kubernetes.io/part-of: memberlist
spec:
serviceAccountName: loki
automountServiceAccountToken: true
enableServiceLinks: true
securityContext:
fsGroup: 10001
runAsGroup: 10001
runAsNonRoot: true
runAsUser: 10001
terminationGracePeriodSeconds: 30
containers:
- name: loki
image: docker.io/grafana/loki:3.5.7
imagePullPolicy: IfNotPresent
args:
- -config.file=/etc/loki/config/config.yaml
- -target=all
ports:
- name: http-metrics
containerPort: 3100
protocol: TCP
- name: grpc
containerPort: 9095
protocol: TCP
- name: http-memberlist
containerPort: 7946
protocol: TCP
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
readinessProbe:
failureThreshold: 3
httpGet:
path: /ready
port: http-metrics
initialDelaySeconds: 15
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
volumeMounts:
- name: tmp
mountPath: /tmp
- name: config
mountPath: /etc/loki/config
- name: runtime-config
mountPath: /etc/loki/runtime-config
- name: storage
mountPath: /var/loki
- name: sc-rules-volume
mountPath: "/rules"
resources:
{}
- name: loki-sc-rules
image: docker.io/kiwigrid/k8s-sidecar:1.30.10
imagePullPolicy: IfNotPresent
env:
- name: METHOD
value: WATCH
- name: LABEL
value: "loki_rule"
- name: FOLDER
value: "/rules"
- name: RESOURCE
value: "both"
- name: WATCH_SERVER_TIMEOUT
value: "60"
- name: WATCH_CLIENT_TIMEOUT
value: "60"
- name: LOG_LEVEL
value: "INFO"
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
volumeMounts:
- name: sc-rules-volume
mountPath: "/rules"
affinity:
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchLabels:
app.kubernetes.io/component: single-binary
app.kubernetes.io/instance: 'loki'
app.kubernetes.io/name: 'loki'
topologyKey: kubernetes.io/hostname
volumes:
- name: tmp
emptyDir: {}
- name: config
configMap:
name: loki
items:
- key: "config.yaml"
path: "config.yaml"
- name: runtime-config
configMap:
name: loki-runtime
- name: sc-rules-volume
emptyDir: {}
volumeClaimTemplates:
- apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: storage
spec:
accessModes:
- ReadWriteOnce
storageClassName: synology-iscsi-delete
resources:
requests:
storage: "150Gi"
---
# Source: loki/charts/promtail/templates/servicemonitor.yaml
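# Prometheus Operator ServiceMonitor scraping promtail's http-metrics port;
# it requires the monitoring.coreos.com CRDs to be present in the cluster.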
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: loki-promtail
labels:
helm.sh/chart: promtail-6.17.1
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.1"
app.kubernetes.io/managed-by: Helm
spec:
selector:
matchLabels:
app.kubernetes.io/name: promtail
app.kubernetes.io/instance: loki
endpoints:
- port: http-metrics
scheme: http
---
# Source: loki/charts/loki/templates/tests/test-canary.yaml
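# Helm test pod ("helm.sh/hook": test): created only when `helm test` runs, it
# polls the canary's metrics endpoint (CANARY_SERVICE_ADDRESS) to confirm that
# log entries written by the canary are being received by Loki.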
apiVersion: v1
kind: Pod
metadata:
name: "loki-helm-test"
namespace: loki
labels:
helm.sh/chart: loki-6.46.0
app.kubernetes.io/name: loki
app.kubernetes.io/instance: loki
app.kubernetes.io/version: "3.5.7"
app.kubernetes.io/component: helm-test
annotations:
"helm.sh/hook": test
spec:
containers:
- name: loki-helm-test
image: docker.io/grafana/loki-helm-test:latest
env:
- name: CANARY_SERVICE_ADDRESS
value: "http://loki-canary.loki.svc.cluster.local:3500/metrics"
- name: CANARY_PROMETHEUS_ADDRESS
value: ""
- name: CANARY_TEST_TIMEOUT
value: "1m"
args:
- -test.v
restartPolicy: Never