chore: Update manifests after automerge

2025-12-27 19:53:24 +00:00
parent a36c5e277b
commit 89f601ea41
348 changed files with 659 additions and 4853 deletions

View File

@@ -4,11 +4,11 @@ metadata:
   name: authentik-postgresql-18-cluster
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
 spec:
   instances: 3
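
The same two-label bump repeats in every file below because both labels are rendered from the chart version at template time. A sketch of the kind of helpers-template that produces them (assumed content, not taken from this chart; the define name is hypothetical, and note the version label here evidently tracks .Chart.Version rather than the more common .Chart.AppVersion):

    {{- /* hypothetical helper; both labels derive from the chart version */ -}}
    {{- define "postgres-cluster.chartLabels" -}}
    helm.sh/chart: {{ printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 }}
    app.kubernetes.io/version: {{ .Chart.Version | quote }}
    app.kubernetes.io/managed-by: {{ .Release.Service }}
    {{- end }}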

View File

@@ -4,11 +4,11 @@ metadata:
   name: authentik-postgresql-18-backup-garage-local-secret
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
     app.kubernetes.io/name: authentik-postgresql-18-backup-garage-local-secret
 spec:

View File

@@ -4,11 +4,11 @@ metadata:
   name: authentik-postgresql-18-recovery-secret
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
     app.kubernetes.io/name: authentik-postgresql-18-recovery-secret
 spec:

View File

@@ -4,11 +4,11 @@ metadata:
   name: authentik-postgresql-18-backup-garage-local
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
     app.kubernetes.io/name: authentik-postgresql-18-backup-garage-local
 spec:

View File

@@ -4,11 +4,11 @@ metadata:
   name: "authentik-postgresql-18-recovery"
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
     app.kubernetes.io/name: "authentik-postgresql-18-recovery"
 spec:

View File

@@ -4,11 +4,11 @@ metadata:
   name: authentik-postgresql-18-alert-rules
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
 spec:
   groups:
@@ -21,7 +21,7 @@ spec:
           Pod {{ $labels.pod }}
           has been waiting for longer than 5 minutes
       expr: |
-        cnpg_backends_waiting_total > 300
+        cnpg_backends_waiting_total{namespace="authentik"} > 300
       for: 1m
       labels:
         severity: warning
@@ -34,7 +34,7 @@ spec:
           There are over 10 deadlock conflicts in
           {{ $labels.pod }}
       expr: |
-        cnpg_pg_stat_database_deadlocks > 10
+        cnpg_pg_stat_database_deadlocks{namespace="authentik"} > 10
       for: 1m
       labels:
         severity: warning
@@ -151,7 +151,7 @@ spec:
           CloudNativePG Cluster Pod {{ $labels.pod }}
           is taking more than 5 minutes (300 seconds) for a query.
       expr: |-
-        cnpg_backends_max_tx_duration_seconds > 300
+        cnpg_backends_max_tx_duration_seconds{namespace="authentik"} > 300
       for: 1m
       labels:
         severity: warning
@@ -222,7 +222,7 @@ spec:
           Over 300,000,000 transactions from frozen xid
           on pod {{ $labels.pod }}
       expr: |
-        cnpg_pg_database_xid_age > 300000000
+        cnpg_pg_database_xid_age{namespace="authentik"} > 300000000
       for: 1m
       labels:
         severity: warning
@@ -247,7 +247,7 @@ spec:
           Replica {{ $labels.pod }}
           is failing to replicate
       expr: |
-        cnpg_pg_replication_in_recovery > cnpg_pg_replication_is_wal_receiver_up
+        cnpg_pg_replication_in_recovery{namespace="authentik"} > cnpg_pg_replication_is_wal_receiver_up{namespace="authentik"}
       for: 1m
      labels:
         severity: warning
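
The selector added to this last rule touches both operands for a reason: PromQL comparison operators match series one-to-one on identical label sets, so scoping only one side would still pair it against series from every namespace. A minimal sketch of the semantics (metric names as in the rules above; nothing here beyond standard PromQL):

    # Both sides carry the same matcher, so vector matching pairs series
    # from the authentik namespace only; an unscoped operand would match
    # CNPG series from every namespace this Prometheus scrapes.
    cnpg_pg_replication_in_recovery{namespace="authentik"}
      > cnpg_pg_replication_is_wal_receiver_up{namespace="authentik"}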

View File

@@ -1,29 +0,0 @@
-apiVersion: redis.redis.opstreelabs.in/v1beta2
-kind: RedisSentinel
-metadata:
-  name: redis-sentinel-authentik
-  namespace: authentik
-  labels:
-    helm.sh/chart: redis-replication-0.5.0
-    app.kubernetes.io/version: "0.5.0"
-    app.kubernetes.io/managed-by: Helm
-    app.kubernetes.io/name: redis-sentinel-authentik
-    app.kubernetes.io/instance: authentik
-    app.kubernetes.io/part-of: authentik
-spec:
-  clusterSize: 3
-  podSecurityContext:
-    fsGroup: 1000
-    runAsUser: 1000
-  redisSentinelConfig:
-    redisReplicationName: redis-replication-authentik
-  kubernetesConfig:
-    image: "quay.io/opstree/redis-sentinel:v8.4.0"
-    imagePullPolicy: IfNotPresent
-    resources:
-      requests:
-        cpu: 10m
-        memory: 32Mi
-  redisExporter:
-    enabled: true
-    image: "quay.io/opstree/redis-exporter:v1.80.1"

View File

@@ -4,11 +4,11 @@ metadata:
   name: "authentik-postgresql-18-scheduled-backup-live-backup"
   namespace: authentik
   labels:
-    helm.sh/chart: postgres-18-cluster-7.4.4
+    helm.sh/chart: postgres-18-cluster-7.4.5
     app.kubernetes.io/name: authentik-postgresql-18
     app.kubernetes.io/instance: authentik
     app.kubernetes.io/part-of: authentik
-    app.kubernetes.io/version: "7.4.4"
+    app.kubernetes.io/version: "7.4.5"
     app.kubernetes.io/managed-by: Helm
     app.kubernetes.io/name: "authentik-postgresql-18-scheduled-backup-live-backup"
 spec:

View File

@@ -1,22 +0,0 @@
-apiVersion: monitoring.coreos.com/v1
-kind: ServiceMonitor
-metadata:
-  name: redis-sentinel-authentik
-  namespace: authentik
-  labels:
-    helm.sh/chart: redis-replication-0.5.0
-    app.kubernetes.io/version: "0.5.0"
-    app.kubernetes.io/managed-by: Helm
-    app.kubernetes.io/name: redis-sentinel-authentik
-    app.kubernetes.io/instance: authentik
-    app.kubernetes.io/part-of: authentik
-spec:
-  selector:
-    matchLabels:
-      app: redis-sentinel-authentik
-      redis_setup_type: sentinel
-      role: sentinel
-  endpoints:
-    - port: sentinel-client
-      interval: 30s
-      scrapeTimeout: 10s
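
With the RedisSentinel resource and this ServiceMonitor both removed, Prometheus drops the sentinel scrape targets on its next config reload. A hedged sanity check in PromQL (redis_up is the standard liveness metric exposed by redis-exporter, and prometheus-operator sets the namespace label from the discovered endpoint; both are assumptions about this setup):

    # expected to return no results once the sentinel pods and their
    # scrape config are garbage-collected
    redis_up{namespace="authentik"}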