chore: Update manifests after change
@@ -0,0 +1,81 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/cluster.yaml
apiVersion: postgresql.cnpg.io/v1
kind: Cluster
metadata:
  name: authentik-postgresql-17-cluster
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  instances: 3
  imageName: "ghcr.io/cloudnative-pg/postgresql:17.7-standard-trixie"
  imagePullPolicy: IfNotPresent
  postgresUID: 26
  postgresGID: 26
  plugins:
    - name: barman-cloud.cloudnative-pg.io
      enabled: true
      isWALArchiver: false
      parameters:
        barmanObjectName: "authentik-postgresql-17-external-backup"
        serverName: "authentik-postgresql-17-backup-1"
    - name: barman-cloud.cloudnative-pg.io
      enabled: true
      isWALArchiver: true
      parameters:
        barmanObjectName: "authentik-postgresql-17-garage-local-backup"
        serverName: "authentik-postgresql-17-backup-1"
  externalClusters:
    - name: recovery
      plugin:
        name: barman-cloud.cloudnative-pg.io
        parameters:
          barmanObjectName: "authentik-postgresql-17-recovery"
          serverName: authentik-postgresql-17-backup-1
  storage:
    size: 10Gi
    storageClass: local-path
  walStorage:
    size: 2Gi
    storageClass: local-path
  resources:
    limits:
      hugepages-2Mi: 256Mi
    requests:
      cpu: 100m
      memory: 256Mi
  affinity:
    enablePodAntiAffinity: true
    topologyKey: kubernetes.io/hostname
  primaryUpdateMethod: switchover
  primaryUpdateStrategy: unsupervised
  logLevel: info
  enableSuperuserAccess: false
  enablePDB: true
  postgresql:
    parameters:
      hot_standby_feedback: "on"
      max_slot_wal_keep_size: 2000MB
      shared_buffers: 128MB
  monitoring:
    enablePodMonitor: true
    disableDefaultQueries: false
  bootstrap:
    recovery:
      database: app
      source: authentik-postgresql-17-backup-1
  externalClusters:
    - name: authentik-postgresql-17-backup-1
      plugin:
        name: barman-cloud.cloudnative-pg.io
        enabled: true
        isWALArchiver: false
        parameters:
          barmanObjectName: "authentik-postgresql-17-recovery"
          serverName: authentik-postgresql-17-backup-1
@@ -0,0 +1,20 @@
---
# Source: authentik/charts/authentik/charts/serviceAccount/templates/clusterrole.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: "authentik-authentik"
  labels:
    helm.sh/chart: "serviceAccount-2.1.0"
    app.kubernetes.io/name: "serviceAccount"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2.1.0"
rules:
  - apiGroups:
      - apiextensions.k8s.io
    resources:
      - customresourcedefinitions
    verbs:
      - list
@@ -0,0 +1,21 @@
---
# Source: authentik/charts/authentik/charts/serviceAccount/templates/clusterrolebinding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: "authentik-authentik"
  labels:
    helm.sh/chart: "serviceAccount-2.1.0"
    app.kubernetes.io/name: "serviceAccount"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2.1.0"
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: "authentik-authentik"
subjects:
  - kind: ServiceAccount
    name: authentik
    namespace: "authentik"
@@ -0,0 +1,60 @@
---
# Source: authentik/charts/cloudflared/templates/common.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: authentik-cloudflared
  labels:
    app.kubernetes.io/controller: main
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/name: cloudflared
    app.kubernetes.io/version: 2025.10.0
    helm.sh/chart: cloudflared-1.23.1
  namespace: authentik
spec:
  revisionHistoryLimit: 3
  replicas: 1
  strategy:
    type: Recreate
  selector:
    matchLabels:
      app.kubernetes.io/controller: main
      app.kubernetes.io/name: cloudflared
      app.kubernetes.io/instance: authentik
  template:
    metadata:
      labels:
        app.kubernetes.io/controller: main
        app.kubernetes.io/instance: authentik
        app.kubernetes.io/name: cloudflared
    spec:
      enableServiceLinks: false
      serviceAccountName: default
      automountServiceAccountToken: true
      hostIPC: false
      hostNetwork: false
      hostPID: false
      dnsPolicy: ClusterFirst
      containers:
        - args:
            - tunnel
            - --protocol
            - http2
            - --no-autoupdate
            - run
            - --token
            - $(CF_MANAGED_TUNNEL_TOKEN)
          env:
            - name: CF_MANAGED_TUNNEL_TOKEN
              valueFrom:
                secretKeyRef:
                  key: cf-tunnel-token
                  name: authentik-cloudflared-secret
          image: cloudflare/cloudflared:2025.11.1
          imagePullPolicy: IfNotPresent
          name: main
          resources:
            requests:
              cpu: 10m
              memory: 128Mi
@@ -0,0 +1,128 @@
---
# Source: authentik/charts/authentik/templates/server/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: authentik-server
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "server"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
spec:
  replicas: 1
  revisionHistoryLimit: 3
  selector:
    matchLabels:
      app.kubernetes.io/name: "authentik"
      app.kubernetes.io/instance: "authentik"
      app.kubernetes.io/component: "server"
  template:
    metadata:
      labels:
        helm.sh/chart: "authentik-2025.10.2"
        app.kubernetes.io/name: "authentik"
        app.kubernetes.io/instance: "authentik"
        app.kubernetes.io/component: "server"
        app.kubernetes.io/managed-by: "Helm"
        app.kubernetes.io/part-of: "authentik"
        app.kubernetes.io/version: "2025.10.2"
      annotations:
        checksum/secret: 4401a384e06edae0ac2d17733f15a6d40b1b11603c0ce881f1f93313e11b088b
    spec:
      terminationGracePeriodSeconds: 30
      containers:
        - name: server
          image: ghcr.io/goauthentik/server:2025.10.2
          imagePullPolicy: IfNotPresent
          args:
            - server
          env:
            - name: AUTHENTIK_SECRET_KEY
              valueFrom:
                secretKeyRef:
                  key: key
                  name: authentik-key-secret
            - name: AUTHENTIK_POSTGRESQL__HOST
              valueFrom:
                secretKeyRef:
                  key: host
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_POSTGRESQL__NAME
              valueFrom:
                secretKeyRef:
                  key: dbname
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_POSTGRESQL__USER
              valueFrom:
                secretKeyRef:
                  key: user
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_POSTGRESQL__PASSWORD
              valueFrom:
                secretKeyRef:
                  key: password
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_LISTEN__HTTP
              value: "0.0.0.0:9000"
            - name: AUTHENTIK_LISTEN__HTTPS
              value: "0.0.0.0:9443"
            - name: AUTHENTIK_LISTEN__METRICS
              value: "0.0.0.0:9300"
          envFrom:
            - secretRef:
                name: authentik
          ports:
            - name: http
              containerPort: 9000
              protocol: TCP
            - name: https
              containerPort: 9443
              protocol: TCP
            - name: metrics
              containerPort: 9300
              protocol: TCP
          livenessProbe:
            failureThreshold: 3
            httpGet:
              path: '/-/health/live/'
              port: http
            initialDelaySeconds: 5
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 3
          readinessProbe:
            failureThreshold: 3
            httpGet:
              path: '/-/health/ready/'
              port: http
            initialDelaySeconds: 5
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 3
          startupProbe:
            failureThreshold: 60
            httpGet:
              path: '/-/health/live/'
              port: http
            initialDelaySeconds: 5
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 3
          resources: {}
      affinity:
        podAntiAffinity:
          preferredDuringSchedulingIgnoredDuringExecution:
            - weight: 100
              podAffinityTerm:
                labelSelector:
                  matchLabels:
                    app.kubernetes.io/name: "authentik"
                    app.kubernetes.io/instance: "authentik"
                    app.kubernetes.io/component: "server"
                topologyKey: kubernetes.io/hostname
      enableServiceLinks: true
@@ -0,0 +1,127 @@
---
# Source: authentik/charts/authentik/templates/worker/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: authentik-worker
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "worker"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
spec:
  replicas: 1
  revisionHistoryLimit: 3
  selector:
    matchLabels:
      app.kubernetes.io/name: "authentik"
      app.kubernetes.io/instance: "authentik"
      app.kubernetes.io/component: "worker"
  template:
    metadata:
      labels:
        helm.sh/chart: "authentik-2025.10.2"
        app.kubernetes.io/name: "authentik"
        app.kubernetes.io/instance: "authentik"
        app.kubernetes.io/component: "worker"
        app.kubernetes.io/managed-by: "Helm"
        app.kubernetes.io/part-of: "authentik"
        app.kubernetes.io/version: "2025.10.2"
      annotations:
        checksum/secret: 4401a384e06edae0ac2d17733f15a6d40b1b11603c0ce881f1f93313e11b088b
    spec:
      serviceAccountName: authentik
      terminationGracePeriodSeconds: 30
      containers:
        - name: worker
          image: ghcr.io/goauthentik/server:2025.10.2
          imagePullPolicy: IfNotPresent
          args:
            - worker
          env:
            - name: AUTHENTIK_SECRET_KEY
              valueFrom:
                secretKeyRef:
                  key: key
                  name: authentik-key-secret
            - name: AUTHENTIK_POSTGRESQL__HOST
              valueFrom:
                secretKeyRef:
                  key: host
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_POSTGRESQL__NAME
              valueFrom:
                secretKeyRef:
                  key: dbname
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_POSTGRESQL__USER
              valueFrom:
                secretKeyRef:
                  key: user
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_POSTGRESQL__PASSWORD
              valueFrom:
                secretKeyRef:
                  key: password
                  name: authentik-postgresql-17-cluster-app
            - name: AUTHENTIK_LISTEN__HTTP
              value: "0.0.0.0:9000"
            - name: AUTHENTIK_LISTEN__METRICS
              value: "0.0.0.0:9300"
          envFrom:
            - secretRef:
                name: authentik
          ports:
            - name: http
              containerPort: 9000
              protocol: TCP
            - name: metrics
              containerPort: 9300
              protocol: TCP
          livenessProbe:
            exec:
              command:
                - ak
                - healthcheck
            failureThreshold: 3
            initialDelaySeconds: 5
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 3
          readinessProbe:
            exec:
              command:
                - ak
                - healthcheck
            failureThreshold: 3
            initialDelaySeconds: 5
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 3
          startupProbe:
            exec:
              command:
                - ak
                - healthcheck
            failureThreshold: 60
            initialDelaySeconds: 30
            periodSeconds: 10
            successThreshold: 1
            timeoutSeconds: 3
          resources: {}
      affinity:
        podAntiAffinity:
          preferredDuringSchedulingIgnoredDuringExecution:
            - weight: 100
              podAffinityTerm:
                labelSelector:
                  matchLabels:
                    app.kubernetes.io/name: "authentik"
                    app.kubernetes.io/instance: "authentik"
                    app.kubernetes.io/component: "worker"
                topologyKey: kubernetes.io/hostname
      enableServiceLinks: true
@@ -0,0 +1,23 @@
---
# Source: authentik/templates/external-secret.yaml
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
  name: authentik-cloudflared-secret
  namespace: authentik
  labels:
    app.kubernetes.io/name: authentik-cloudflared-secret
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
spec:
  secretStoreRef:
    kind: ClusterSecretStore
    name: vault
  data:
    - secretKey: cf-tunnel-token
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /cloudflare/tunnels/authentik
        metadataPolicy: None
        property: token
@@ -0,0 +1,23 @@
---
# Source: authentik/templates/external-secret.yaml
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
  name: authentik-key-secret
  namespace: authentik
  labels:
    app.kubernetes.io/name: authentik-key-secret
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
spec:
  secretStoreRef:
    kind: ClusterSecretStore
    name: vault
  data:
    - secretKey: key
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /cl01tl/authentik/key
        metadataPolicy: None
        property: key
@@ -0,0 +1,37 @@
---
# Source: authentik/templates/external-secret.yaml
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
  name: authentik-postgresql-17-cluster-backup-secret-garage
  namespace: authentik
  labels:
    app.kubernetes.io/name: authentik-postgresql-17-cluster-backup-secret-garage
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
spec:
  secretStoreRef:
    kind: ClusterSecretStore
    name: vault
  data:
    - secretKey: ACCESS_KEY_ID
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /garage/home-infra/postgres-backups
        metadataPolicy: None
        property: ACCESS_KEY_ID
    - secretKey: ACCESS_SECRET_KEY
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /garage/home-infra/postgres-backups
        metadataPolicy: None
        property: ACCESS_SECRET_KEY
    - secretKey: ACCESS_REGION
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /garage/home-infra/postgres-backups
        metadataPolicy: None
        property: ACCESS_REGION
@@ -0,0 +1,30 @@
---
# Source: authentik/templates/external-secret.yaml
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
  name: authentik-postgresql-17-cluster-backup-secret
  namespace: authentik
  labels:
    app.kubernetes.io/name: authentik-postgresql-17-cluster-backup-secret
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
spec:
  secretStoreRef:
    kind: ClusterSecretStore
    name: vault
  data:
    - secretKey: ACCESS_KEY_ID
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /digital-ocean/home-infra/postgres-backups
        metadataPolicy: None
        property: access
    - secretKey: ACCESS_SECRET_KEY
      remoteRef:
        conversionStrategy: Default
        decodingStrategy: None
        key: /digital-ocean/home-infra/postgres-backups
        metadataPolicy: None
        property: secret
@@ -0,0 +1,30 @@
---
# Source: authentik/templates/http-route.yaml
apiVersion: gateway.networking.k8s.io/v1
kind: HTTPRoute
metadata:
  name: http-route-authentik
  namespace: authentik
  labels:
    app.kubernetes.io/name: http-route-authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
spec:
  parentRefs:
    - group: gateway.networking.k8s.io
      kind: Gateway
      name: traefik-gateway
      namespace: traefik
  hostnames:
    - authentik.alexlebens.net
  rules:
    - matches:
        - path:
            type: PathPrefix
            value: /
      backendRefs:
        - group: ''
          kind: Service
          name: authentik-server
          port: 80
          weight: 100
@@ -0,0 +1,31 @@
---
# Source: authentik/templates/ingress.yaml
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: authentik-tailscale
  namespace: authentik
  labels:
    app.kubernetes.io/name: authentik-tailscale
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    tailscale.com/proxy-class: no-metrics
  annotations:
    tailscale.com/experimental-forward-cluster-traffic-via-ingress: "true"
spec:
  ingressClassName: tailscale
  tls:
    - hosts:
        - auth-cl01tl
      secretName: auth-cl01tl
  rules:
    - host: auth-cl01tl
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: authentik-server
                port:
                  number: 80
@@ -0,0 +1,26 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/object-store.yaml
apiVersion: barmancloud.cnpg.io/v1
kind: ObjectStore
metadata:
  name: "authentik-postgresql-17-external-backup"
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  retentionPolicy: 30d
  configuration:
    destinationPath: s3://postgres-backups-ce540ddf106d186bbddca68a/cl01tl/authentik/authentik-postgresql-17-cluster
    endpointURL: https://nyc3.digitaloceanspaces.com
    s3Credentials:
      accessKeyId:
        name: authentik-postgresql-17-cluster-backup-secret
        key: ACCESS_KEY_ID
      secretAccessKey:
        name: authentik-postgresql-17-cluster-backup-secret
        key: ACCESS_SECRET_KEY
@@ -0,0 +1,29 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/object-store.yaml
apiVersion: barmancloud.cnpg.io/v1
kind: ObjectStore
metadata:
  name: "authentik-postgresql-17-garage-local-backup"
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  retentionPolicy: 3d
  configuration:
    destinationPath: s3://postgres-backups/cl01tl/authentik/authentik-postgresql-17-cluster
    endpointURL: http://garage-main.garage:3900
    s3Credentials:
      accessKeyId:
        name: authentik-postgresql-17-cluster-backup-secret-garage
        key: ACCESS_KEY_ID
      secretAccessKey:
        name: authentik-postgresql-17-cluster-backup-secret-garage
        key: ACCESS_SECRET_KEY
      region:
        name: authentik-postgresql-17-cluster-backup-secret-garage
        key: ACCESS_REGION
@@ -0,0 +1,31 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/object-store.yaml
apiVersion: barmancloud.cnpg.io/v1
kind: ObjectStore
metadata:
  name: "authentik-postgresql-17-recovery"
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  configuration:
    destinationPath: s3://postgres-backups/cl01tl/authentik/authentik-postgresql-17-cluster
    endpointURL: http://garage-main.garage:3900
    wal:
      compression: snappy
      maxParallel: 1
    data:
      compression: snappy
      jobs: 1
    s3Credentials:
      accessKeyId:
        name: authentik-postgresql-17-cluster-backup-secret-garage
        key: ACCESS_KEY_ID
      secretAccessKey:
        name: authentik-postgresql-17-cluster-backup-secret-garage
        key: ACCESS_SECRET_KEY
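The three ObjectStores above point the barman-cloud plugin at two buckets: a DigitalOcean Spaces bucket for the long-retention external backup, and an in-cluster Garage bucket for the short-retention local backup and recovery source. A minimal sketch for spot-checking that backups are actually landing under the expected prefix is shown below; it assumes you run it from somewhere that can reach the Garage endpoint and that the two environment variable names (illustrative, not from the manifests) carry the same credentials as the `-garage` backup secret.

```python
# Sketch: list the newest objects under the barman destinationPath on Garage.
# GARAGE_ACCESS_KEY_ID / GARAGE_SECRET_ACCESS_KEY are assumed env vars mirroring
# authentik-postgresql-17-cluster-backup-secret-garage.
import os
import boto3

s3 = boto3.client(
    "s3",
    endpoint_url="http://garage-main.garage:3900",
    aws_access_key_id=os.environ["GARAGE_ACCESS_KEY_ID"],
    aws_secret_access_key=os.environ["GARAGE_SECRET_ACCESS_KEY"],
)
resp = s3.list_objects_v2(
    Bucket="postgres-backups",
    Prefix="cl01tl/authentik/authentik-postgresql-17-cluster/",
)
for obj in sorted(resp.get("Contents", []), key=lambda o: o["LastModified"])[-10:]:
    print(obj["LastModified"], obj["Key"])
```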
@@ -0,0 +1,272 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/prometheus-rule.yaml
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
  name: authentik-postgresql-17-alert-rules
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  groups:
    - name: cloudnative-pg/authentik-postgresql-17
      rules:
        - alert: CNPGClusterBackendsWaitingWarning
          annotations:
            summary: CNPG Cluster a backend is waiting for longer than 5 minutes.
            description: |-
              Pod {{ $labels.pod }}
              has been waiting for longer than 5 minutes
          expr: |
            cnpg_backends_waiting_total > 300
          for: 1m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterDatabaseDeadlockConflictsWarning
          annotations:
            summary: CNPG Cluster has over 10 deadlock conflicts.
            description: |-
              There are over 10 deadlock conflicts in
              {{ $labels.pod }}
          expr: |
            cnpg_pg_stat_database_deadlocks > 10
          for: 1m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterHACritical
          annotations:
            summary: CNPG Cluster has no standby replicas!
            description: |-
              CloudNativePG Cluster "{{`{{`}} $labels.job {{`}}`}}" has no ready standby replicas. Your cluster is at severe
              risk of data loss and downtime if the primary instance fails.

              The primary instance is still online and able to serve queries, although connections to the `-ro` endpoint
              will fail. The `-r` endpoint is operating at reduced capacity and all traffic is being served by the main.

              This can happen during a normal fail-over or automated minor version upgrades in a cluster with 2 or less
              instances. The replaced instance may need some time to catch-up with the cluster primary instance.

              This alarm will always trigger if your cluster is configured to run with only 1 instance. In this
              case you may want to silence it.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterHACritical.md
          expr: |
            max by (job) (cnpg_pg_replication_streaming_replicas{namespace="authentik"} - cnpg_pg_replication_is_wal_receiver_up{namespace="authentik"}) < 1
          for: 5m
          labels:
            severity: critical
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterHAWarning
          annotations:
            summary: CNPG Cluster less than 2 standby replicas.
            description: |-
              CloudNativePG Cluster "{{`{{`}} $labels.job {{`}}`}}" has only {{`{{`}} $value {{`}}`}} standby replicas, putting
              your cluster at risk if another instance fails. The cluster is still able to operate normally, although
              the `-ro` and `-r` endpoints operate at reduced capacity.

              This can happen during a normal fail-over or automated minor version upgrades. The replaced instance may
              need some time to catch-up with the cluster primary instance.

              This alarm will be constantly triggered if your cluster is configured to run with less than 3 instances.
              In this case you may want to silence it.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterHAWarning.md
          expr: |
            max by (job) (cnpg_pg_replication_streaming_replicas{namespace="authentik"} - cnpg_pg_replication_is_wal_receiver_up{namespace="authentik"}) < 2
          for: 5m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterHighConnectionsCritical
          annotations:
            summary: CNPG Instance maximum number of connections critical!
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" instance {{`{{`}} $labels.pod {{`}}`}} is using {{`{{`}} $value {{`}}`}}% of
              the maximum number of connections.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterHighConnectionsCritical.md
          expr: |
            sum by (pod) (cnpg_backends_total{namespace="authentik", pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) / max by (pod) (cnpg_pg_settings_setting{name="max_connections", namespace="authentik", pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) * 100 > 95
          for: 5m
          labels:
            severity: critical
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterHighConnectionsWarning
          annotations:
            summary: CNPG Instance is approaching the maximum number of connections.
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" instance {{`{{`}} $labels.pod {{`}}`}} is using {{`{{`}} $value {{`}}`}}% of
              the maximum number of connections.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterHighConnectionsWarning.md
          expr: |
            sum by (pod) (cnpg_backends_total{namespace="authentik", pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) / max by (pod) (cnpg_pg_settings_setting{name="max_connections", namespace="authentik", pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) * 100 > 80
          for: 5m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterHighReplicationLag
          annotations:
            summary: CNPG Cluster high replication lag
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" is experiencing a high replication lag of
              {{`{{`}} $value {{`}}`}}ms.

              High replication lag indicates network issues, busy instances, slow queries or suboptimal configuration.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterHighReplicationLag.md
          expr: |
            max(cnpg_pg_replication_lag{namespace="authentik",pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) * 1000 > 1000
          for: 5m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterInstancesOnSameNode
          annotations:
            summary: CNPG Cluster instances are located on the same node.
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" has {{`{{`}} $value {{`}}`}}
              instances on the same node {{`{{`}} $labels.node {{`}}`}}.

              A failure or scheduled downtime of a single node will lead to a potential service disruption and/or data loss.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterInstancesOnSameNode.md
          expr: |
            count by (node) (kube_pod_info{namespace="authentik", pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) > 1
          for: 5m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterLongRunningTransactionWarning
          annotations:
            summary: CNPG Cluster query is taking longer than 5 minutes.
            description: |-
              CloudNativePG Cluster Pod {{ $labels.pod }}
              is taking more than 5 minutes (300 seconds) for a query.
          expr: |-
            cnpg_backends_max_tx_duration_seconds > 300
          for: 1m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterLowDiskSpaceCritical
          annotations:
            summary: CNPG Instance is running out of disk space!
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" is running extremely low on disk space. Check attached PVCs!
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterLowDiskSpaceCritical.md
          expr: |
            max(max by(persistentvolumeclaim) (1 - kubelet_volume_stats_available_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"} / kubelet_volume_stats_capacity_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"})) > 0.9 OR
            max(max by(persistentvolumeclaim) (1 - kubelet_volume_stats_available_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-wal"} / kubelet_volume_stats_capacity_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-wal"})) > 0.9 OR
            max(sum by (namespace,persistentvolumeclaim) (kubelet_volume_stats_used_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-tbs.*"})
            /
            sum by (namespace,persistentvolumeclaim) (kubelet_volume_stats_capacity_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-tbs.*"})
            *
            on(namespace, persistentvolumeclaim) group_left(volume)
            kube_pod_spec_volumes_persistentvolumeclaims_info{pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}
            ) > 0.9
          for: 5m
          labels:
            severity: critical
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterLowDiskSpaceWarning
          annotations:
            summary: CNPG Instance is running out of disk space.
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" is running low on disk space. Check attached PVCs.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterLowDiskSpaceWarning.md
          expr: |
            max(max by(persistentvolumeclaim) (1 - kubelet_volume_stats_available_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"} / kubelet_volume_stats_capacity_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"})) > 0.7 OR
            max(max by(persistentvolumeclaim) (1 - kubelet_volume_stats_available_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-wal"} / kubelet_volume_stats_capacity_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-wal"})) > 0.7 OR
            max(sum by (namespace,persistentvolumeclaim) (kubelet_volume_stats_used_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-tbs.*"})
            /
            sum by (namespace,persistentvolumeclaim) (kubelet_volume_stats_capacity_bytes{namespace="authentik", persistentvolumeclaim=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$-tbs.*"})
            *
            on(namespace, persistentvolumeclaim) group_left(volume)
            kube_pod_spec_volumes_persistentvolumeclaims_info{pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}
            ) > 0.7
          for: 5m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterOffline
          annotations:
            summary: CNPG Cluster has no running instances!
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" has no ready instances.

              Having an offline cluster means your applications will not be able to access the database, leading to
              potential service disruption and/or data loss.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterOffline.md
          expr: |
            (count(cnpg_collector_up{namespace="authentik",pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"}) OR on() vector(0)) == 0
          for: 5m
          labels:
            severity: critical
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterPGDatabaseXidAgeWarning
          annotations:
            summary: CNPG Cluster has a number of transactions from the frozen XID to the current one.
            description: |-
              Over 300,000,000 transactions from frozen xid
              on pod {{ $labels.pod }}
          expr: |
            cnpg_pg_database_xid_age > 300000000
          for: 1m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterPGReplicationWarning
          annotations:
            summary: CNPG Cluster standby is lagging behind the primary.
            description: |-
              Standby is lagging behind by over 300 seconds (5 minutes)
          expr: |
            cnpg_pg_replication_lag > 300
          for: 1m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterReplicaFailingReplicationWarning
          annotations:
            summary: CNPG Cluster has a replica that is failing to replicate.
            description: |-
              Replica {{ $labels.pod }}
              is failing to replicate
          expr: |
            cnpg_pg_replication_in_recovery > cnpg_pg_replication_is_wal_receiver_up
          for: 1m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
        - alert: CNPGClusterZoneSpreadWarning
          annotations:
            summary: CNPG Cluster instances in the same zone.
            description: |-
              CloudNativePG Cluster "authentik/authentik-postgresql-17-cluster" has instances in the same availability zone.

              A disaster in one availability zone will lead to a potential service disruption and/or data loss.
            runbook_url: https://github.com/cloudnative-pg/charts/blob/main/charts/cluster/docs/runbooks/CNPGClusterZoneSpreadWarning.md
          expr: |
            3 > count(count by (label_topology_kubernetes_io_zone) (kube_pod_info{namespace="authentik", pod=~"authentik-postgresql-17-cluster-([1-9][0-9]*)$"} * on(node,instance) group_left(label_topology_kubernetes_io_zone) kube_node_labels)) < 3
          for: 5m
          labels:
            severity: warning
            namespace: authentik
            cnpg_cluster: authentik-postgresql-17-cluster
clusters/cl01tl/manifests/authentik/PrometheusRule-authentik.yml (new file, 150 lines)
@@ -0,0 +1,150 @@
---
# Source: authentik/charts/authentik/templates/prometheusrule.yaml
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
  name: authentik
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
spec:
  groups:
    - name: authentik Aggregate request counters
      rules:
        - record: job:django_http_requests_before_middlewares_total:sum_rate30s
          expr: sum(rate(django_http_requests_before_middlewares_total[30s])) by (job)
        - record: job:django_http_requests_unknown_latency_total:sum_rate30s
          expr: sum(rate(django_http_requests_unknown_latency_total[30s])) by (job)
        - record: job:django_http_ajax_requests_total:sum_rate30s
          expr: sum(rate(django_http_ajax_requests_total[30s])) by (job)
        - record: job:django_http_responses_before_middlewares_total:sum_rate30s
          expr: sum(rate(django_http_responses_before_middlewares_total[30s])) by (job)
        - record: job:django_http_requests_unknown_latency_including_middlewares_total:sum_rate30s
          expr: sum(rate(django_http_requests_unknown_latency_including_middlewares_total[30s])) by (job)
        - record: job:django_http_requests_body_total_bytes:sum_rate30s
          expr: sum(rate(django_http_requests_body_total_bytes[30s])) by (job)
        - record: job:django_http_responses_streaming_total:sum_rate30s
          expr: sum(rate(django_http_responses_streaming_total[30s])) by (job)
        - record: job:django_http_responses_body_total_bytes:sum_rate30s
          expr: sum(rate(django_http_responses_body_total_bytes[30s])) by (job)
        - record: job:django_http_requests_total:sum_rate30s
          expr: sum(rate(django_http_requests_total_by_method[30s])) by (job)
        - record: job:django_http_requests_total_by_method:sum_rate30s
          expr: sum(rate(django_http_requests_total_by_method[30s])) by (job,method)
        - record: job:django_http_requests_total_by_transport:sum_rate30s
          expr: sum(rate(django_http_requests_total_by_transport[30s])) by (job,transport)
        - record: job:django_http_requests_total_by_view:sum_rate30s
          expr: sum(rate(django_http_requests_total_by_view_transport_method[30s])) by (job,view)
        - record: job:django_http_requests_total_by_view_transport_method:sum_rate30s
          expr: sum(rate(django_http_requests_total_by_view_transport_method[30s])) by (job,view,transport,method)
        - record: job:django_http_responses_total_by_templatename:sum_rate30s
          expr: sum(rate(django_http_responses_total_by_templatename[30s])) by (job,templatename)
        - record: job:django_http_responses_total_by_status:sum_rate30s
          expr: sum(rate(django_http_responses_total_by_status[30s])) by (job,status)
        - record: job:django_http_responses_total_by_status_name_method:sum_rate30s
          expr: sum(rate(django_http_responses_total_by_status_name_method[30s])) by (job,status,name,method)
        - record: job:django_http_responses_total_by_charset:sum_rate30s
          expr: sum(rate(django_http_responses_total_by_charset[30s])) by (job,charset)
        - record: job:django_http_exceptions_total_by_type:sum_rate30s
          expr: sum(rate(django_http_exceptions_total_by_type[30s])) by (job,type)
        - record: job:django_http_exceptions_total_by_view:sum_rate30s
          expr: sum(rate(django_http_exceptions_total_by_view[30s])) by (job,view)
    - name: authentik Aggregate latency histograms
      rules:
        - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
          expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "50"
        - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
          expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "95"
        - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
          expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "99"
        - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
          expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "99.9"
        - record: job:django_http_requests_latency_seconds:quantile_rate30s
          expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "50"
        - record: job:django_http_requests_latency_seconds:quantile_rate30s
          expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "95"
        - record: job:django_http_requests_latency_seconds:quantile_rate30s
          expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "99"
        - record: job:django_http_requests_latency_seconds:quantile_rate30s
          expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
          labels:
            quantile: "99.9"
    - name: authentik Aggregate model operations
      rules:
        - record: job:django_model_inserts_total:sum_rate1m
          expr: sum(rate(django_model_inserts_total[1m])) by (job, model)
        - record: job:django_model_updates_total:sum_rate1m
          expr: sum(rate(django_model_updates_total[1m])) by (job, model)
        - record: job:django_model_deletes_total:sum_rate1m
          expr: sum(rate(django_model_deletes_total[1m])) by (job, model)
    - name: authentik Aggregate database operations
      rules:
        - record: job:django_db_new_connections_total:sum_rate30s
          expr: sum(rate(django_db_new_connections_total[30s])) by (alias, vendor)
        - record: job:django_db_new_connection_errors_total:sum_rate30s
          expr: sum(rate(django_db_new_connection_errors_total[30s])) by (alias, vendor)
        - record: job:django_db_execute_total:sum_rate30s
          expr: sum(rate(django_db_execute_total[30s])) by (alias, vendor)
        - record: job:django_db_execute_many_total:sum_rate30s
          expr: sum(rate(django_db_execute_many_total[30s])) by (alias, vendor)
        - record: job:django_db_errors_total:sum_rate30s
          expr: sum(rate(django_db_errors_total[30s])) by (alias, vendor, type)
    - name: authentik Aggregate migrations
      rules:
        - record: job:django_migrations_applied_total:max
          expr: max(django_migrations_applied_total) by (job, connection)
        - record: job:django_migrations_unapplied_total:max
          expr: max(django_migrations_unapplied_total) by (job, connection)
    - name: authentik Alerts
      rules:
        - alert: NoWorkersConnected
          labels:
            severity: critical
          expr: max (authentik_tasks_workers) < 1
          for: 10m
          annotations:
            summary: No workers connected
            message: authentik instance {{ $labels.instance }}'s workers are either not running or not connected.
        - alert: PendingMigrations
          labels:
            severity: critical
          expr: max without (pid) (django_migrations_unapplied_total) > 0
          for: 10m
          annotations:
            summary: Pending database migrations
            message: authentik instance {{ $labels.instance }} has pending database migrations
        - alert: FailedSystemTasks
          labels:
            severity: critical
          expr: sum(increase(authentik_tasks_errors_total[2h])) by (actor_name) > 0
          for: 2h
          annotations:
            summary: Failed system tasks
            message: System task {{ $labels.actor_name }} has failed on authentik instance {{ $labels.instance }}
        - alert: DisconnectedOutposts
          labels:
            severity: critical
          expr: sum by (outpost) (max without (pid) (authentik_outposts_connected{uid!~"specific.*"})) < 1
          for: 30m
          annotations:
            summary: Disconnected outpost
            message: Outpost {{ $labels.outpost }} has at least 1 disconnected instance
@@ -0,0 +1,34 @@
---
# Source: authentik/templates/redis-replication.yaml
apiVersion: redis.redis.opstreelabs.in/v1beta2
kind: RedisReplication
metadata:
  name: redis-replication-authentik
  namespace: authentik
  labels:
    app.kubernetes.io/name: redis-replication-authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
spec:
  clusterSize: 3
  podSecurityContext:
    runAsUser: 1000
    fsGroup: 1000
  kubernetesConfig:
    image: quay.io/opstree/redis:v8.0.3
    imagePullPolicy: IfNotPresent
    resources:
      requests:
        cpu: 50m
        memory: 128Mi
  storage:
    volumeClaimTemplate:
      spec:
        storageClassName: ceph-block
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 1Gi
  redisExporter:
    enabled: true
    image: quay.io/opstree/redis-exporter:v1.48.0
clusters/cl01tl/manifests/authentik/Role-authentik.yml (new file, 86 lines)
@@ -0,0 +1,86 @@
---
# Source: authentik/charts/authentik/charts/serviceAccount/templates/role.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: authentik
  namespace: "authentik"
  labels:
    helm.sh/chart: "serviceAccount-2.1.0"
    app.kubernetes.io/name: "serviceAccount"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2.1.0"
rules:
  - apiGroups:
      - ""
    resources:
      - secrets
      - services
      - configmaps
    verbs:
      - get
      - create
      - delete
      - list
      - patch
  - apiGroups:
      - extensions
      - apps
    resources:
      - deployments
    verbs:
      - get
      - create
      - delete
      - list
      - patch
  - apiGroups:
      - extensions
      - networking.k8s.io
    resources:
      - ingresses
    verbs:
      - get
      - create
      - delete
      - list
      - patch
  - apiGroups:
      - traefik.containo.us
      - traefik.io
    resources:
      - middlewares
    verbs:
      - get
      - create
      - delete
      - list
      - patch
  - apiGroups:
      - gateway.networking.k8s.io
    resources:
      - httproutes
    verbs:
      - get
      - create
      - delete
      - list
      - patch
  - apiGroups:
      - monitoring.coreos.com
    resources:
      - servicemonitors
    verbs:
      - get
      - create
      - delete
      - list
      - patch
  - apiGroups:
      - apiextensions.k8s.io
    resources:
      - customresourcedefinitions
    verbs:
      - list
@@ -0,0 +1,22 @@
---
# Source: authentik/charts/authentik/charts/serviceAccount/templates/rolebinding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: authentik
  namespace: "authentik"
  labels:
    helm.sh/chart: "serviceAccount-2.1.0"
    app.kubernetes.io/name: "serviceAccount"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2.1.0"
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: authentik
subjects:
  - kind: ServiceAccount
    name: authentik
    namespace: "authentik"
@@ -0,0 +1,26 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/scheduled-backup.yaml
apiVersion: postgresql.cnpg.io/v1
kind: ScheduledBackup
metadata:
  name: "authentik-postgresql-17-daily-backup-scheduled-backup"
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  immediate: false
  suspend: false
  schedule: "0 0 0 * * *"
  backupOwnerReference: self
  cluster:
    name: authentik-postgresql-17-cluster
  method: plugin
  pluginConfiguration:
    name: barman-cloud.cloudnative-pg.io
    parameters:
      barmanObjectName: "authentik-postgresql-17-external-backup"
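Note that CNPG ScheduledBackup schedules use a six-field cron expression with a leading seconds field, so "0 0 0 * * *" fires once per day at midnight rather than once per minute during the midnight hour as a five-field crontab would read. A tiny sketch that just labels the fields makes the intent explicit:

```python
# Sketch: label the six fields of the CNPG ScheduledBackup cron expression.
schedule = "0 0 0 * * *"
fields = ["second", "minute", "hour", "day-of-month", "month", "day-of-week"]
print(dict(zip(fields, schedule.split())))
# {'second': '0', 'minute': '0', 'hour': '0', 'day-of-month': '*', 'month': '*', 'day-of-week': '*'}
```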
@@ -0,0 +1,26 @@
---
# Source: authentik/charts/postgres-17-cluster/templates/scheduled-backup.yaml
apiVersion: postgresql.cnpg.io/v1
kind: ScheduledBackup
metadata:
  name: "authentik-postgresql-17-live-backup-scheduled-backup"
  namespace: authentik
  labels:
    helm.sh/chart: postgres-17-cluster-6.16.1
    app.kubernetes.io/name: authentik-postgresql-17
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    app.kubernetes.io/version: "6.16.1"
    app.kubernetes.io/managed-by: Helm
spec:
  immediate: true
  suspend: false
  schedule: "0 0 0 * * *"
  backupOwnerReference: self
  cluster:
    name: authentik-postgresql-17-cluster
  method: plugin
  pluginConfiguration:
    name: barman-cloud.cloudnative-pg.io
    parameters:
      barmanObjectName: "authentik-postgresql-17-garage-local-backup"
clusters/cl01tl/manifests/authentik/Secret-authentik.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
---
# Source: authentik/charts/authentik/templates/secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: authentik
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
data:
  AUTHENTIK_EMAIL__PORT: "NTg3"
  AUTHENTIK_EMAIL__TIMEOUT: "MzA="
  AUTHENTIK_EMAIL__USE_SSL: "ZmFsc2U="
  AUTHENTIK_EMAIL__USE_TLS: "ZmFsc2U="
  AUTHENTIK_ENABLED: "dHJ1ZQ=="
  AUTHENTIK_ERROR_REPORTING__ENABLED: "ZmFsc2U="
  AUTHENTIK_ERROR_REPORTING__ENVIRONMENT: "azhz"
  AUTHENTIK_ERROR_REPORTING__SEND_PII: "ZmFsc2U="
  AUTHENTIK_EVENTS__CONTEXT_PROCESSORS__ASN: "L2dlb2lwL0dlb0xpdGUyLUFTTi5tbWRi"
  AUTHENTIK_EVENTS__CONTEXT_PROCESSORS__GEOIP: "L2dlb2lwL0dlb0xpdGUyLUNpdHkubW1kYg=="
  AUTHENTIK_LOG_LEVEL: "aW5mbw=="
  AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE: "Z2hjci5pby9nb2F1dGhlbnRpay8lKHR5cGUpczolKHZlcnNpb24pcw=="
  AUTHENTIK_POSTGRESQL__HOST: "YXV0aGVudGlrLXBvc3RncmVzcWw="
  AUTHENTIK_POSTGRESQL__NAME: "YXV0aGVudGlr"
  AUTHENTIK_POSTGRESQL__PORT: "NTQzMg=="
  AUTHENTIK_POSTGRESQL__USER: "YXV0aGVudGlr"
  AUTHENTIK_REDIS__HOST: "cmVkaXMtcmVwbGljYXRpb24tYXV0aGVudGlrLW1hc3Rlcg=="
  AUTHENTIK_WEB__PATH: "Lw=="
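The chart-generated Secret stores its values base64-encoded, as Kubernetes requires; the strings above are plain configuration, not ciphertext. A quick way to sanity-check a value (for example, confirming the SMTP port is 587) is to decode it locally:

```python
# Sketch: decode a few of the base64-encoded Secret values for inspection.
import base64

values = {
    "AUTHENTIK_EMAIL__PORT": "NTg3",
    "AUTHENTIK_LOG_LEVEL": "aW5mbw==",
    "AUTHENTIK_REDIS__HOST": "cmVkaXMtcmVwbGljYXRpb24tYXV0aGVudGlrLW1hc3Rlcg==",
}
for key, encoded in values.items():
    print(key, "=", base64.b64decode(encoded).decode())
# AUTHENTIK_EMAIL__PORT = 587
# AUTHENTIK_LOG_LEVEL = info
# AUTHENTIK_REDIS__HOST = redis-replication-authentik-master
```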
@@ -0,0 +1,26 @@
---
# Source: authentik/charts/authentik/templates/server/metrics.yaml
apiVersion: v1
kind: Service
metadata:
  name: authentik-server-metrics
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "server-metrics"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
spec:
  type: ClusterIP
  ports:
    - name: metrics
      protocol: TCP
      port: 9300
      targetPort: metrics
  selector:
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "server"
@@ -0,0 +1,30 @@
---
# Source: authentik/charts/authentik/templates/server/service.yaml
apiVersion: v1
kind: Service
metadata:
  name: authentik-server
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "server"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
spec:
  type: ClusterIP
  ports:
    - name: http
      protocol: TCP
      port: 80
      targetPort: 9000
    - name: https
      protocol: TCP
      port: 443
      targetPort: 9443
  selector:
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "server"
@@ -0,0 +1,14 @@
---
# Source: authentik/charts/authentik/charts/serviceAccount/templates/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
  name: authentik
  namespace: "authentik"
  labels:
    helm.sh/chart: "serviceAccount-2.1.0"
    app.kubernetes.io/name: "serviceAccount"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2.1.0"
@@ -0,0 +1,29 @@
---
# Source: authentik/charts/authentik/templates/server/servicemonitor.yaml
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
  name: authentik-server
  namespace: "authentik"
  labels:
    helm.sh/chart: "authentik-2025.10.2"
    app.kubernetes.io/name: "authentik"
    app.kubernetes.io/instance: "authentik"
    app.kubernetes.io/component: "server-metrics"
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/part-of: "authentik"
    app.kubernetes.io/version: "2025.10.2"
spec:
  endpoints:
    - port: metrics
      interval: 30s
      scrapeTimeout: 3s
      path: /metrics
  namespaceSelector:
    matchNames:
      - authentik
  selector:
    matchLabels:
      app.kubernetes.io/name: "authentik"
      app.kubernetes.io/instance: "authentik"
      app.kubernetes.io/component: "server-metrics"
@@ -0,0 +1,21 @@
---
# Source: authentik/templates/service-monitor.yaml
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
  name: redis-replication-authentik
  namespace: authentik
  labels:
    app.kubernetes.io/name: redis-replication-authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/part-of: authentik
    redis-operator: "true"
    env: production
spec:
  selector:
    matchLabels:
      redis_setup_type: replication
  endpoints:
    - port: redis-exporter
      interval: 30s
      scrapeTimeout: 10s