infrastructure/clusters/cl01tl/monitoring/s3-exporter/values.yaml
s3-exporter:
  controllers:
    # Exporter for the DigitalOcean Spaces account (nyc3 endpoint)
    digital-ocean:
      type: deployment
      replicas: 1
      strategy: Recreate
      revisionHistoryLimit: 3
      containers:
        main:
          image:
            repository: molu8bits/s3bucket_exporter
            tag: 1.0.2
            pullPolicy: IfNotPresent
          env:
            - name: S3_NAME
              value: digital-ocean
            - name: S3_ENDPOINT
              value: https://nyc3.digitaloceanspaces.com
            - name: S3_ACCESS_KEY
              valueFrom:
                secretKeyRef:
                  name: s3-do-home-infra-secret
                  key: AWS_ACCESS_KEY_ID
            - name: S3_SECRET_KEY
              valueFrom:
                secretKeyRef:
                  name: s3-do-home-infra-secret
                  key: AWS_SECRET_ACCESS_KEY
            - name: S3_REGION
              valueFrom:
                secretKeyRef:
                  name: s3-do-home-infra-secret
                  key: AWS_REGION
            - name: LOG_LEVEL
              value: debug
            - name: S3_FORCE_PATH_STYLE
              value: "false" # quoted so the rendered env value is a string, not a YAML boolean
          resources:
            requests:
              cpu: 10m
              memory: 64Mi
    # Exporter for the Ceph object bucket used by Directus
    ceph-directus:
      type: deployment
      replicas: 1
      strategy: Recreate
      revisionHistoryLimit: 3
      containers:
        main:
          image:
            repository: molu8bits/s3bucket_exporter
            tag: 1.0.2
            pullPolicy: IfNotPresent
          env:
            - name: S3_NAME
              value: ceph-directus
            - name: S3_ENDPOINT
              valueFrom:
                secretKeyRef:
                  name: s3-ceph-directus-secret
                  key: BUCKET_HOST
            - name: S3_ACCESS_KEY
              valueFrom:
                secretKeyRef:
                  name: s3-ceph-directus-secret
                  key: AWS_ACCESS_KEY_ID
            - name: S3_SECRET_KEY
              valueFrom:
                secretKeyRef:
                  name: s3-ceph-directus-secret
                  key: AWS_SECRET_ACCESS_KEY
            - name: S3_REGION
              value: us-east-1
            - name: LOG_LEVEL
              value: debug
            - name: S3_FORCE_PATH_STYLE
              value: "true" # path-style addressing for the Ceph endpoint
          resources:
            requests:
              cpu: 10m
              memory: 64Mi
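  # The secretKeyRef entries above expect Secrets that already exist in this
  # namespace (s3-do-home-infra-secret, s3-ceph-directus-secret); they are not
  # created by this chart. A minimal sketch of the expected shape, with
  # placeholder values rather than the manifests actually used in this repo:
  #
  #   apiVersion: v1
  #   kind: Secret
  #   metadata:
  #     name: s3-do-home-infra-secret
  #   type: Opaque
  #   stringData:
  #     AWS_ACCESS_KEY_ID: <spaces-access-key>
  #     AWS_SECRET_ACCESS_KEY: <spaces-secret-key>
  #     AWS_REGION: <spaces-region>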
  service:
    digital-ocean:
      controller: digital-ocean
      ports:
        metrics:
          port: 9655
          targetPort: 9655
          protocol: TCP
    ceph-directus:
      controller: ceph-directus
      ports:
        metrics:
          port: 9655
          targetPort: 9655
          protocol: TCP
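# Nothing in this file scrapes the two metrics Services; a Prometheus Operator
# ServiceMonitor is one way to pick them up. A minimal sketch, assuming the
# Services carry an app.kubernetes.io/instance: s3-exporter label (the selector
# below is an assumption, not taken from this repository):
#
#   apiVersion: monitoring.coreos.com/v1
#   kind: ServiceMonitor
#   metadata:
#     name: s3-exporter
#   spec:
#     selector:
#       matchLabels:
#         app.kubernetes.io/instance: s3-exporter
#     endpoints:
#       - port: metrics
#         interval: 1m
#         path: /metrics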