Hey guys,
I’m trying to set up Keycloak on my Rancher Kubernetes test environment, but the database JDBC connection keeps failing.
Here’s my configuration:
# Pod dump (`kubectl get pod -o yaml` style); the paste had lost its indentation,
# which makes YAML unparseable — restored below with the conventional 2-space
# indent and flush block sequences that kubectl emits. All keys/values unchanged.
apiVersion: v1
kind: Pod
metadata:
  annotations:
    checksum/configmap-env-vars: 9171c7da26dc7257a2aae80136f53b9f7a841fd8e0d9fdb86661c1d2b2c809ee
    checksum/secrets: c6c278f69e59f814abf3368d9fbf7bbefaf14766e8a997fda6badd5cd197aa37
  creationTimestamp: "2022-06-28T13:04:50Z"
  generateName: keycloak-bitnami-
  labels:
    app.kubernetes.io/component: keycloak
    app.kubernetes.io/instance: keycloak-bitnami
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/name: keycloak
    controller-revision-hash: keycloak-bitnami-54c98ff9c8
    helm.sh/chart: keycloak-9.3.2
    statefulset.kubernetes.io/pod-name: keycloak-bitnami-0
  managedFields:
  - apiVersion: v1
    fieldsType: FieldsV1
    fieldsV1:
      f:metadata:
        f:annotations:
          .: {}
          f:checksum/configmap-env-vars: {}
          f:checksum/secrets: {}
        f:generateName: {}
        f:labels:
          .: {}
          f:app.kubernetes.io/component: {}
          f:app.kubernetes.io/instance: {}
          f:app.kubernetes.io/managed-by: {}
          f:app.kubernetes.io/name: {}
          f:controller-revision-hash: {}
          f:helm.sh/chart: {}
          f:statefulset.kubernetes.io/pod-name: {}
        f:ownerReferences:
          .: {}
          k:{"uid":"33c23b74-cab2-4290-954b-8271c6333b32"}: {}
      f:spec:
        f:affinity:
          .: {}
          f:podAntiAffinity:
            .: {}
            f:preferredDuringSchedulingIgnoredDuringExecution: {}
        f:containers:
          k:{"name":"keycloak"}:
            .: {}
            f:env:
              .: {}
              k:{"name":"BITNAMI_DEBUG"}:
                .: {}
                f:name: {}
                f:value: {}
              k:{"name":"KEYCLOAK_ADMIN_PASSWORD"}:
                .: {}
                f:name: {}
                f:valueFrom:
                  .: {}
                  f:secretKeyRef: {}
              k:{"name":"KEYCLOAK_DATABASE_PASSWORD"}:
                .: {}
                f:name: {}
                f:valueFrom:
                  .: {}
                  f:secretKeyRef: {}
              k:{"name":"KEYCLOAK_MANAGEMENT_PASSWORD"}:
                .: {}
                f:name: {}
                f:valueFrom:
                  .: {}
                  f:secretKeyRef: {}
              k:{"name":"KUBERNETES_NAMESPACE"}:
                .: {}
                f:name: {}
                f:valueFrom:
                  .: {}
                  f:fieldRef: {}
            f:envFrom: {}
            f:image: {}
            f:imagePullPolicy: {}
            f:livenessProbe:
              .: {}
              f:failureThreshold: {}
              f:httpGet:
                .: {}
                f:path: {}
                f:port: {}
                f:scheme: {}
              f:initialDelaySeconds: {}
              f:periodSeconds: {}
              f:successThreshold: {}
              f:timeoutSeconds: {}
            f:name: {}
            f:ports:
              .: {}
              k:{"containerPort":8080,"protocol":"TCP"}:
                .: {}
                f:containerPort: {}
                f:name: {}
                f:protocol: {}
              k:{"containerPort":8443,"protocol":"TCP"}:
                .: {}
                f:containerPort: {}
                f:name: {}
                f:protocol: {}
              k:{"containerPort":9990,"protocol":"TCP"}:
                .: {}
                f:containerPort: {}
                f:name: {}
                f:protocol: {}
            f:readinessProbe:
              .: {}
              f:failureThreshold: {}
              f:httpGet:
                .: {}
                f:path: {}
                f:port: {}
                f:scheme: {}
              f:initialDelaySeconds: {}
              f:periodSeconds: {}
              f:successThreshold: {}
              f:timeoutSeconds: {}
            f:resources: {}
            f:securityContext:
              .: {}
              f:runAsNonRoot: {}
              f:runAsUser: {}
            f:terminationMessagePath: {}
            f:terminationMessagePolicy: {}
        f:dnsPolicy: {}
        f:enableServiceLinks: {}
        f:hostname: {}
        f:restartPolicy: {}
        f:schedulerName: {}
        f:securityContext:
          .: {}
          f:fsGroup: {}
        f:serviceAccount: {}
        f:serviceAccountName: {}
        f:subdomain: {}
        f:terminationGracePeriodSeconds: {}
    manager: kube-controller-manager
    operation: Update
    time: "2022-06-28T13:04:50Z"
  - apiVersion: v1
    fieldsType: FieldsV1
    fieldsV1:
      f:status:
        f:conditions:
          k:{"type":"ContainersReady"}:
            .: {}
            f:lastProbeTime: {}
            f:lastTransitionTime: {}
            f:message: {}
            f:reason: {}
            f:status: {}
            f:type: {}
          k:{"type":"Initialized"}:
            .: {}
            f:lastProbeTime: {}
            f:lastTransitionTime: {}
            f:status: {}
            f:type: {}
          k:{"type":"Ready"}:
            .: {}
            f:lastProbeTime: {}
            f:lastTransitionTime: {}
            f:message: {}
            f:reason: {}
            f:status: {}
            f:type: {}
        f:containerStatuses: {}
        f:hostIP: {}
        f:phase: {}
        f:podIP: {}
        f:podIPs:
          .: {}
          k:{"ip":"10.42.4.86"}:
            .: {}
            f:ip: {}
        f:startTime: {}
    manager: kubelet
    operation: Update
    subresource: status
    time: "2022-06-28T13:04:52Z"
  name: keycloak-bitnami-0
  namespace: keycloak
  ownerReferences:
  - apiVersion: apps/v1
    blockOwnerDeletion: true
    controller: true
    kind: StatefulSet
    name: keycloak-bitnami
    uid: 33c23b74-cab2-4290-954b-8271c6333b32
  resourceVersion: "1986301"
  uid: a328e91a-2c93-4425-8841-c211944fc356
spec:
  affinity:
    podAntiAffinity:
      preferredDuringSchedulingIgnoredDuringExecution:
      - podAffinityTerm:
          labelSelector:
            matchLabels:
              app.kubernetes.io/instance: keycloak-bitnami
              app.kubernetes.io/name: keycloak
          namespaces:
          - keycloak
          topologyKey: kubernetes.io/hostname
        weight: 1
  containers:
  - env:
    - name: KUBERNETES_NAMESPACE
      valueFrom:
        fieldRef:
          apiVersion: v1
          fieldPath: metadata.namespace
    - name: BITNAMI_DEBUG
      value: "false"
    - name: KEYCLOAK_ADMIN_PASSWORD
      valueFrom:
        secretKeyRef:
          key: admin-password
          name: keycloak-bitnami
    - name: KEYCLOAK_MANAGEMENT_PASSWORD
      valueFrom:
        secretKeyRef:
          key: management-password
          name: keycloak-bitnami
    # Database credential comes from the PostgreSQL sub-chart's secret; the
    # host/port/user/db-name values are injected via envFrom below — check
    # keycloak-bitnami-env-vars if the JDBC connection fails.
    - name: KEYCLOAK_DATABASE_PASSWORD
      valueFrom:
        secretKeyRef:
          key: password
          name: keycloak-bitnami-postgresql
    envFrom:
    - configMapRef:
        name: keycloak-bitnami-env-vars
    image: docker.io/bitnami/keycloak:18.0.2-debian-11-r0
    imagePullPolicy: IfNotPresent
    livenessProbe:
      failureThreshold: 3
      httpGet:
        path: /
        port: http
        scheme: HTTP
      initialDelaySeconds: 300
      periodSeconds: 1
      successThreshold: 1
      timeoutSeconds: 5
    name: keycloak
    ports:
    - containerPort: 8080
      name: http
      protocol: TCP
    - containerPort: 8443
      name: https
      protocol: TCP
    - containerPort: 9990
      name: http-management
      protocol: TCP
    readinessProbe:
      failureThreshold: 3
      httpGet:
        path: /realms/master
        port: http
        scheme: HTTP
      initialDelaySeconds: 30
      periodSeconds: 10
      successThreshold: 1
      timeoutSeconds: 1
    resources: {}
    securityContext:
      runAsNonRoot: true
      runAsUser: 1001
    terminationMessagePath: /dev/termination-log
    terminationMessagePolicy: File
    volumeMounts:
    - mountPath: /var/run/secrets/kubernetes.io/serviceaccount
      name: kube-api-access-89rqp
      readOnly: true
  dnsPolicy: ClusterFirst
  enableServiceLinks: true
  hostname: keycloak-bitnami-0
  nodeName: worker0.pascals-lab.cool
  preemptionPolicy: PreemptLowerPriority
  priority: 0
  restartPolicy: Always
  schedulerName: default-scheduler
  securityContext:
    fsGroup: 1001
  serviceAccount: keycloak-bitnami
  serviceAccountName: keycloak-bitnami
  subdomain: keycloak-bitnami-headless
  terminationGracePeriodSeconds: 30
  tolerations:
  - effect: NoExecute
    key: node.kubernetes.io/not-ready
    operator: Exists
    tolerationSeconds: 300
  - effect: NoExecute
    key: node.kubernetes.io/unreachable
    operator: Exists
    tolerationSeconds: 300
  volumes:
  - name: kube-api-access-89rqp
    projected:
      defaultMode: 420
      sources:
      - serviceAccountToken:
          expirationSeconds: 3607
          path: token
      - configMap:
          items:
          - key: ca.crt
            path: ca.crt
          name: kube-root-ca.crt
      - downwardAPI:
          items:
          - fieldRef:
              apiVersion: v1
              fieldPath: metadata.namespace
            path: namespace
status:
  conditions:
  - lastProbeTime: null
    lastTransitionTime: "2022-06-28T13:04:50Z"
    status: "True"
    type: Initialized
  - lastProbeTime: null
    lastTransitionTime: "2022-06-28T13:04:50Z"
    message: 'containers with unready status: [keycloak]'
    reason: ContainersNotReady
    status: "False"
    type: Ready
  - lastProbeTime: null
    lastTransitionTime: "2022-06-28T13:04:50Z"
    message: 'containers with unready status: [keycloak]'
    reason: ContainersNotReady
    status: "False"
    type: ContainersReady
  - lastProbeTime: null
    lastTransitionTime: "2022-06-28T13:04:50Z"
    status: "True"
    type: PodScheduled
  containerStatuses:
  - containerID: docker://1f6c0143020f45091651cb8f80e3596e96ca2c70f89c73630f053c0be9dca1c4
    image: bitnami/keycloak:18.0.2-debian-11-r0
    imageID: docker-pullable://bitnami/keycloak@sha256:2d4942583df301f89f64c0d0aa8da360c056ea9215eda17de0ef3d37fbc34217
    lastState: {}
    name: keycloak
    ready: false
    restartCount: 0
    started: true
    state:
      running:
        startedAt: "2022-06-28T13:04:51Z"
  hostIP: 192.168.178.104
  phase: Running
  podIP: 10.42.4.86
  podIPs:
  - ip: 10.42.4.86
  qosClass: BestEffort
  startTime: "2022-06-28T13:04:50Z"
I hope the error in my configuration can be spotted easily. Thanks in advance,
Pascal