--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:55:34Z" involvedObject: apiVersion: operator.openshift.io/v1alpha1 kind: IstioCSR kind: Event lastTimestamp: "2026-04-16T23:55:34Z" message: controller is starting metadata: creationTimestamp: "2026-04-16T23:55:34Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: cert-manager-operator operation: Update time: "2026-04-16T23:55:34Z" name: 85f62347-c76b-4003-893a-38aa19749d62 namespace: default resourceVersion: "11539" uid: fd682e3d-ae4d-45f8-81f4-65773f010308 reason: ControllerStarted reportingComponent: cert-manager-istio-csr-controller reportingInstance: "" source: component: cert-manager-istio-csr-controller type: Normal - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-16T23:56:28Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Auth name: auth resourceVersion: "13745" uid: eef15db0-8ee9-4347-84a3-e6dcc570aa82 kind: Event lastTimestamp: "2026-04-16T23:56:35Z" message: 'failure deploying resource {map[apiVersion:rbac.authorization.k8s.io/v1 kind:Role metadata:map[annotations:map[platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:auth platform.opendatahub.io/instance.uid:eef15db0-8ee9-4347-84a3-e6dcc570aa82 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1] labels:map[platform.opendatahub.io/part-of:auth] name:data-science-admingroup-role namespace:opendatahub ownerReferences:[map[apiVersion:services.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Auth name:auth uid:eef15db0-8ee9-4347-84a3-e6dcc570aa82]]] rules:[map[apiGroups:[services.platform.opendatahub.io] resources:[auths] verbs:[get list watch patch update]] map[apiGroups:[services.opendatahub.io] resources:[auths/status] verbs:[get]] map[apiGroups:[infrastructure.opendatahub.io] resources:[hardwareprofiles] verbs:[create get list watch patch update delete]] map[apiGroups:[route.openshift.io] resources:[routes] verbs:[get list watch]] map[apiGroups:[batch] resources:[cronjobs] verbs:[get update watch]] map[apiGroups:[image.openshift.io] resources:[imagestreams] verbs:[create get list patch update delete watch]] map[apiGroups:[build.openshift.io] resources:[builds buildconfigs] verbs:[list get watch]] map[apiGroups:[apps] resources:[deployments] verbs:[patch update]] map[apiGroups:[opendatahub.io] resources:[odhdashboardconfigs] verbs:[get list watch create update patch]] map[apiGroups:[dashboard.opendatahub.io] resources:[odhapplications odhdocuments] verbs:[get list watch]] map[apiGroups:[console.openshift.io] resources:[odhquickstarts] verbs:[get list watch]] map[apiGroups:[template.openshift.io] resources:[templates] verbs:[get list watch create patch update delete]] map[apiGroups:[serving.kserve.io] resources:[servingruntimes] verbs:[create]] map[apiGroups:[nim.opendatahub.io] resources:[accounts] verbs:[watch update get list create patch delete]] map[apiGroups:[] resourceNames:[tier-to-group-mapping] resources:[configmaps] verbs:[get list watch patch update]]]]}: apply failed rbac.authorization.k8s.io/v1, Kind=Role: unable to patch rbac.authorization.k8s.io/v1, Kind=Role opendatahub/data-science-admingroup-role: roles.rbac.authorization.k8s.io "data-science-admingroup-role" is 
forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion services.platform.opendatahub.io/v1alpha1 Kind Auth: no matches for kind "Auth" in version "services.platform.opendatahub.io/v1alpha1"' metadata: creationTimestamp: "2026-04-16T23:56:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T23:56:35Z" name: auth.18a6fba4e95d42ce namespace: default resourceVersion: "14140" uid: e0ea617e-8740-4378-9b47-64061e4d94d8 reason: ProvisioningError reportingComponent: auth reportingInstance: "" source: component: auth type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:47Z" involvedObject: kind: CertificateSigningRequest name: csr-hdcb2 kind: Event lastTimestamp: "2026-04-16T23:50:47Z" message: CSR "csr-hdcb2" has been approved metadata: creationTimestamp: "2026-04-16T23:50:47Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:50:47Z" name: csr-hdcb2.18a6fb55877aa34a namespace: default resourceVersion: "6358" uid: ee71e519-6c5d-41c1-b722-0e6dc7836f1f reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:44Z" involvedObject: kind: CertificateSigningRequest name: csr-kf6np kind: Event lastTimestamp: "2026-04-16T23:50:44Z" message: CSR "csr-kf6np" has been approved metadata: creationTimestamp: "2026-04-16T23:50:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:50:44Z" name: csr-kf6np.18a6fb54cb47c794 namespace: default resourceVersion: "6291" uid: 4b885c53-d777-431e-b61c-8bd0ab66b08e reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:50Z" involvedObject: kind: CertificateSigningRequest name: csr-s8f6d kind: Event lastTimestamp: "2026-04-16T23:50:50Z" message: CSR "csr-s8f6d" has been approved metadata: creationTimestamp: "2026-04-16T23:50:50Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:50:50Z" name: csr-s8f6d.18a6fb5645592595 namespace: default resourceVersion: "6515" uid: 085d28ed-8c77-4cae-8347-a8ad8513c7f9 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:59Z" involvedObject: kind: CertificateSigningRequest name: csr-v2gv8 kind: 
Event lastTimestamp: "2026-04-16T23:50:59Z" message: CSR "csr-v2gv8" has been approved metadata: creationTimestamp: "2026-04-16T23:50:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:50:59Z" name: csr-v2gv8.18a6fb5832887327 namespace: default resourceVersion: "6689" uid: 8fd9e499-bb03-41f2-b611-03759ae52a8e reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:42Z" involvedObject: kind: CertificateSigningRequest name: csr-wdgw6 kind: Event lastTimestamp: "2026-04-16T23:50:42Z" message: CSR "csr-wdgw6" has been approved metadata: creationTimestamp: "2026-04-16T23:50:42Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:50:42Z" name: csr-wdgw6.18a6fb54400335fc namespace: default resourceVersion: "6242" uid: 75590736-cc70-4aca-aa76-715b3cdf0502 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:52Z" involvedObject: kind: CertificateSigningRequest name: csr-wwwl4 kind: Event lastTimestamp: "2026-04-16T23:50:52Z" message: CSR "csr-wwwl4" has been approved metadata: creationTimestamp: "2026-04-16T23:50:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:50:52Z" name: csr-wwwl4.18a6fb56a85029c9 namespace: default resourceVersion: "6577" uid: d8d7b9aa-b1bf-40cb-9dd5-afc58d7e70f7 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-16T23:56:28Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "13658" uid: 341da59a-f9fc-46d6-92ef-0cdcd2702650 kind: Event lastTimestamp: "2026-04-16T23:56:35Z" message: 'failed to create auth proxy secret: secrets "kube-auth-proxy-creds" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion services.platform.opendatahub.io/v1alpha1 Kind GatewayConfig: no matches for kind "GatewayConfig" in version "services.platform.opendatahub.io/v1alpha1"' metadata: creationTimestamp: "2026-04-16T23:56:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T23:56:35Z" name: default-gateway.18a6fba4df74bd15 namespace: default resourceVersion: "14145" uid: 
2db10e44-7778-4c85-861c-d627d2d4f4f8 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 4 eventTime: null firstTimestamp: "2026-04-16T23:56:40Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14432" uid: 341da59a-f9fc-46d6-92ef-0cdcd2702650 kind: Event lastTimestamp: "2026-04-16T23:56:51Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-16T23:56:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T23:56:51Z" name: default-gateway.18a6fba7c0db092c namespace: default resourceVersion: "14643" uid: 3ea5d2b0-eb15-409a-b0d0-3fabfb59bb93 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-16T23:56:38Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "14288" uid: 742e0aa2-3352-4da0-8812-154fd3c114cf kind: Event lastTimestamp: "2026-04-16T23:57:03Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:742e0aa2-3352-4da0-8812-154fd3c114cf platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:742e0aa2-3352-4da0-8812-154fd3c114cf]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . 
/sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ 
\"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar 
ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-16T23:56:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T23:57:03Z" name: default-kserve.18a6fba73cc9b777 namespace: default resourceVersion: "15237" uid: 197f7035-05d3-4fef-9fb5-b6ed086b0745 reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 31 eventTime: null firstTimestamp: "2026-04-16T23:56:28Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "13622" uid: 167f1bdd-b805-4b74-9ba1-74f8f81ef063 kind: Event lastTimestamp: "2026-04-17T00:13:36Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-16T23:56:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T00:13:36Z" name: default-monitoring.18a6fba4d65f3352 namespace: default resourceVersion: "35402" uid: e6035a8e-1176-4f0d-b8ce-045070a8023f reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:28Z" involvedObject: kind: Node name: ip-10-0-128-98.ec2.internal uid: ip-10-0-128-98.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:28Z" message: Starting kubelet. 
metadata: creationTimestamp: "2026-04-16T23:50:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:28Z" name: ip-10-0-128-98.ec2.internal.18a6fb50f8967623 namespace: default resourceVersion: "5769" uid: d5100063-a4c0-4559-ba3a-7b499b40d49a reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-128-98.ec2.internal source: component: kubelet host: ip-10-0-128-98.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:28Z" involvedObject: kind: Node name: ip-10-0-128-98.ec2.internal uid: ip-10-0-128-98.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:28Z" message: 'Node ip-10-0-128-98.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T23:50:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:28Z" name: ip-10-0-128-98.ec2.internal.18a6fb50fa635fff namespace: default resourceVersion: "5869" uid: 8132f8fb-d14c-4093-8d4d-a2cb9aff8493 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-128-98.ec2.internal source: component: kubelet host: ip-10-0-128-98.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:28Z" involvedObject: kind: Node name: ip-10-0-128-98.ec2.internal uid: ip-10-0-128-98.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:28Z" message: 'Node ip-10-0-128-98.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T23:50:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:28Z" name: ip-10-0-128-98.ec2.internal.18a6fb50fa63c058 namespace: default resourceVersion: "5872" uid: b60c804f-eb75-4070-9078-be3704772df4 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-128-98.ec2.internal source: component: kubelet host: ip-10-0-128-98.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:28Z" involvedObject: kind: Node name: ip-10-0-128-98.ec2.internal uid: ip-10-0-128-98.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:28Z" message: 'Node ip-10-0-128-98.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T23:50:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:28Z" name: ip-10-0-128-98.ec2.internal.18a6fb50fa63fad7 namespace: default resourceVersion: "5878" uid: 3360db70-1736-407d-ba9c-f488a4637d02 reason: NodeHasSufficientPID reportingComponent: kubelet 
reportingInstance: ip-10-0-128-98.ec2.internal source: component: kubelet host: ip-10-0-128-98.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:28Z" involvedObject: kind: Node name: ip-10-0-128-98.ec2.internal uid: ip-10-0-128-98.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:28Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-16T23:50:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:28Z" name: ip-10-0-128-98.ec2.internal.18a6fb50fc96249c namespace: default resourceVersion: "5773" uid: 672d2d79-d431-4e90-a21e-e2a0b552eacc reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-128-98.ec2.internal source: component: kubelet host: ip-10-0-128-98.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:28Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-128-98.ec2.internal resourceVersion: "5776" uid: 43aca587-5a6c-41bd-89f2-2322f551841a kind: Event lastTimestamp: "2026-04-16T23:50:28Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T23:50:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T23:50:28Z" name: ip-10-0-128-98.ec2.internal.18a6fb51121eb100 namespace: default resourceVersion: "5884" uid: a20f11f2-306a-469c-8152-bbc8d2f5fbeb reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:33Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-128-98.ec2.internal uid: 43aca587-5a6c-41bd-89f2-2322f551841a kind: Event lastTimestamp: "2026-04-16T23:50:33Z" message: 'Node ip-10-0-128-98.ec2.internal event: Registered Node ip-10-0-128-98.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:50:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:50:33Z" name: ip-10-0-128-98.ec2.internal.18a6fb5229fab4a9 namespace: default resourceVersion: "5985" uid: c1d2692a-c4c0-4abd-9f1e-46159057faa7 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:51:00Z" involvedObject: kind: Node name: ip-10-0-128-98.ec2.internal uid: ip-10-0-128-98.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:51:00Z" message: 'Node ip-10-0-128-98.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T23:51:00Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} 
f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:51:00Z" name: ip-10-0-128-98.ec2.internal.18a6fb588277f172 namespace: default resourceVersion: "6725" uid: 0c5432a5-0a58-4565-abe4-e44dd93a745e reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-128-98.ec2.internal source: component: kubelet host: ip-10-0-128-98.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:52:44Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-128-98.ec2.internal uid: 43aca587-5a6c-41bd-89f2-2322f551841a kind: Event lastTimestamp: "2026-04-16T23:52:44Z" message: 'Node ip-10-0-128-98.ec2.internal event: Registered Node ip-10-0-128-98.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:52:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:52:45Z" name: ip-10-0-128-98.ec2.internal.18a6fb70cf69b046 namespace: default resourceVersion: "8015" uid: 16bd5816-ed30-4cbc-82d1-cf30b3ef5cd7 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:15Z" involvedObject: kind: Node name: ip-10-0-133-231.ec2.internal uid: ip-10-0-133-231.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:16Z" message: 'Node ip-10-0-133-231.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T23:50:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:16Z" name: ip-10-0-133-231.ec2.internal.18a6fb4e19ca9445 namespace: default resourceVersion: "5346" uid: b83d154c-a36e-4324-a90f-965fbe2977fd reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-133-231.ec2.internal source: component: kubelet host: ip-10-0-133-231.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:15Z" involvedObject: kind: Node name: ip-10-0-133-231.ec2.internal uid: ip-10-0-133-231.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:16Z" message: 'Node ip-10-0-133-231.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T23:50:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:16Z" name: ip-10-0-133-231.ec2.internal.18a6fb4e19cb0b85 namespace: default resourceVersion: "5347" uid: 8a0831ad-7c7b-481f-98e2-0296e94d7fb6 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-133-231.ec2.internal source: component: kubelet host: ip-10-0-133-231.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:15Z" involvedObject: kind: Node name: 
ip-10-0-133-231.ec2.internal uid: ip-10-0-133-231.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:16Z" message: 'Node ip-10-0-133-231.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T23:50:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:16Z" name: ip-10-0-133-231.ec2.internal.18a6fb4e19cb3565 namespace: default resourceVersion: "5348" uid: 6627fd65-167c-4c01-8228-ce4bca7ef9c4 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-133-231.ec2.internal source: component: kubelet host: ip-10-0-133-231.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:15Z" involvedObject: kind: Node name: ip-10-0-133-231.ec2.internal uid: ip-10-0-133-231.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:15Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-16T23:50:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:15Z" name: ip-10-0-133-231.ec2.internal.18a6fb4e1c19c999 namespace: default resourceVersion: "5317" uid: 01242810-0664-4e92-b4c0-7058a22002b4 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-133-231.ec2.internal source: component: kubelet host: ip-10-0-133-231.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:16Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-133-231.ec2.internal resourceVersion: "5319" uid: f5573e47-2eee-40ae-ae61-f4bc4fd38a72 kind: Event lastTimestamp: "2026-04-16T23:50:16Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T23:50:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T23:50:16Z" name: ip-10-0-133-231.ec2.internal.18a6fb4e32866fa5 namespace: default resourceVersion: "5379" uid: 1706e29c-6774-49a3-99e8-d07db249ce4c reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:18Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-133-231.ec2.internal uid: f5573e47-2eee-40ae-ae61-f4bc4fd38a72 kind: Event lastTimestamp: "2026-04-16T23:50:18Z" message: 'Node ip-10-0-133-231.ec2.internal event: Registered Node ip-10-0-133-231.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:50:18Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:50:18Z" name: 
ip-10-0-133-231.ec2.internal.18a6fb4eabc83b3f namespace: default resourceVersion: "5478" uid: 7a1ab44f-ac0a-490b-b9f9-55af2e33461b reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:49Z" involvedObject: kind: Node name: ip-10-0-133-231.ec2.internal uid: ip-10-0-133-231.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:49Z" message: 'Node ip-10-0-133-231.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T23:50:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:49Z" name: ip-10-0-133-231.ec2.internal.18a6fb55de6059d5 namespace: default resourceVersion: "6438" uid: 838c4fc7-2cf8-4c2e-be41-c58ad56798e2 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-133-231.ec2.internal source: component: kubelet host: ip-10-0-133-231.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:52:44Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-133-231.ec2.internal uid: f5573e47-2eee-40ae-ae61-f4bc4fd38a72 kind: Event lastTimestamp: "2026-04-16T23:52:44Z" message: 'Node ip-10-0-133-231.ec2.internal event: Registered Node ip-10-0-133-231.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:52:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:52:45Z" name: ip-10-0-133-231.ec2.internal.18a6fb70cf68a47e namespace: default resourceVersion: "7996" uid: 113cf390-7a80-47f0-b937-327009b60083 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:19Z" involvedObject: kind: Node name: ip-10-0-134-103.ec2.internal uid: ip-10-0-134-103.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:19Z" message: Starting kubelet. 
metadata: creationTimestamp: "2026-04-16T23:50:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:19Z" name: ip-10-0-134-103.ec2.internal.18a6fb4efc3bb15a namespace: default resourceVersion: "5509" uid: 78ee7c82-110d-49ee-92cd-14891732c5b1 reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-134-103.ec2.internal source: component: kubelet host: ip-10-0-134-103.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:19Z" involvedObject: kind: Node name: ip-10-0-134-103.ec2.internal uid: ip-10-0-134-103.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:19Z" message: 'Node ip-10-0-134-103.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T23:50:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:20Z" name: ip-10-0-134-103.ec2.internal.18a6fb4efe30e3a2 namespace: default resourceVersion: "5602" uid: 6b9ebc1d-7214-46a1-8f94-0d896008a920 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-134-103.ec2.internal source: component: kubelet host: ip-10-0-134-103.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:19Z" involvedObject: kind: Node name: ip-10-0-134-103.ec2.internal uid: ip-10-0-134-103.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:19Z" message: 'Node ip-10-0-134-103.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T23:50:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:20Z" name: ip-10-0-134-103.ec2.internal.18a6fb4efe313bf1 namespace: default resourceVersion: "5605" uid: eac1b952-2944-43ee-812d-ad6d75122cc3 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-134-103.ec2.internal source: component: kubelet host: ip-10-0-134-103.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:50:19Z" involvedObject: kind: Node name: ip-10-0-134-103.ec2.internal uid: ip-10-0-134-103.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:19Z" message: 'Node ip-10-0-134-103.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T23:50:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:20Z" name: ip-10-0-134-103.ec2.internal.18a6fb4efe316b9f namespace: default resourceVersion: "5607" uid: 33010225-8308-43f9-8bb1-a26186b877c7 reason: NodeHasSufficientPID 
reportingComponent: kubelet reportingInstance: ip-10-0-134-103.ec2.internal source: component: kubelet host: ip-10-0-134-103.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:19Z" involvedObject: kind: Node name: ip-10-0-134-103.ec2.internal uid: ip-10-0-134-103.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:19Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-16T23:50:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:19Z" name: ip-10-0-134-103.ec2.internal.18a6fb4f00e98c62 namespace: default resourceVersion: "5513" uid: bf340ff3-ff48-44bb-8d94-c9ba69b6401d reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-134-103.ec2.internal source: component: kubelet host: ip-10-0-134-103.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:20Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-134-103.ec2.internal resourceVersion: "5515" uid: fef723c7-4e34-4ca5-914e-a17321f13376 kind: Event lastTimestamp: "2026-04-16T23:50:20Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T23:50:20Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T23:50:20Z" name: ip-10-0-134-103.ec2.internal.18a6fb4f16313114 namespace: default resourceVersion: "5614" uid: 1908ad1c-d865-4d1b-9411-5f4e32a016cc reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:23Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-134-103.ec2.internal uid: fef723c7-4e34-4ca5-914e-a17321f13376 kind: Event lastTimestamp: "2026-04-16T23:50:23Z" message: 'Node ip-10-0-134-103.ec2.internal event: Registered Node ip-10-0-134-103.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:50:23Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:50:23Z" name: ip-10-0-134-103.ec2.internal.18a6fb4fd5dfb51d namespace: default resourceVersion: "5693" uid: 61eb0657-c90d-49f0-b08e-3f325a385747 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:50:52Z" involvedObject: kind: Node name: ip-10-0-134-103.ec2.internal uid: ip-10-0-134-103.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:50:52Z" message: 'Node ip-10-0-134-103.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T23:50:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} 
f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:50:52Z" name: ip-10-0-134-103.ec2.internal.18a6fb56a3a0af67 namespace: default resourceVersion: "6559" uid: 484cc5b9-d223-4bec-942b-f14f3d6c39c4 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-134-103.ec2.internal source: component: kubelet host: ip-10-0-134-103.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:52:44Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-134-103.ec2.internal uid: fef723c7-4e34-4ca5-914e-a17321f13376 kind: Event lastTimestamp: "2026-04-16T23:52:44Z" message: 'Node ip-10-0-134-103.ec2.internal event: Registered Node ip-10-0-134-103.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:52:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:52:45Z" name: ip-10-0-134-103.ec2.internal.18a6fb70cf699336 namespace: default resourceVersion: "8009" uid: 85346124-b8ae-4370-b2e6-8ffc1af008c5 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 10 eventTime: null firstTimestamp: "2026-04-16T23:57:40Z" involvedObject: apiVersion: v1 kind: Namespace name: kuadrant-system resourceVersion: "16152" uid: 1458dcad-b3c7-4a0d-9508-7c1313d5b552 kind: Event lastTimestamp: "2026-04-16T23:57:44Z" message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"' metadata: creationTimestamp: "2026-04-16T23:57:41Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: catalog operation: Update time: "2026-04-16T23:57:46Z" name: kuadrant-system.18a6fbb5b4e4cebd namespace: default resourceVersion: "16416" uid: 622f13e6-5f7b-4fa4-a61d-bd963b8706f6 reason: ResolutionFailed reportingComponent: operator-lifecycle-manager reportingInstance: "" source: component: operator-lifecycle-manager type: Warning - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-16T23:52:23Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T23:52:23Z" name: kube-system.18a6fb6bbcf639fd namespace: default resourceVersion: "7575" uid: 3f2d4e45-5c91-43c3-8cf8-38b30bccc821 reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-565698dc98-hqlmg type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null 
message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-16T23:52:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T23:52:38Z" name: kube-system.18a6fb6f3b50a517 namespace: default resourceVersion: "7888" uid: 52c4a580-d5c5-472c-b851-41aaea15c66a reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-565698dc98-hqlmg type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-16T23:52:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T23:52:38Z" name: kube-system.18a6fb6f3b7de7dd namespace: default resourceVersion: "7889" uid: e48db6b6-3ffd-42a5-be7a-ea973708fafa reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-565698dc98-hqlmg type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-16T23:52:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T23:52:38Z" name: kube-system.18a6fb6f3bb4bbc5 namespace: default resourceVersion: "7890" uid: 96d707c5-78d7-4f4f-97dc-b39b8e5f7ac0 reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-565698dc98-hqlmg type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-16T23:53:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T23:53:38Z" name: kube-system.18a6fb7d33d855c8 namespace: default resourceVersion: "10023" uid: 6d2c653a-eef6-40cc-92e6-31ec8a1797ee reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-565698dc98-hqlmg type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:45:41Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-16T23:45:41Z" message: readyz=true metadata: creationTimestamp: "2026-04-16T23:45:41Z" name: openshift-kube-apiserver.18a6fb0e4439c551 namespace: default resourceVersion: "274" uid: 14abb0a8-d2f0-465b-99a2-7d3a1db954e3 reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: 
kube-apiserver-c9f9d7b54-hd5dm type: Warning
kind: EventList
metadata:
  resourceVersion: "46377"