--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:39Z" involvedObject: kind: CertificateSigningRequest name: csr-2sqlr kind: Event lastTimestamp: "2026-04-21T02:41:39Z" message: CSR "csr-2sqlr" has been approved metadata: creationTimestamp: "2026-04-21T02:41:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T02:41:39Z" name: csr-2sqlr.18a83efaa7ed1429 namespace: default resourceVersion: "6778" uid: 5426d5ba-5d09-4e4b-8d90-1518ce381efa reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:32Z" involvedObject: kind: CertificateSigningRequest name: csr-cdc7z kind: Event lastTimestamp: "2026-04-21T02:41:32Z" message: CSR "csr-cdc7z" has been approved metadata: creationTimestamp: "2026-04-21T02:41:32Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T02:41:32Z" name: csr-cdc7z.18a83ef8ff8ff113 namespace: default resourceVersion: "6645" uid: d23083bb-cfc8-4900-9370-9f9af98be9db reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:33Z" involvedObject: kind: CertificateSigningRequest name: csr-pqlh9 kind: Event lastTimestamp: "2026-04-21T02:41:33Z" message: CSR "csr-pqlh9" has been approved metadata: creationTimestamp: "2026-04-21T02:41:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T02:41:33Z" name: csr-pqlh9.18a83ef937c3234b namespace: default resourceVersion: "6675" uid: cd091930-aa8d-40b1-88e8-830823cd05aa reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:40:58Z" involvedObject: kind: CertificateSigningRequest name: csr-rtf9m kind: Event lastTimestamp: "2026-04-21T02:40:58Z" message: CSR "csr-rtf9m" has been approved metadata: creationTimestamp: "2026-04-21T02:40:58Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T02:40:58Z" name: csr-rtf9m.18a83ef116021610 namespace: default resourceVersion: "5782" uid: 9faa8839-77e4-4b08-8c4e-9fde19e9fac9 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: 
"2026-04-21T02:41:04Z" involvedObject: kind: CertificateSigningRequest name: csr-tj7xr kind: Event lastTimestamp: "2026-04-21T02:41:04Z" message: CSR "csr-tj7xr" has been approved metadata: creationTimestamp: "2026-04-21T02:41:04Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T02:41:04Z" name: csr-tj7xr.18a83ef292304215 namespace: default resourceVersion: "5847" uid: 55e9f7f1-b8c9-42c9-a414-8e8fda7a08a5 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:38Z" involvedObject: kind: CertificateSigningRequest name: csr-x4mmm kind: Event lastTimestamp: "2026-04-21T02:41:38Z" message: CSR "csr-x4mmm" has been approved metadata: creationTimestamp: "2026-04-21T02:41:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T02:41:38Z" name: csr-x4mmm.18a83efa8a1ff568 namespace: default resourceVersion: "6763" uid: 8ab74680-68ae-41b8-8b01-f318e7980da3 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-21T02:47:36Z" involvedObject: apiVersion: datasciencecluster.opendatahub.io/v2 kind: DataScienceCluster name: default-dsc resourceVersion: "14444" uid: 4bf8f233-cb5e-4ef3-8ec3-37e162e3da53 kind: Event lastTimestamp: "2026-04-21T02:47:42Z" message: 'failure deploying resource {map[apiVersion:components.platform.opendatahub.io/v1alpha1 kind:Kserve metadata:map[annotations:map[component.opendatahub.io/management-state:Managed platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-dsc platform.opendatahub.io/instance.uid:4bf8f233-cb5e-4ef3-8ec3-37e162e3da53 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1] creationTimestamp: labels:map[platform.opendatahub.io/part-of:datasciencecluster] name:default-kserve ownerReferences:[map[apiVersion:datasciencecluster.opendatahub.io/v2 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:DataScienceCluster name:default-dsc uid:4bf8f233-cb5e-4ef3-8ec3-37e162e3da53]]] spec:map[modelsAsService:map[managementState:Removed] nim:map[managementState:Managed] rawDeploymentServiceConfig:Headed] status:map[]]}: apply failed components.platform.opendatahub.io/v1alpha1, Kind=Kserve: unable to patch components.platform.opendatahub.io/v1alpha1, Kind=Kserve default-kserve: kserves.components.platform.opendatahub.io "default-kserve" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion datasciencecluster.opendatahub.io/v2 Kind DataScienceCluster: no matches for kind "DataScienceCluster" in version "datasciencecluster.opendatahub.io/v2"' metadata: creationTimestamp: "2026-04-21T02:47:36Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} 
f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T02:47:42Z" name: default-dsc.18a83f4dee479c20 namespace: default resourceVersion: "14584" uid: d639bca6-ac69-4861-9b56-4f80d875738b reason: ProvisioningError reportingComponent: datasciencecluster reportingInstance: "" source: component: datasciencecluster type: Warning - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-21T02:47:36Z" involvedObject: apiVersion: dscinitialization.opendatahub.io/v2 kind: DSCInitialization name: default-dsci resourceVersion: "14287" uid: 963a1224-c7ab-4335-90c9-46d4b3b1e32b kind: Event lastTimestamp: "2026-04-21T02:47:42Z" message: 'failed to create operator resources for instance default-dsci: unable to patch networking.k8s.io/v1, Kind=NetworkPolicy opendatahub/opendatahub: networkpolicies.networking.k8s.io "opendatahub" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion dscinitialization.opendatahub.io/v2 Kind DSCInitialization: no matches for kind "DSCInitialization" in version "dscinitialization.opendatahub.io/v2"' metadata: creationTimestamp: "2026-04-21T02:47:36Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T02:47:42Z" name: default-dsci.18a83f4de0881d47 namespace: default resourceVersion: "14581" uid: b6738d28-f49b-463f-9cd6-85478552888f reason: DSCInitializationReconcileError reportingComponent: dscinitialization-controller reportingInstance: "" source: component: dscinitialization-controller type: Warning - apiVersion: v1 count: 10 eventTime: null firstTimestamp: "2026-04-21T02:47:49Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14736" uid: 6939970a-6937-457d-8c46-79b1438b09af kind: Event lastTimestamp: "2026-04-21T02:47:53Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-21T02:47:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T02:47:53Z" name: default-gateway.18a83f50e9e8cf2d namespace: default resourceVersion: "14879" uid: 181da39d-bf30-4cf2-b833-04ba64f99187 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-21T02:47:59Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "15446" uid: b1e6026a-1791-48d8-add6-80551516c6f2 kind: Event lastTimestamp: "2026-04-21T02:48:27Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub 
platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:b1e6026a-1791-48d8-add6-80551516c6f2 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:b1e6026a-1791-48d8-add6-80551516c6f2]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. 
NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] 
image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-21T02:47:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: 
{} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T02:48:27Z" name: default-kserve.18a83f5349f90b6b namespace: default resourceVersion: "16485" uid: f61bb0e0-10c7-41c0-aba3-cbdefbc6a3b2 reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 29 eventTime: null firstTimestamp: "2026-04-21T02:47:47Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "14619" uid: dc5b7a60-3d47-4ff6-8579-5a5a7c3ebd01 kind: Event lastTimestamp: "2026-04-21T02:52:58Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-21T02:47:47Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T02:52:58Z" name: default-monitoring.18a83f505966584f namespace: default resourceVersion: "24232" uid: 1143e5fe-746b-4363-ba47-82f3fbad974f reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-131-170.ec2.internal uid: ip-10-0-131-170.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: 'Node ip-10-0-131-170.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:08Z" name: ip-10-0-131-170.ec2.internal.18a83ef34eccbb3d namespace: default resourceVersion: "6104" uid: f3874ac2-8f2f-4161-a4b8-cf3c78bfc920 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-131-170.ec2.internal source: component: kubelet host: ip-10-0-131-170.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-131-170.ec2.internal uid: ip-10-0-131-170.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: 'Node ip-10-0-131-170.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:08Z" name: ip-10-0-131-170.ec2.internal.18a83ef34eccfc3f namespace: default resourceVersion: "6113" uid: cae77da5-4706-4d49-9e48-c148f60bffed reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-131-170.ec2.internal source: component: kubelet host: ip-10-0-131-170.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: 
ip-10-0-131-170.ec2.internal uid: ip-10-0-131-170.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: 'Node ip-10-0-131-170.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:08Z" name: ip-10-0-131-170.ec2.internal.18a83ef34ecd3674 namespace: default resourceVersion: "6119" uid: f1032256-1451-435d-9b2c-c194bd817c51 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-131-170.ec2.internal source: component: kubelet host: ip-10-0-131-170.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-131-170.ec2.internal uid: ip-10-0-131-170.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:07Z" name: ip-10-0-131-170.ec2.internal.18a83ef35143ad48 namespace: default resourceVersion: "5976" uid: 56f762f9-c2d3-4601-970f-91c20295154f reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-131-170.ec2.internal source: component: kubelet host: ip-10-0-131-170.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:08Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-131-170.ec2.internal resourceVersion: "5997" uid: cdd183e8-8de5-46c5-b2ca-fd27fd9265f3 kind: Event lastTimestamp: "2026-04-21T02:41:08Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-21T02:41:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-21T02:41:08Z" name: ip-10-0-131-170.ec2.internal.18a83ef36c8dd317 namespace: default resourceVersion: "6126" uid: 55fc1c05-b4b2-4864-b064-69931ea1e455 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:09Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-131-170.ec2.internal uid: cdd183e8-8de5-46c5-b2ca-fd27fd9265f3 kind: Event lastTimestamp: "2026-04-21T02:41:09Z" message: 'Node ip-10-0-131-170.ec2.internal event: Registered Node ip-10-0-131-170.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T02:41:09Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T02:41:09Z" name: 
ip-10-0-131-170.ec2.internal.18a83ef3ad4e230f namespace: default resourceVersion: "6155" uid: 640bbdb3-21a0-4d3f-9642-4c0f1e5b7f3b reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:40Z" involvedObject: kind: Node name: ip-10-0-131-170.ec2.internal uid: ip-10-0-131-170.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:40Z" message: 'Node ip-10-0-131-170.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-21T02:41:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:40Z" name: ip-10-0-131-170.ec2.internal.18a83efaf3fdbfc1 namespace: default resourceVersion: "6810" uid: 0b2dc146-d2d6-49f2-8b52-c77bc56d9fb4 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-131-170.ec2.internal source: component: kubelet host: ip-10-0-131-170.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:42:54Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-131-170.ec2.internal uid: cdd183e8-8de5-46c5-b2ca-fd27fd9265f3 kind: Event lastTimestamp: "2026-04-21T02:42:54Z" message: 'Node ip-10-0-131-170.ec2.internal event: Registered Node ip-10-0-131-170.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T02:42:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T02:42:54Z" name: ip-10-0-131-170.ec2.internal.18a83f0c3ee77f10 namespace: default resourceVersion: "7987" uid: 2b80046b-3279-49ee-99d3-8770131cabb5 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-134-66.ec2.internal uid: ip-10-0-134-66.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:07Z" name: ip-10-0-134-66.ec2.internal.18a83ef33f531112 namespace: default resourceVersion: "5981" uid: 0c5f2716-8326-4f9c-8040-4204fa8601ca reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-134-66.ec2.internal source: component: kubelet host: ip-10-0-134-66.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-134-66.ec2.internal uid: ip-10-0-134-66.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeHasNoDiskPressure' metadata: 
creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:07Z" name: ip-10-0-134-66.ec2.internal.18a83ef33f534b92 namespace: default resourceVersion: "5983" uid: b0017255-5a28-43af-9f53-ff50555b3374 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-134-66.ec2.internal source: component: kubelet host: ip-10-0-134-66.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-134-66.ec2.internal uid: ip-10-0-134-66.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:07Z" name: ip-10-0-134-66.ec2.internal.18a83ef33f536b3c namespace: default resourceVersion: "5984" uid: 46dbc262-0299-4498-8c9a-e22e44032c39 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-134-66.ec2.internal source: component: kubelet host: ip-10-0-134-66.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: kind: Node name: ip-10-0-134-66.ec2.internal uid: ip-10-0-134-66.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:07Z" name: ip-10-0-134-66.ec2.internal.18a83ef3427b90b5 namespace: default resourceVersion: "5928" uid: 235d3ab3-3a72-49b9-820d-f6bcd663789b reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-134-66.ec2.internal source: component: kubelet host: ip-10-0-134-66.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:07Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-134-66.ec2.internal resourceVersion: "5930" uid: 7fd3ff60-513f-4ebc-a992-27690f36245a kind: Event lastTimestamp: "2026-04-21T02:41:07Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-21T02:41:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-21T02:41:07Z" name: ip-10-0-134-66.ec2.internal.18a83ef358220784 namespace: default resourceVersion: "6002" uid: a843b2b3-7a16-4f63-884e-4830f0e05124 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: 
cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:09Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-134-66.ec2.internal uid: 7fd3ff60-513f-4ebc-a992-27690f36245a kind: Event lastTimestamp: "2026-04-21T02:41:09Z" message: 'Node ip-10-0-134-66.ec2.internal event: Registered Node ip-10-0-134-66.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T02:41:09Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T02:41:09Z" name: ip-10-0-134-66.ec2.internal.18a83ef3ad4dce54 namespace: default resourceVersion: "6154" uid: 0a967feb-78b8-4df3-b8f6-5ea388a065e0 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:40Z" involvedObject: kind: Node name: ip-10-0-134-66.ec2.internal uid: ip-10-0-134-66.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:40Z" message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-21T02:41:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:40Z" name: ip-10-0-134-66.ec2.internal.18a83efae9f1e3d4 namespace: default resourceVersion: "6787" uid: 0a8afb63-5178-4a32-be1b-b5804e4774ad reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-134-66.ec2.internal source: component: kubelet host: ip-10-0-134-66.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:42:54Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-134-66.ec2.internal uid: 7fd3ff60-513f-4ebc-a992-27690f36245a kind: Event lastTimestamp: "2026-04-21T02:42:54Z" message: 'Node ip-10-0-134-66.ec2.internal event: Registered Node ip-10-0-134-66.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T02:42:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T02:42:54Z" name: ip-10-0-134-66.ec2.internal.18a83f0c3ee80608 namespace: default resourceVersion: "7991" uid: f02a6d97-27ab-4083-820c-2e74afa166bd reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:40:32Z" involvedObject: kind: Node name: ip-10-0-137-147.ec2.internal uid: ip-10-0-137-147.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:40:33Z" message: 'Node ip-10-0-137-147.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-21T02:40:32Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} 
f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:40:33Z" name: ip-10-0-137-147.ec2.internal.18a83eeb2e3dee92 namespace: default resourceVersion: "5370" uid: f7298861-d68e-4932-8e20-d62cd11fc28e reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-137-147.ec2.internal source: component: kubelet host: ip-10-0-137-147.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:40:32Z" involvedObject: kind: Node name: ip-10-0-137-147.ec2.internal uid: ip-10-0-137-147.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:40:33Z" message: 'Node ip-10-0-137-147.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-21T02:40:32Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:40:33Z" name: ip-10-0-137-147.ec2.internal.18a83eeb2e3e4a88 namespace: default resourceVersion: "5371" uid: dd32a99e-c330-4a53-a409-5b79e0c711e4 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-137-147.ec2.internal source: component: kubelet host: ip-10-0-137-147.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T02:40:32Z" involvedObject: kind: Node name: ip-10-0-137-147.ec2.internal uid: ip-10-0-137-147.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:40:33Z" message: 'Node ip-10-0-137-147.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-21T02:40:32Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:40:33Z" name: ip-10-0-137-147.ec2.internal.18a83eeb2e3e7568 namespace: default resourceVersion: "5372" uid: dcafd74e-8ffb-4f9a-a6a6-b085f1aa3546 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-137-147.ec2.internal source: component: kubelet host: ip-10-0-137-147.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:40:32Z" involvedObject: kind: Node name: ip-10-0-137-147.ec2.internal uid: ip-10-0-137-147.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:40:32Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-21T02:40:32Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:40:32Z" name: ip-10-0-137-147.ec2.internal.18a83eeb31ac9087 namespace: default resourceVersion: "5339" uid: d42dbb84-2aff-47b1-b115-d73c9d9b5012 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-137-147.ec2.internal source: component: kubelet host: ip-10-0-137-147.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:40:33Z" involvedObject: apiVersion: v1 kind: Node name: 
ip-10-0-137-147.ec2.internal resourceVersion: "5341" uid: 3a6382ae-6a8f-4dc0-88df-31dfca66b3e3 kind: Event lastTimestamp: "2026-04-21T02:40:33Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-21T02:40:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-21T02:40:33Z" name: ip-10-0-137-147.ec2.internal.18a83eeb48b0c844 namespace: default resourceVersion: "5414" uid: 1bef0739-619f-4e68-83f9-10590544d207 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:40:34Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-137-147.ec2.internal uid: 3a6382ae-6a8f-4dc0-88df-31dfca66b3e3 kind: Event lastTimestamp: "2026-04-21T02:40:34Z" message: 'Node ip-10-0-137-147.ec2.internal event: Registered Node ip-10-0-137-147.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T02:40:34Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T02:40:34Z" name: ip-10-0-137-147.ec2.internal.18a83eeb86d8c327 namespace: default resourceVersion: "5501" uid: 019c715e-4760-4bc5-b7ff-f73349d35d65 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:41:06Z" involvedObject: kind: Node name: ip-10-0-137-147.ec2.internal uid: ip-10-0-137-147.ec2.internal kind: Event lastTimestamp: "2026-04-21T02:41:06Z" message: 'Node ip-10-0-137-147.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-21T02:41:06Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T02:41:06Z" name: ip-10-0-137-147.ec2.internal.18a83ef2ee512d93 namespace: default resourceVersion: "5858" uid: 8d72fd11-e46e-440c-b74e-72479d15aa28 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-137-147.ec2.internal source: component: kubelet host: ip-10-0-137-147.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:42:54Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-137-147.ec2.internal uid: 3a6382ae-6a8f-4dc0-88df-31dfca66b3e3 kind: Event lastTimestamp: "2026-04-21T02:42:54Z" message: 'Node ip-10-0-137-147.ec2.internal event: Registered Node ip-10-0-137-147.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T02:42:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T02:42:54Z" name: ip-10-0-137-147.ec2.internal.18a83f0c3ee81f58 
namespace: default resourceVersion: "7996" uid: e56f8af3-591f-425e-92a5-395f15c3b363 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 10 eventTime: null firstTimestamp: "2026-04-21T02:48:49Z" involvedObject: apiVersion: v1 kind: Namespace name: kuadrant-system resourceVersion: "16681" uid: ca680e63-c474-4a9f-a7ce-d9e19e4bd469 kind: Event lastTimestamp: "2026-04-21T02:48:52Z" message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"' metadata: creationTimestamp: "2026-04-21T02:48:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: catalog operation: Update time: "2026-04-21T02:48:53Z" name: kuadrant-system.18a83f5ebfe6893b namespace: default resourceVersion: "16906" uid: 8f5345a9-f655-4d26-94b1-831a6fab5b3d reason: ResolutionFailed reportingComponent: operator-lifecycle-manager reportingInstance: "" source: component: operator-lifecycle-manager type: Warning - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-21T02:42:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T02:42:33Z" name: kube-system.18a83f074c7e902b namespace: default resourceVersion: "7498" uid: 0033744b-e3e6-44c3-a595-b7c79d58a92d reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-856cb5878b-5wh6r type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-21T02:42:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T02:42:48Z" name: kube-system.18a83f0acadba477 namespace: default resourceVersion: "7797" uid: 9d2c2ae7-8588-4f91-b2f0-d12dde2a9250 reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-856cb5878b-5wh6r type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-21T02:42:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T02:42:48Z" name: kube-system.18a83f0acb0e387a namespace: default resourceVersion: "7798" uid: 
a24de3b6-f144-43b0-a288-3bc90a5d219a reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-856cb5878b-5wh6r type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-21T02:42:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T02:42:48Z" name: kube-system.18a83f0acb42b67a namespace: default resourceVersion: "7799" uid: 313bb89f-b293-4ee3-9dc9-6c308da82a6d reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-856cb5878b-5wh6r type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-21T02:43:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T02:43:48Z" name: kube-system.18a83f18c36a0caf namespace: default resourceVersion: "10077" uid: 72206c01-8392-4df1-b2db-8af7f6f0314c reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-856cb5878b-5wh6r type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T02:35:47Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-21T02:35:47Z" message: readyz=true metadata: creationTimestamp: "2026-04-21T02:35:47Z" name: openshift-kube-apiserver.18a83ea8ce2755cf namespace: default resourceVersion: "274" uid: 1c815a7f-98ce-4785-bca9-7c74c19b45f8 reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: kube-apiserver-c5595f997-bs52m type: Warning kind: EventList metadata: resourceVersion: "27326"
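# The items above appear to be an Event dump taken from the cluster while the Open Data Hub
# operators were failing to reconcile. A minimal sketch of how such a list is typically captured
# and narrowed to the failing components, assuming standard kubectl was used (the namespace and
# field selector below are illustrative, not part of the capture itself):
#
#   kubectl get events --all-namespaces -o yaml
#   kubectl get events -n default --field-selector type=Warning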