---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:50Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-2llvq
  kind: Event
  lastTimestamp: "2026-04-22T18:43:50Z"
  message: CSR "csr-2llvq" has been approved
  metadata:
    creationTimestamp: "2026-04-22T18:43:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-22T18:43:50Z"
    name: csr-2llvq.18a8c210c8a63fc2
    namespace: default
    resourceVersion: "6486"
    uid: 39208a9a-3651-4ff7-88ea-9dff40f4252d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:44:01Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-6vgq5
  kind: Event
  lastTimestamp: "2026-04-22T18:44:01Z"
  message: CSR "csr-6vgq5" has been approved
  metadata:
    creationTimestamp: "2026-04-22T18:44:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-22T18:44:01Z"
    name: csr-6vgq5.18a8c21376e69f6e
    namespace: default
    resourceVersion: "6876"
    uid: f0638a5f-ecb5-4fe3-99fb-dc6a5d2d6143
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:44:07Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-cklph
  kind: Event
  lastTimestamp: "2026-04-22T18:44:07Z"
  message: CSR "csr-cklph" has been approved
  metadata:
    creationTimestamp: "2026-04-22T18:44:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-22T18:44:07Z"
    name: csr-cklph.18a8c214dbd13130
    namespace: default
    resourceVersion: "6997"
    uid: bec2feac-7f6d-402e-9886-8fb5c0517444
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:56Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-gvq49
  kind: Event
  lastTimestamp: "2026-04-22T18:43:56Z"
  message: CSR "csr-gvq49" has been approved
  metadata:
    creationTimestamp: "2026-04-22T18:43:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-22T18:43:56Z"
    name: csr-gvq49.18a8c2124531fe19
    namespace: default
    resourceVersion: "6702"
    uid: 16eb2620-588a-4a0f-9042-ba3b30e720dd
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:44:01Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-gztlc
  kind: Event
  lastTimestamp: "2026-04-22T18:44:01Z"
  message: CSR "csr-gztlc" has been approved
  metadata:
    creationTimestamp: "2026-04-22T18:44:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-22T18:44:01Z"
    name: csr-gztlc.18a8c21357194ec6
    namespace: default
    resourceVersion: "6861"
    uid: bc86e35a-be23-4224-90e9-b6d895a9ab9a
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:55Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-szgsj
  kind: Event
  lastTimestamp: "2026-04-22T18:43:55Z"
  message: CSR "csr-szgsj" has been approved
  metadata:
    creationTimestamp: "2026-04-22T18:43:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-22T18:43:55Z"
    name: csr-szgsj.18a8c211e7abb605
    namespace: default
    resourceVersion: "6623"
    uid: c82f446d-d297-42f8-b47e-7f390724253a
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 12
  eventTime: null
  firstTimestamp: "2026-04-22T18:49:37Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14216"
    uid: 697d804c-eaf3-43b0-94d5-3080168bc761
  kind: Event
  lastTimestamp: "2026-04-22T18:49:50Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-22T18:49:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-22T18:49:50Z"
    name: default-gateway.18a8c26198b8d8de
    namespace: default
    resourceVersion: "14707"
    uid: 43845010-4ddc-4aa6-af22-5f1ce32a5c6a
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-22T18:49:47Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14564"
    uid: 792a79d7-ffec-48ce-88bd-183a05d34ca9
  kind: Event
  lastTimestamp: "2026-04-22T18:50:15Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve
    platform.opendatahub.io/instance.uid:792a79d7-ffec-48ce-88bd-183a05d34ca9 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:792a79d7-ffec-48ce-88bd-183a05d34ca9]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found.
    NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]]
    image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-22T18:49:47Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-22T18:50:15Z"
    name: default-kserve.18a8c263ec40a00e
    namespace: default
    resourceVersion: "15753"
    uid: 2d5104c3-a66b-41c1-90fe-d4109f3441c9
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 25
  eventTime: null
  firstTimestamp: "2026-04-22T18:49:36Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "13947"
    uid: f29f17e7-ee0a-403f-8611-00f260539a58
  kind: Event
  lastTimestamp: "2026-04-22T18:50:43Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-22T18:49:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-22T18:50:43Z"
    name: default-monitoring.18a8c26147cf5cd4
    namespace: default
    resourceVersion: "16179"
    uid: 6517bec3-2853-4e9e-a5a6-468bb53ce350
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: ip-10-0-132-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20d85bf3e50
    namespace: default
    resourceVersion: "6067"
    uid: e1948a19-7144-4b32-aa88-3187860117cd
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: ip-10-0-132-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: 'Node ip-10-0-132-198.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20d87860e13
    namespace: default
    resourceVersion: "6157"
    uid: b58e9faf-64f7-46ae-8e63-d2d28af647c6
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: ip-10-0-132-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: 'Node ip-10-0-132-198.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20d878666bf
    namespace: default
    resourceVersion: "6164"
    uid: 03a919d4-4761-4e27-b0da-0bd724d6c687
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: ip-10-0-132-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: 'Node ip-10-0-132-198.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20d878693a8
    namespace: default
    resourceVersion: "6167"
    uid: c3e9d79f-1ca1-4e83-8706-1545b794bd0c
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: ip-10-0-132-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20d8a6cc8d0
    namespace: default
    resourceVersion: "6072"
    uid: 97f3a3d4-c874-4c3a-aab1-a33c9c82029a
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    resourceVersion: "6073"
    uid: d991403d-e400-443f-913a-27bc14a305d8
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20da12ab04f
    namespace: default
    resourceVersion: "6168"
    uid: c7887686-cc8a-47bb-9f66-07284038ff36
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:36Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: d991403d-e400-443f-913a-27bc14a305d8
  kind: Event
  lastTimestamp: "2026-04-22T18:43:36Z"
  message: 'Node ip-10-0-132-198.ec2.internal event: Registered Node ip-10-0-132-198.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T18:43:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:43:36Z"
    name: ip-10-0-132-198.ec2.internal.18a8c20da41042c1
    namespace: default
    resourceVersion: "6171"
    uid: bafae113-bd6f-4d42-b39d-c58df2410fae
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:44:03Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    resourceVersion: "6918"
    uid: d991403d-e400-443f-913a-27bc14a305d8
  kind: Event
  lastTimestamp: "2026-04-22T18:44:03Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-198.ec2.internal, error getting gateway config for node ip-10-0-132-198.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-132-198.ec2.internal", failed to update chassis to local for local node ip-10-0-132-198.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-132-198.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-198.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T18:44:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-132-198
      operation: Update
      time: "2026-04-22T18:44:03Z"
    name: ip-10-0-132-198.ec2.internal.18a8c213d3fa50e4
    namespace: default
    resourceVersion: "6924"
    uid: feb5e6d5-8a28-40a4-a135-a3b461b30043
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:44:09Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: ip-10-0-132-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:44:09Z"
  message: 'Node ip-10-0-132-198.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T18:44:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:44:09Z"
    name: ip-10-0-132-198.ec2.internal.18a8c2152cef5e40
    namespace: default
    resourceVersion: "7033"
    uid: 013c3250-6e40-4cde-a466-0d0219ff871c
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:45:58Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-198.ec2.internal
    uid: d991403d-e400-443f-913a-27bc14a305d8
  kind: Event
  lastTimestamp: "2026-04-22T18:45:58Z"
  message: 'Node ip-10-0-132-198.ec2.internal event: Registered Node ip-10-0-132-198.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T18:45:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:45:58Z"
    name: ip-10-0-132-198.ec2.internal.18a8c22eb50ec982
    namespace: default
    resourceVersion: "8319"
    uid: b079eaa5-6f74-40cb-85b3-ed0438700d2a
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:29Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:29Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T18:43:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:29Z"
    name: ip-10-0-134-244.ec2.internal.18a8c20be8d79c34
    namespace: default
    resourceVersion: "5927"
    uid: 8dadc6bd-be72-430e-af85-abe0d3dbf84f
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:29Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:29Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T18:43:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:29Z"
    name: ip-10-0-134-244.ec2.internal.18a8c20be8d7fa8a
    namespace: default
    resourceVersion: "5932"
    uid: 62c5facc-bce8-4eb8-aa33-bdf292954b41
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:29Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:29Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T18:43:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:29Z"
    name: ip-10-0-134-244.ec2.internal.18a8c20be8d825c7
    namespace: default
    resourceVersion: "5935"
    uid: c825f148-d1df-439b-8567-5fb74d4f027d
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:29Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:29Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T18:43:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:29Z"
    name: ip-10-0-134-244.ec2.internal.18a8c20bebd7577e
    namespace: default
    resourceVersion: "5840"
    uid: 17f012cd-b602-4601-acea-6acf6c9117c9
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:29Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    resourceVersion: "5841"
    uid: 2198fb44-9ad0-4f00-98c2-3f7e516e9ea5
  kind: Event
  lastTimestamp: "2026-04-22T18:43:29Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T18:43:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-22T18:43:29Z"
    name: ip-10-0-134-244.ec2.internal.18a8c20c01f4d9bf
    namespace: default
    resourceVersion: "5943"
    uid: 31985158-9541-4fed-8390-cc8bab421d01
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:31Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: 2198fb44-9ad0-4f00-98c2-3f7e516e9ea5
  kind: Event
  lastTimestamp: "2026-04-22T18:43:31Z"
  message: 'Node ip-10-0-134-244.ec2.internal event: Registered Node ip-10-0-134-244.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T18:43:31Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:43:31Z"
    name: ip-10-0-134-244.ec2.internal.18a8c20c7a063be0
    namespace: default
    resourceVersion: "6000"
    uid: 7b8281f4-86af-4fd9-802d-0744242366ac
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:56Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    resourceVersion: "6678"
    uid: 2198fb44-9ad0-4f00-98c2-3f7e516e9ea5
  kind: Event
  lastTimestamp: "2026-04-22T18:43:56Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-134-244.ec2.internal, error getting gateway config for node ip-10-0-134-244.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-134-244.ec2.internal", failed to update chassis to local for local node ip-10-0-134-244.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-134-244.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-134-244.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T18:43:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-134-244
      operation: Update
      time: "2026-04-22T18:43:56Z"
    name: ip-10-0-134-244.ec2.internal.18a8c21244f3c2d0
    namespace: default
    resourceVersion: "6700"
    uid: b4dc047a-6bcf-4241-a8f2-4ed5a19bcd07
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:44:02Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:44:02Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T18:44:02Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:44:02Z"
    name: ip-10-0-134-244.ec2.internal.18a8c213b1fe5b7b
    namespace: default
    resourceVersion: "6894"
    uid: ab008e4e-c851-4d00-ba2b-73868f184084
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:45:58Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: 2198fb44-9ad0-4f00-98c2-3f7e516e9ea5
  kind: Event
  lastTimestamp: "2026-04-22T18:45:58Z"
  message: 'Node ip-10-0-134-244.ec2.internal event: Registered Node ip-10-0-134-244.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T18:45:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:45:58Z"
    name: ip-10-0-134-244.ec2.internal.18a8c22eb50edd96
    namespace: default
    resourceVersion: "8326"
    uid: 6e799ce9-556a-4d93-bac4-1e328ba5a6a9
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:25Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    uid: ip-10-0-135-106.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:25Z"
  message: 'Node ip-10-0-135-106.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T18:43:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:25Z"
    name: ip-10-0-135-106.ec2.internal.18a8c20aeee07b90
    namespace: default
    resourceVersion: "5682"
    uid: ab9a10e3-5608-4482-8fd8-b2106b39d957
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-106.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-106.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:25Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    uid: ip-10-0-135-106.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:25Z"
  message: 'Node ip-10-0-135-106.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T18:43:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:25Z"
    name: ip-10-0-135-106.ec2.internal.18a8c20aeee0df85
    namespace: default
    resourceVersion: "5684"
    uid: 5c1ddc49-aa77-43f0-9438-9f0558b03100
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-106.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-106.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:25Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    uid: ip-10-0-135-106.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:25Z"
  message: 'Node ip-10-0-135-106.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T18:43:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:25Z"
    name: ip-10-0-135-106.ec2.internal.18a8c20aeee116fb
    namespace: default
    resourceVersion: "5685"
    uid: 74fa8503-14e3-4baa-a356-ff5fc51405ab
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-106.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-106.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:25Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    resourceVersion: "5646"
    uid: 67b1bbb7-9b17-4f59-9adb-a7613226c9d5
  kind: Event
  lastTimestamp: "2026-04-22T18:43:25Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T18:43:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-22T18:43:25Z"
    name: ip-10-0-135-106.ec2.internal.18a8c20b09b2ad04
    namespace: default
    resourceVersion: "5702"
    uid: 93230c77-6654-45fc-837e-053be01068ca
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:26Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    uid: 67b1bbb7-9b17-4f59-9adb-a7613226c9d5
  kind: Event
  lastTimestamp: "2026-04-22T18:43:26Z"
  message: 'Node ip-10-0-135-106.ec2.internal event: Registered Node ip-10-0-135-106.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T18:43:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:43:26Z"
    name: ip-10-0-135-106.ec2.internal.18a8c20b4ff384f5
    namespace: default
    resourceVersion: "5779"
    uid: b27bcc97-dbaa-4430-a2ba-dea9a06c2612
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:43:58Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    uid: ip-10-0-135-106.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T18:43:58Z"
  message: 'Node ip-10-0-135-106.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T18:43:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-22T18:43:58Z"
    name: ip-10-0-135-106.ec2.internal.18a8c2129aa46c28
    namespace: default
    resourceVersion: "6758"
    uid: 7d0752aa-8d48-445f-88b5-e21effb47a60
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-106.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-106.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:45:58Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-106.ec2.internal
    uid: 67b1bbb7-9b17-4f59-9adb-a7613226c9d5
  kind: Event
  lastTimestamp: "2026-04-22T18:45:58Z"
  message: 'Node ip-10-0-135-106.ec2.internal event: Registered Node ip-10-0-135-106.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T18:45:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:45:58Z"
    name: ip-10-0-135-106.ec2.internal.18a8c22eb50e577b
    namespace: default
    resourceVersion: "8314"
    uid: 75cb4973-ddea-4b75-a17b-9b0415a93f8f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 7
  eventTime: null
  firstTimestamp: "2026-04-22T18:50:48Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "15985"
    uid: 993ce5a1-ef6a-4411-9267-02dfa23f6b86
  kind: Event
  lastTimestamp: "2026-04-22T18:50:50Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-22T18:50:49Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-22T18:50:52Z"
    name: kuadrant-system.18a8c272326a2f30
    namespace: default
    resourceVersion: "16387"
    uid: c9228379-f276-4787-9588-f54b14280b65
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-22T18:45:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-22T18:45:36Z"
    name: kube-system.18a8c22993ef3148
    namespace: default
    resourceVersion: "7908"
    uid: d8e8471a-9e79-4e51-a05b-87c16f080361
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-864bbb5749-wcm5h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-22T18:45:51Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-22T18:45:51Z"
    name: kube-system.18a8c22d127521a7
    namespace: default
    resourceVersion: "8241"
    uid: 382e8e49-d6ea-4d20-8418-4e26e91ce8f8
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-864bbb5749-wcm5h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-22T18:45:51Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-22T18:45:51Z"
    name: kube-system.18a8c22d12b3d7cd
    namespace: default
    resourceVersion: "8242"
    uid: bd8ce3ae-3015-47d6-ba5f-f20488d49902
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-864bbb5749-wcm5h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-22T18:45:51Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-22T18:45:51Z"
    name: kube-system.18a8c22d12fc5f13
    namespace: default
    resourceVersion: "8243"
    uid: 812bfbe0-4650-4873-bdbc-5b970813b2bd
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-864bbb5749-wcm5h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-22T18:46:51Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-22T18:46:51Z"
    name: kube-system.18a8c23b0b23be55
    namespace: default
    resourceVersion: "10497"
    uid: 2ed5c672-e85f-43ca-a36f-0e4d40b9c367
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-864bbb5749-wcm5h
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:52:43Z"
  involvedObject:
    apiVersion: v1
    kind: Endpoints
    name: maas-default-gateway-openshift-default
  kind: Event
  lastTimestamp: "2026-04-22T18:52:43Z"
  message: 'Failed to create endpoint for service openshift-ingress/maas-default-gateway-openshift-default: endpoints "maas-default-gateway-openshift-default" already exists'
  metadata:
    creationTimestamp: "2026-04-22T18:52:43Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-22T18:52:43Z"
    name: maas-default-gateway-openshift-default.18a8c28cfeb8463a
    namespace: default
    resourceVersion: "21029"
    uid: 7999af5f-6516-4617-b1ab-f2532390cc28
  reason: FailedToCreateEndpoint
  reportingComponent: endpoint-controller
  reportingInstance: ""
  source:
    component: endpoint-controller
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T18:37:51Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-22T18:37:51Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-22T18:37:51Z"
    name: openshift-kube-apiserver.18a8c1bd2f39f109
    namespace: default
    resourceVersion: "274"
    uid: 845acfd2-2daf-478c-bb96-860343e2f668
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-7dbf8d5cb4-lqlgj
  type: Warning
kind: EventList
metadata:
  resourceVersion: "25858"