--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:55:26Z" involvedObject: apiVersion: operator.openshift.io/v1alpha1 kind: IstioCSR kind: Event lastTimestamp: "2026-04-17T18:55:26Z" message: controller is starting metadata: creationTimestamp: "2026-04-17T18:55:26Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: cert-manager-operator operation: Update time: "2026-04-17T18:55:26Z" name: 4cf87f93-eafd-4707-8ac2-a9f35c224f18 namespace: default resourceVersion: "12105" uid: 68b97ce1-6df2-4c27-9c75-14f7cc022b2a reason: ControllerStarted reportingComponent: cert-manager-istio-csr-controller reportingInstance: "" source: component: cert-manager-istio-csr-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:31Z" involvedObject: kind: CertificateSigningRequest name: csr-4djgg kind: Event lastTimestamp: "2026-04-17T18:49:31Z" message: CSR "csr-4djgg" has been approved metadata: creationTimestamp: "2026-04-17T18:49:31Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T18:49:31Z" name: csr-4djgg.18a739796726aa52 namespace: default resourceVersion: "6210" uid: 4f628503-5d1b-45f6-ab01-14c08bb04b19 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:51Z" involvedObject: kind: CertificateSigningRequest name: csr-65hwr kind: Event lastTimestamp: "2026-04-17T18:49:51Z" message: CSR "csr-65hwr" has been approved metadata: creationTimestamp: "2026-04-17T18:49:51Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T18:49:51Z" name: csr-65hwr.18a7397df4b3466b namespace: default resourceVersion: "6725" uid: f9c3c6e5-dcfc-4279-a0dc-2d968051df13 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:42Z" involvedObject: kind: CertificateSigningRequest name: csr-6kjb4 kind: Event lastTimestamp: "2026-04-17T18:49:42Z" message: CSR "csr-6kjb4" has been approved metadata: creationTimestamp: "2026-04-17T18:49:42Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T18:49:42Z" name: csr-6kjb4.18a7397bfb66e650 namespace: default resourceVersion: "6564" uid: 9a790e99-2a77-4063-9a4e-8d61946c97df reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:45Z" involvedObject: kind: CertificateSigningRequest name: csr-9xswc kind: Event lastTimestamp: "2026-04-17T18:49:45Z" message: CSR "csr-9xswc" has been approved metadata: creationTimestamp: "2026-04-17T18:49:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T18:49:45Z" name: csr-9xswc.18a7397ca3b34b35 namespace: default resourceVersion: "6628" uid: 6c023423-ce94-4436-900e-f43eb70a48d0 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:49Z" involvedObject: kind: CertificateSigningRequest name: csr-cct6c kind: Event lastTimestamp: "2026-04-17T18:49:49Z" message: CSR "csr-cct6c" has been approved metadata: creationTimestamp: "2026-04-17T18:49:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T18:49:49Z" name: csr-cct6c.18a7397d7c635662 namespace: default resourceVersion: "6685" uid: 9c907a84-2550-4587-bdab-f75fc7a80fd8 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:36Z" involvedObject: kind: CertificateSigningRequest name: csr-v8vc6 kind: Event lastTimestamp: "2026-04-17T18:49:36Z" message: CSR "csr-v8vc6" has been approved metadata: creationTimestamp: "2026-04-17T18:49:36Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T18:49:36Z" name: csr-v8vc6.18a7397a9856909f namespace: default resourceVersion: "6293" uid: 804b2371-094e-42b9-a941-00e89c28de80 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-17T18:56:20Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14734" uid: 4fe509b5-fb76-43c2-8d6c-1fd077aca5ce kind: Event lastTimestamp: "2026-04-17T18:56:34Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-17T18:56:20Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T18:56:34Z" name: default-gateway.18a739d88247d6ad namespace: default resourceVersion: "15167" uid: 7d60fbac-aec5-4001-a072-0cf4c8b03776 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-17T18:56:29Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "15095" uid: a3eddfe0-3161-4225-a0d2-081d7dacf0a1 kind: Event lastTimestamp: "2026-04-17T18:56:55Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:a3eddfe0-3161-4225-a0d2-081d7dacf0a1 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:a3eddfe0-3161-4225-a0d2-081d7dacf0a1]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-17T18:56:29Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T18:56:55Z" name: default-kserve.18a739dacd790de6 namespace: default resourceVersion: "16376" uid: b6cf4b52-0030-4323-b7f5-510ea3a32b09 reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 30 eventTime: null firstTimestamp: "2026-04-17T18:56:18Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "14442" uid: fc3c8a93-6261-4ff3-9a52-cdb0df9c0563 kind: Event lastTimestamp: "2026-04-17T19:15:57Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-17T18:56:18Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T19:15:57Z" name: default-monitoring.18a739d829643134 namespace: default resourceVersion: "38261" uid: 9fcc98b4-4931-48a5-bd40-677b24af0305 reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:21Z" involvedObject: kind: Node name: ip-10-0-132-192.ec2.internal uid: ip-10-0-132-192.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:21Z" message: 'Node ip-10-0-132-192.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T18:49:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:21Z" name: ip-10-0-132-192.ec2.internal.18a73976f6b9fe87 namespace: default resourceVersion: "5962" uid: 1685abec-c96a-403c-b812-8c0f2fa03c23 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-132-192.ec2.internal source: component: kubelet host: ip-10-0-132-192.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:21Z" involvedObject: kind: Node name: ip-10-0-132-192.ec2.internal uid: ip-10-0-132-192.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:21Z" message: 'Node ip-10-0-132-192.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T18:49:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:21Z" name: ip-10-0-132-192.ec2.internal.18a73976f6ba3f9f namespace: default resourceVersion: "5964" uid: 38406180-f50e-443c-aaf7-2d6d3e94bb61 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-132-192.ec2.internal source: component: kubelet host: ip-10-0-132-192.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:21Z" involvedObject: kind: Node name: ip-10-0-132-192.ec2.internal uid: ip-10-0-132-192.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:21Z" message: 'Node ip-10-0-132-192.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T18:49:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:21Z" name: ip-10-0-132-192.ec2.internal.18a73976f6ba8048 namespace: default resourceVersion: "5966" uid: b5122992-7b64-4605-a07f-973b3d727cb0 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-132-192.ec2.internal source: component: kubelet host: ip-10-0-132-192.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:21Z" involvedObject: kind: Node name: ip-10-0-132-192.ec2.internal uid: ip-10-0-132-192.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:21Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T18:49:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:21Z" name: ip-10-0-132-192.ec2.internal.18a73976f9861ac7 namespace: default resourceVersion: "5870" uid: 1074be1b-0c00-4e66-a54b-3b8842a4e8cc reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-132-192.ec2.internal source: component: kubelet host: ip-10-0-132-192.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:21Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-192.ec2.internal resourceVersion: "5873" uid: 0bdb5001-8f30-4255-adad-1090cc909a71 kind: Event lastTimestamp: "2026-04-17T18:49:21Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T18:49:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T18:49:21Z" name: ip-10-0-132-192.ec2.internal.18a739770eb0de18 namespace: default resourceVersion: "5972" uid: d7783062-7bd6-4040-bec6-959961b4ca1d reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:24Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-192.ec2.internal uid: 0bdb5001-8f30-4255-adad-1090cc909a71 kind: Event lastTimestamp: "2026-04-17T18:49:24Z" message: 'Node ip-10-0-132-192.ec2.internal event: Registered Node ip-10-0-132-192.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T18:49:24Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T18:49:24Z" name: ip-10-0-132-192.ec2.internal.18a73977b9fd3e60 namespace: default resourceVersion: "6049" uid: b8c8e4f4-26bf-4bb3-9efb-42f64b9b3c43 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:47Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-192.ec2.internal resourceVersion: "6654" uid: 0bdb5001-8f30-4255-adad-1090cc909a71 kind: Event lastTimestamp: "2026-04-17T18:49:47Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-192.ec2.internal, error getting gateway config for node ip-10-0-132-192.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-132-192.ec2.internal", failed to update chassis to local for local node ip-10-0-132-192.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-132-192.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-192.ec2.internal]' metadata: creationTimestamp: "2026-04-17T18:49:47Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-132-192 operation: Update time: "2026-04-17T18:49:47Z" name: ip-10-0-132-192.ec2.internal.18a7397cff9aa846 namespace: default resourceVersion: "6655" uid: 880ca0e4-b2b6-4566-8552-ea3d509881cc reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:52Z" involvedObject: kind: Node name: ip-10-0-132-192.ec2.internal uid: ip-10-0-132-192.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:52Z" message: 'Node ip-10-0-132-192.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T18:49:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:52Z" name: ip-10-0-132-192.ec2.internal.18a7397e4b0c2695 namespace: default resourceVersion: "6746" uid: 6ed04221-df32-4524-b426-be993a4153e3 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-132-192.ec2.internal source: component: kubelet host: ip-10-0-132-192.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:51:08Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-192.ec2.internal uid: 0bdb5001-8f30-4255-adad-1090cc909a71 kind: Event lastTimestamp: "2026-04-17T18:51:08Z" message: 'Node ip-10-0-132-192.ec2.internal event: Registered Node ip-10-0-132-192.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T18:51:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T18:51:08Z" name: ip-10-0-132-192.ec2.internal.18a7398fd9473ee2 namespace: default resourceVersion: "7936" uid: ab8fbacf-3513-4011-99eb-b1a7eba97b5f reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:04Z" involvedObject: kind: Node name: ip-10-0-136-27.ec2.internal uid: ip-10-0-136-27.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:05Z" message: 'Node ip-10-0-136-27.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T18:49:05Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:05Z" name: ip-10-0-136-27.ec2.internal.18a739732e100c62 namespace: default resourceVersion: "5469" uid: b4887d48-618d-4ae0-9b47-18eb603ab308 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-136-27.ec2.internal source: component: kubelet host: ip-10-0-136-27.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:04Z" involvedObject: kind: Node name: ip-10-0-136-27.ec2.internal uid: ip-10-0-136-27.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:05Z" message: 'Node ip-10-0-136-27.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T18:49:04Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:05Z" name: ip-10-0-136-27.ec2.internal.18a739732e1067cc namespace: default resourceVersion: "5475" uid: 103260ce-1819-426a-8779-f027902ab841 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-136-27.ec2.internal source: component: kubelet host: ip-10-0-136-27.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:04Z" involvedObject: kind: Node name: ip-10-0-136-27.ec2.internal uid: ip-10-0-136-27.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:05Z" message: 'Node ip-10-0-136-27.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T18:49:04Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:05Z" name: ip-10-0-136-27.ec2.internal.18a739732e109543 namespace: default resourceVersion: "5477" uid: 2d6ef68b-9d72-4ec8-a1af-cfb24ebd7a89 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-136-27.ec2.internal source: component: kubelet host: ip-10-0-136-27.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:04Z" involvedObject: kind: Node name: ip-10-0-136-27.ec2.internal uid: ip-10-0-136-27.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:04Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T18:49:04Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:04Z" name: ip-10-0-136-27.ec2.internal.18a739733103d395 namespace: default resourceVersion: "5424" uid: 4c9e22f6-d49e-4ee8-8351-08092e907994 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-136-27.ec2.internal source: component: kubelet host: ip-10-0-136-27.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:05Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-136-27.ec2.internal resourceVersion: "5425" uid: cbceb829-1c7c-4ac5-b23c-a0b5fd9f0fdc kind: Event lastTimestamp: "2026-04-17T18:49:05Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T18:49:05Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T18:49:05Z" name: ip-10-0-136-27.ec2.internal.18a7397349c89cb7 namespace: default resourceVersion: "5494" uid: 816a6fc9-a5f0-4375-b9fe-7177bb9fce9e reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:09Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-136-27.ec2.internal uid: cbceb829-1c7c-4ac5-b23c-a0b5fd9f0fdc kind: Event lastTimestamp: "2026-04-17T18:49:09Z" message: 'Node ip-10-0-136-27.ec2.internal event: Registered Node ip-10-0-136-27.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T18:49:09Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T18:49:09Z" name: ip-10-0-136-27.ec2.internal.18a739743bc64be4 namespace: default resourceVersion: "5624" uid: cbe7c467-fffb-4d4f-b741-7f7dee6a3bd4 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:33Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-136-27.ec2.internal resourceVersion: "6233" uid: cbceb829-1c7c-4ac5-b23c-a0b5fd9f0fdc kind: Event lastTimestamp: "2026-04-17T18:49:33Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-136-27.ec2.internal, error getting gateway config for node ip-10-0-136-27.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-136-27.ec2.internal", failed to update chassis to local for local node ip-10-0-136-27.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-136-27.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-136-27.ec2.internal]' metadata: creationTimestamp: "2026-04-17T18:49:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-136-27 operation: Update time: "2026-04-17T18:49:33Z" name: ip-10-0-136-27.ec2.internal.18a73979c39d5602 namespace: default resourceVersion: "6240" uid: 8f0a4748-f99d-434b-a70e-a46997533eee reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:38Z" involvedObject: kind: Node name: ip-10-0-136-27.ec2.internal uid: ip-10-0-136-27.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:38Z" message: 'Node ip-10-0-136-27.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T18:49:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:38Z" name: ip-10-0-136-27.ec2.internal.18a7397aef7fda7e namespace: default resourceVersion: "6362" uid: 01a82d2f-bffc-4bd1-974c-6a4c0d57ff73 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-136-27.ec2.internal source: component: kubelet host: ip-10-0-136-27.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:51:08Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-136-27.ec2.internal uid: cbceb829-1c7c-4ac5-b23c-a0b5fd9f0fdc kind: Event lastTimestamp: "2026-04-17T18:51:08Z" message: 'Node ip-10-0-136-27.ec2.internal event: Registered Node ip-10-0-136-27.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T18:51:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T18:51:08Z" name: ip-10-0-136-27.ec2.internal.18a7398fd947bb57 namespace: default resourceVersion: "7940" uid: 4ba06aad-f723-4b6c-8440-b2849a357c07 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:16Z" involvedObject: kind: Node name: ip-10-0-141-118.ec2.internal uid: ip-10-0-141-118.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:16Z" message: 'Node ip-10-0-141-118.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T18:49:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:16Z" name: ip-10-0-141-118.ec2.internal.18a73975ecd5c1a3 namespace: default resourceVersion: "5769" uid: 93ccd89a-eb60-497a-8c82-87b254045cde reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-141-118.ec2.internal source: component: kubelet host: ip-10-0-141-118.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:16Z" involvedObject: kind: Node name: ip-10-0-141-118.ec2.internal uid: ip-10-0-141-118.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:16Z" message: 'Node ip-10-0-141-118.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T18:49:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:16Z" name: ip-10-0-141-118.ec2.internal.18a73975ecd60ceb namespace: default resourceVersion: "5772" uid: 16e0dab4-78f9-4bb9-ad6f-d13a85f5a5ef reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-141-118.ec2.internal source: component: kubelet host: ip-10-0-141-118.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T18:49:16Z" involvedObject: kind: Node name: ip-10-0-141-118.ec2.internal uid: ip-10-0-141-118.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:16Z" message: 'Node ip-10-0-141-118.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T18:49:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:17Z" name: ip-10-0-141-118.ec2.internal.18a73975ecd63032 namespace: default resourceVersion: "5775" uid: 1f349daf-3b1a-46b0-91a1-63d03e688f3a reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-141-118.ec2.internal source: component: kubelet host: ip-10-0-141-118.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:16Z" involvedObject: kind: Node name: ip-10-0-141-118.ec2.internal uid: ip-10-0-141-118.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:16Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T18:49:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:16Z" name: ip-10-0-141-118.ec2.internal.18a73975f0115169 namespace: default resourceVersion: "5683" uid: 5b9c44e4-af65-44b5-98bd-df37a4ce08dd reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-141-118.ec2.internal source: component: kubelet host: ip-10-0-141-118.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:17Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-141-118.ec2.internal resourceVersion: "5684" uid: 5e4a9715-b6f8-4581-8612-22460151d2bf kind: Event lastTimestamp: "2026-04-17T18:49:17Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T18:49:17Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T18:49:17Z" name: ip-10-0-141-118.ec2.internal.18a739760497bf47 namespace: default resourceVersion: "5780" uid: ca8958c7-e2fa-4ad1-8160-b91d9f4ec717 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:19Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-141-118.ec2.internal uid: 5e4a9715-b6f8-4581-8612-22460151d2bf kind: Event lastTimestamp: "2026-04-17T18:49:19Z" message: 'Node ip-10-0-141-118.ec2.internal event: Registered Node ip-10-0-141-118.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T18:49:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T18:49:19Z" name: ip-10-0-141-118.ec2.internal.18a739768fe7c237 namespace: default resourceVersion: "5829" uid: 8ef2fb43-f627-4331-957e-602bb773e9fb reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:44Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-141-118.ec2.internal resourceVersion: "6593" uid: 5e4a9715-b6f8-4581-8612-22460151d2bf kind: Event lastTimestamp: "2026-04-17T18:49:44Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-141-118.ec2.internal, error getting gateway config for node ip-10-0-141-118.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-141-118.ec2.internal", failed to update chassis to local for local node ip-10-0-141-118.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-141-118.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-141-118.ec2.internal]' metadata: creationTimestamp: "2026-04-17T18:49:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-141-118 operation: Update time: "2026-04-17T18:49:44Z" name: ip-10-0-141-118.ec2.internal.18a7397c5769db4e namespace: default resourceVersion: "6600" uid: 84ba9a9f-7d41-495e-863e-2af6df100e87 reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:49:50Z" involvedObject: kind: Node name: ip-10-0-141-118.ec2.internal uid: ip-10-0-141-118.ec2.internal kind: Event lastTimestamp: "2026-04-17T18:49:50Z" message: 'Node ip-10-0-141-118.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T18:49:50Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T18:49:50Z" name: ip-10-0-141-118.ec2.internal.18a7397dd24bca59 namespace: default resourceVersion: "6699" uid: ee33a636-61af-4a64-8969-f4ba1a912625 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-141-118.ec2.internal source: component: kubelet host: ip-10-0-141-118.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:51:08Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-141-118.ec2.internal uid: 5e4a9715-b6f8-4581-8612-22460151d2bf kind: Event lastTimestamp: "2026-04-17T18:51:08Z" message: 'Node ip-10-0-141-118.ec2.internal event: Registered Node ip-10-0-141-118.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T18:51:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T18:51:08Z" name: ip-10-0-141-118.ec2.internal.18a7398fd9466542 namespace: default resourceVersion: "7930" uid: 39583d4d-3c0a-412f-8d82-36c589faec78 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-17T18:50:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T18:50:44Z" name: kube-system.18a7398a719e6138 namespace: default resourceVersion: "7543" uid: 567f7859-caee-414f-a32b-daced1125742 reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-587c9986cf-8qr2v type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-17T18:50:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T18:50:59Z" name: kube-system.18a7398deffc78ce namespace: default resourceVersion: "7841" uid: 0568b08b-c838-4391-935e-37642c4b1dbf reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-587c9986cf-8qr2v type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-17T18:50:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T18:50:59Z" name: kube-system.18a7398df02e4bf3 namespace: default resourceVersion: "7842" uid: 66923f99-a098-432a-996c-6987557627f9 reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-587c9986cf-8qr2v type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-17T18:50:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T18:50:59Z" name: kube-system.18a7398df08709a6 namespace: default resourceVersion: "7843" uid: 36a3858f-c923-4f22-b349-3f1c6085442c reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-587c9986cf-8qr2v type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-17T18:51:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T18:51:59Z" name: kube-system.18a7399be887ff5e namespace: default resourceVersion: "10079" uid: 7acd42d3-dc52-450a-8b5f-0fc447480da1 reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-587c9986cf-8qr2v type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T18:44:02Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-17T18:44:02Z" message: readyz=true metadata: creationTimestamp: "2026-04-17T18:44:02Z" name: openshift-kube-apiserver.18a7392cd43b275f namespace: default resourceVersion: "274" uid: 18f1509b-ea4f-4d4e-9a1b-9004a77103c2 reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: kube-apiserver-56977d4848-5dwdt type: Warning kind: EventList metadata: resourceVersion: "46437"