--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:13Z" involvedObject: kind: CertificateSigningRequest name: csr-bsfv7 kind: Event lastTimestamp: "2026-04-21T16:02:13Z" message: CSR "csr-bsfv7" has been approved metadata: creationTimestamp: "2026-04-21T16:02:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T16:02:13Z" name: csr-bsfv7.18a86aaa8cf4d097 namespace: default resourceVersion: "6404" uid: 230e13be-bdbf-4022-bee5-b4ad44428ab8 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:09Z" involvedObject: kind: CertificateSigningRequest name: csr-hwk2r kind: Event lastTimestamp: "2026-04-21T16:02:09Z" message: CSR "csr-hwk2r" has been approved metadata: creationTimestamp: "2026-04-21T16:02:09Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T16:02:09Z" name: csr-hwk2r.18a86aa98f8e25bd namespace: default resourceVersion: "6230" uid: 6ef8a9da-8e1a-48dd-a1cc-052d1ef06729 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:20Z" involvedObject: kind: CertificateSigningRequest name: csr-ln2ff kind: Event lastTimestamp: "2026-04-21T16:02:20Z" message: CSR "csr-ln2ff" has been approved metadata: creationTimestamp: "2026-04-21T16:02:20Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T16:02:20Z" name: csr-ln2ff.18a86aac2b9b5eb6 namespace: default resourceVersion: "6608" uid: a00fd0b9-dfa8-4ea2-90bd-8003e66fc5ca reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:14Z" involvedObject: kind: CertificateSigningRequest name: csr-nzwwg kind: Event lastTimestamp: "2026-04-21T16:02:14Z" message: CSR "csr-nzwwg" has been approved metadata: creationTimestamp: "2026-04-21T16:02:14Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T16:02:14Z" name: csr-nzwwg.18a86aaaba6bcfa4 namespace: default resourceVersion: "6424" uid: 684f11ee-cbc9-45e5-9b0d-797ea725f40d reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: 
"2026-04-21T16:02:17Z" involvedObject: kind: CertificateSigningRequest name: csr-wt9l9 kind: Event lastTimestamp: "2026-04-21T16:02:17Z" message: CSR "csr-wt9l9" has been approved metadata: creationTimestamp: "2026-04-21T16:02:17Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T16:02:17Z" name: csr-wt9l9.18a86aab5164f659 namespace: default resourceVersion: "6522" uid: ae35636e-a64b-4746-9a6d-fe67e30c0a39 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:22Z" involvedObject: kind: CertificateSigningRequest name: csr-x4f6d kind: Event lastTimestamp: "2026-04-21T16:02:22Z" message: CSR "csr-x4f6d" has been approved metadata: creationTimestamp: "2026-04-21T16:02:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-21T16:02:22Z" name: csr-x4f6d.18a86aacb3cba0ce namespace: default resourceVersion: "6652" uid: 93d12c7f-2568-45a5-9f28-639a7370917f reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:09:42Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14211" uid: fb339512-8242-43d3-bdd4-1fb8cc91210a kind: Event lastTimestamp: "2026-04-21T16:09:42Z" message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found' metadata: creationTimestamp: "2026-04-21T16:09:42Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T16:09:42Z" name: default-gateway.18a86b130c922139 namespace: default resourceVersion: "14218" uid: c74fb468-9e93-4199-925b-22035b3a3948 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 10 eventTime: null firstTimestamp: "2026-04-21T16:09:44Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14489" uid: fb339512-8242-43d3-bdd4-1fb8cc91210a kind: Event lastTimestamp: "2026-04-21T16:09:51Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-21T16:09:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T16:09:51Z" name: 
default-gateway.18a86b138788c549 namespace: default resourceVersion: "14740" uid: d279cc4b-2a95-45cf-a14f-d70a7eec5410 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-21T16:09:56Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "15065" uid: a8b1f88d-d43b-4381-bc74-0b14806be1aa kind: Event lastTimestamp: "2026-04-21T16:10:24Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:a8b1f88d-d43b-4381-bc74-0b14806be1aa platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:a8b1f88d-d43b-4381-bc74-0b14806be1aa]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. 
NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] 
image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-21T16:09:56Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: 
{} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T16:10:24Z" name: default-kserve.18a86b1634b83e57 namespace: default resourceVersion: "16183" uid: 3c389a76-b6d8-4dac-b542-cdc32efeea63 reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 34 eventTime: null firstTimestamp: "2026-04-21T16:09:42Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "14187" uid: f89a365e-c232-4200-b5c4-3443d5c01bd2 kind: Event lastTimestamp: "2026-04-21T16:29:21Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-21T16:09:42Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-21T16:29:21Z" name: default-monitoring.18a86b1307401cca namespace: default resourceVersion: "37648" uid: 8b92b8bb-b608-4c76-84d0-c65b362f9ed3 reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T16:01:43Z" involvedObject: kind: Node name: ip-10-0-129-96.ec2.internal uid: ip-10-0-129-96.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:44Z" message: 'Node ip-10-0-129-96.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-21T16:01:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:44Z" name: ip-10-0-129-96.ec2.internal.18a86aa39ebccb3c namespace: default resourceVersion: "5399" uid: 7b70c0c3-c83a-4ab5-9f22-8b9abc478182 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-129-96.ec2.internal source: component: kubelet host: ip-10-0-129-96.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T16:01:43Z" involvedObject: kind: Node name: ip-10-0-129-96.ec2.internal uid: ip-10-0-129-96.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:44Z" message: 'Node ip-10-0-129-96.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-21T16:01:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:44Z" name: ip-10-0-129-96.ec2.internal.18a86aa39ebd14b8 namespace: default resourceVersion: "5400" uid: eaf16fbe-304d-4a54-ae8e-3283e0c8c74b reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-129-96.ec2.internal source: component: kubelet host: ip-10-0-129-96.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T16:01:43Z" involvedObject: kind: Node name: 
ip-10-0-129-96.ec2.internal uid: ip-10-0-129-96.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:44Z" message: 'Node ip-10-0-129-96.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-21T16:01:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:44Z" name: ip-10-0-129-96.ec2.internal.18a86aa39ebd392d namespace: default resourceVersion: "5401" uid: daeac5da-56ef-489c-b15c-b62069210553 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-129-96.ec2.internal source: component: kubelet host: ip-10-0-129-96.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:44Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-96.ec2.internal resourceVersion: "5356" uid: 53296b73-b3c1-4ad2-92c1-a86d61ca08d7 kind: Event lastTimestamp: "2026-04-21T16:01:44Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-21T16:01:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-21T16:01:44Z" name: ip-10-0-129-96.ec2.internal.18a86aa3bdd9e0b1 namespace: default resourceVersion: "5429" uid: 137892ba-477b-4d2a-b299-6b7da262aac6 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:48Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-96.ec2.internal uid: 53296b73-b3c1-4ad2-92c1-a86d61ca08d7 kind: Event lastTimestamp: "2026-04-21T16:01:48Z" message: 'Node ip-10-0-129-96.ec2.internal event: Registered Node ip-10-0-129-96.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T16:01:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T16:01:48Z" name: ip-10-0-129-96.ec2.internal.18a86aa4b5668021 namespace: default resourceVersion: "5544" uid: df102cee-605b-4dfd-b718-efdc9b4c0132 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:11Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-96.ec2.internal resourceVersion: "6262" uid: 53296b73-b3c1-4ad2-92c1-a86d61ca08d7 kind: Event lastTimestamp: "2026-04-21T16:02:11Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-129-96.ec2.internal, error getting gateway config for node ip-10-0-129-96.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-129-96.ec2.internal", failed to update chassis to local for local node ip-10-0-129-96.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-129-96.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node 
ip-10-0-129-96.ec2.internal]' metadata: creationTimestamp: "2026-04-21T16:02:11Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-129-96 operation: Update time: "2026-04-21T16:02:11Z" name: ip-10-0-129-96.ec2.internal.18a86aa9eb3f1dc1 namespace: default resourceVersion: "6267" uid: 03addf7d-e0d3-4e3f-9397-81ac77abed78 reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:15Z" involvedObject: kind: Node name: ip-10-0-129-96.ec2.internal uid: ip-10-0-129-96.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:02:15Z" message: 'Node ip-10-0-129-96.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-21T16:02:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:02:15Z" name: ip-10-0-129-96.ec2.internal.18a86aaafe1ded1f namespace: default resourceVersion: "6458" uid: 9507be2c-6b26-474a-ba24-d2b77f7d8ad9 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-129-96.ec2.internal source: component: kubelet host: ip-10-0-129-96.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:04:26Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-96.ec2.internal uid: 53296b73-b3c1-4ad2-92c1-a86d61ca08d7 kind: Event lastTimestamp: "2026-04-21T16:04:26Z" message: 'Node ip-10-0-129-96.ec2.internal event: Registered Node ip-10-0-129-96.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T16:04:26Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T16:04:26Z" name: ip-10-0-129-96.ec2.internal.18a86ac983b7247a namespace: default resourceVersion: "8122" uid: 177b1eec-f51a-4b42-8118-38882d825413 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:52Z" involvedObject: kind: Node name: ip-10-0-138-191.ec2.internal uid: ip-10-0-138-191.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:52Z" message: Starting kubelet. 
metadata: creationTimestamp: "2026-04-21T16:01:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:52Z" name: ip-10-0-138-191.ec2.internal.18a86aa5832a9925 namespace: default resourceVersion: "5727" uid: 254c3df9-76ba-4364-bc29-e236e4c108ca reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-138-191.ec2.internal source: component: kubelet host: ip-10-0-138-191.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-21T16:01:52Z" involvedObject: kind: Node name: ip-10-0-138-191.ec2.internal uid: ip-10-0-138-191.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:52Z" message: 'Node ip-10-0-138-191.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-21T16:01:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:52Z" name: ip-10-0-138-191.ec2.internal.18a86aa584ed8d3e namespace: default resourceVersion: "5733" uid: c7b9d03e-ca75-4f66-af6a-5b649d81da9f reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-138-191.ec2.internal source: component: kubelet host: ip-10-0-138-191.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-21T16:01:52Z" involvedObject: kind: Node name: ip-10-0-138-191.ec2.internal uid: ip-10-0-138-191.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:52Z" message: 'Node ip-10-0-138-191.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-21T16:01:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:52Z" name: ip-10-0-138-191.ec2.internal.18a86aa584edd074 namespace: default resourceVersion: "5737" uid: 6aed0ba0-4992-4c02-b239-4c6c46052ea3 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-138-191.ec2.internal source: component: kubelet host: ip-10-0-138-191.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-21T16:01:52Z" involvedObject: kind: Node name: ip-10-0-138-191.ec2.internal uid: ip-10-0-138-191.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:52Z" message: 'Node ip-10-0-138-191.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-21T16:01:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:52Z" name: ip-10-0-138-191.ec2.internal.18a86aa584ee0348 namespace: default resourceVersion: "5747" uid: 63273596-3849-4a39-8e1e-69793e1343f0 reason: NodeHasSufficientPID 
reportingComponent: kubelet reportingInstance: ip-10-0-138-191.ec2.internal source: component: kubelet host: ip-10-0-138-191.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:52Z" involvedObject: kind: Node name: ip-10-0-138-191.ec2.internal uid: ip-10-0-138-191.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:52Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-21T16:01:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:52Z" name: ip-10-0-138-191.ec2.internal.18a86aa5883040b9 namespace: default resourceVersion: "5731" uid: 4e7bf531-f382-4a6e-9a14-5881e1600f7a reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-138-191.ec2.internal source: component: kubelet host: ip-10-0-138-191.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:52Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-191.ec2.internal resourceVersion: "5732" uid: bb838552-4c87-4a64-aef0-4bde19c9ae3d kind: Event lastTimestamp: "2026-04-21T16:01:52Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-21T16:01:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-21T16:01:52Z" name: ip-10-0-138-191.ec2.internal.18a86aa59f309dd5 namespace: default resourceVersion: "5830" uid: 2a939be5-c03a-48f4-bd0c-b0a572ddf6bd reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:53Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-191.ec2.internal uid: bb838552-4c87-4a64-aef0-4bde19c9ae3d kind: Event lastTimestamp: "2026-04-21T16:01:53Z" message: 'Node ip-10-0-138-191.ec2.internal event: Registered Node ip-10-0-138-191.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T16:01:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T16:01:53Z" name: ip-10-0-138-191.ec2.internal.18a86aa5df7697de namespace: default resourceVersion: "5889" uid: 0b71be51-11a5-4ca7-9920-5d23c9170abe reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:24Z" involvedObject: kind: Node name: ip-10-0-138-191.ec2.internal uid: ip-10-0-138-191.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:02:24Z" message: 'Node ip-10-0-138-191.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-21T16:02:24Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} 
f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:02:24Z" name: ip-10-0-138-191.ec2.internal.18a86aad0a61e52f namespace: default resourceVersion: "6678" uid: 2c834a88-123b-4e0c-a258-727cb63947c6 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-138-191.ec2.internal source: component: kubelet host: ip-10-0-138-191.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:04:26Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-191.ec2.internal uid: bb838552-4c87-4a64-aef0-4bde19c9ae3d kind: Event lastTimestamp: "2026-04-21T16:04:26Z" message: 'Node ip-10-0-138-191.ec2.internal event: Registered Node ip-10-0-138-191.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T16:04:26Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T16:04:26Z" name: ip-10-0-138-191.ec2.internal.18a86ac983b8aaec namespace: default resourceVersion: "8136" uid: 9c76e153-e2fd-445c-b0bf-4ece0087f094 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T16:01:48Z" involvedObject: kind: Node name: ip-10-0-142-158.ec2.internal uid: ip-10-0-142-158.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:49Z" message: 'Node ip-10-0-142-158.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-21T16:01:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:49Z" name: ip-10-0-142-158.ec2.internal.18a86aa4bd741a88 namespace: default resourceVersion: "5628" uid: 67f4332d-a802-402f-9d71-f5779cc11e48 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-142-158.ec2.internal source: component: kubelet host: ip-10-0-142-158.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-21T16:01:48Z" involvedObject: kind: Node name: ip-10-0-142-158.ec2.internal uid: ip-10-0-142-158.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:49Z" message: 'Node ip-10-0-142-158.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-21T16:01:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:49Z" name: ip-10-0-142-158.ec2.internal.18a86aa4bd7470d6 namespace: default resourceVersion: "5633" uid: 477af617-e987-4f53-b09b-f18e0fe0eb9f reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-142-158.ec2.internal source: component: kubelet host: ip-10-0-142-158.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: 
"2026-04-21T16:01:48Z" involvedObject: kind: Node name: ip-10-0-142-158.ec2.internal uid: ip-10-0-142-158.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:49Z" message: 'Node ip-10-0-142-158.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-21T16:01:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:49Z" name: ip-10-0-142-158.ec2.internal.18a86aa4bd74a4c7 namespace: default resourceVersion: "5639" uid: 18de5b6c-49d3-4ad9-bc8d-2ba5ec7c47e4 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-142-158.ec2.internal source: component: kubelet host: ip-10-0-142-158.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:48Z" involvedObject: kind: Node name: ip-10-0-142-158.ec2.internal uid: ip-10-0-142-158.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:01:48Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-21T16:01:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:01:48Z" name: ip-10-0-142-158.ec2.internal.18a86aa4c0422aa3 namespace: default resourceVersion: "5552" uid: 5e9c34da-da39-497d-85bd-fa6fd280f8a1 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-142-158.ec2.internal source: component: kubelet host: ip-10-0-142-158.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:49Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-142-158.ec2.internal resourceVersion: "5553" uid: 4813f765-6a58-4bf5-bcd3-2d3d003f4649 kind: Event lastTimestamp: "2026-04-21T16:01:49Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-21T16:01:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-21T16:01:49Z" name: ip-10-0-142-158.ec2.internal.18a86aa4d5d8803a namespace: default resourceVersion: "5647" uid: 2f2c643a-a9e0-442b-963f-114c9ed327b0 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:01:53Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-142-158.ec2.internal uid: 4813f765-6a58-4bf5-bcd3-2d3d003f4649 kind: Event lastTimestamp: "2026-04-21T16:01:53Z" message: 'Node ip-10-0-142-158.ec2.internal event: Registered Node ip-10-0-142-158.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T16:01:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update 
time: "2026-04-21T16:01:53Z" name: ip-10-0-142-158.ec2.internal.18a86aa5df761ca9 namespace: default resourceVersion: "5888" uid: e27e32d5-03f0-4f05-8951-a5c3a9fc0368 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:15Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-142-158.ec2.internal resourceVersion: "6440" uid: 4813f765-6a58-4bf5-bcd3-2d3d003f4649 kind: Event lastTimestamp: "2026-04-21T16:02:15Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-142-158.ec2.internal, error getting gateway config for node ip-10-0-142-158.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-142-158.ec2.internal", failed to update chassis to local for local node ip-10-0-142-158.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-142-158.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-142-158.ec2.internal]' metadata: creationTimestamp: "2026-04-21T16:02:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-142-158 operation: Update time: "2026-04-21T16:02:15Z" name: ip-10-0-142-158.ec2.internal.18a86aaae8197ef5 namespace: default resourceVersion: "6444" uid: 6e7f0cb6-a66d-4ac4-9d17-57e71d56556b reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:02:22Z" involvedObject: kind: Node name: ip-10-0-142-158.ec2.internal uid: ip-10-0-142-158.ec2.internal kind: Event lastTimestamp: "2026-04-21T16:02:22Z" message: 'Node ip-10-0-142-158.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-21T16:02:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-21T16:02:22Z" name: ip-10-0-142-158.ec2.internal.18a86aac7c54fbdc namespace: default resourceVersion: "6620" uid: 0950d7e8-63bb-4ad2-948a-d0c39a69973d reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-142-158.ec2.internal source: component: kubelet host: ip-10-0-142-158.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T16:04:26Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-142-158.ec2.internal uid: 4813f765-6a58-4bf5-bcd3-2d3d003f4649 kind: Event lastTimestamp: "2026-04-21T16:04:26Z" message: 'Node ip-10-0-142-158.ec2.internal event: Registered Node ip-10-0-142-158.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-21T16:04:26Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-21T16:04:26Z" name: ip-10-0-142-158.ec2.internal.18a86ac983b8cc98 namespace: default resourceVersion: "8156" uid: 21fd28b8-b44b-4dc1-a7df-5f917f1de9d6 reason: 
RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 9 eventTime: null firstTimestamp: "2026-04-21T16:10:54Z" involvedObject: apiVersion: v1 kind: Namespace name: kuadrant-system resourceVersion: "16533" uid: 5acf9db9-2afa-4f95-8ca4-012e56a5332b kind: Event lastTimestamp: "2026-04-21T16:10:57Z" message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"' metadata: creationTimestamp: "2026-04-21T16:10:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: catalog operation: Update time: "2026-04-21T16:10:58Z" name: kuadrant-system.18a86b23d8f11bea namespace: default resourceVersion: "16762" uid: 662d2ff0-fce4-437c-9404-79bdd29364fc reason: ResolutionFailed reportingComponent: operator-lifecycle-manager reportingInstance: "" source: component: operator-lifecycle-manager type: Warning - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-21T16:03:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T16:03:59Z" name: kube-system.18a86ac340655d45 namespace: default resourceVersion: "7623" uid: 1e14ee67-c810-48f1-9ef2-0255e4a7f34e reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-c6c5bfdcb-sgmpt type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-21T16:04:14Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T16:04:14Z" name: kube-system.18a86ac6bf39b848 namespace: default resourceVersion: "7884" uid: cf678dfb-fbf9-4343-8fae-2c4416451209 reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-c6c5bfdcb-sgmpt type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-21T16:04:14Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T16:04:14Z" name: kube-system.18a86ac6bfaf317a namespace: default resourceVersion: "7885" uid: f5a68200-65ed-40b3-b277-1efe92349795 reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: 
component: apiserver host: openshift-apiserver-c6c5bfdcb-sgmpt type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-21T16:04:14Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T16:04:14Z" name: kube-system.18a86ac6c05c8150 namespace: default resourceVersion: "7886" uid: 71ea81b6-f40f-481a-9737-671b7845eef8 reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-c6c5bfdcb-sgmpt type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-21T16:05:14Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-21T16:05:14Z" name: kube-system.18a86ad4b8172a00 namespace: default resourceVersion: "10207" uid: c2589135-fa50-48cc-ac4e-f190fe6b94b0 reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-c6c5bfdcb-sgmpt type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-21T15:56:44Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-21T15:56:44Z" message: readyz=true metadata: creationTimestamp: "2026-04-21T15:56:44Z" name: openshift-kube-apiserver.18a86a5dd62d7b9e namespace: default resourceVersion: "274" uid: c8a96f79-0199-4830-b7ad-b5e45707383c reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: kube-apiserver-855b5c88f4-fn8xm type: Warning kind: EventList metadata: resourceVersion: "46187"
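The diagnostically interesting entries in the dump above are the `type: Warning` events (the missing kube-auth-proxy-creds Secret, the absent DestinationRule CRD, the kserve webhook with no endpoints, the monitoring cache namespace error, and the kuadrant catalog resolution failure). A minimal sketch for pulling just those out of the dump, assuming it has been saved verbatim to a local file (the name events.yaml is a placeholder) and that PyYAML is available:

import yaml  # PyYAML

# Load the EventList exactly as dumped above (placeholder filename).
with open("events.yaml") as f:
    event_list = yaml.safe_load(f)

# Keep only Warning events and order them by timestamp (ISO 8601 strings sort lexicographically).
warnings = [e for e in event_list.get("items", []) if e.get("type") == "Warning"]
warnings.sort(key=lambda e: e.get("lastTimestamp") or e["metadata"]["creationTimestamp"])

for e in warnings:
    obj = e.get("involvedObject", {})
    print(f"{e.get('lastTimestamp')}  {e.get('reason')}  "
          f"{obj.get('kind')}/{obj.get('name')}  (count={e.get('count', 1)})")
    print(f"    {e.get('message')}")

This only reads fields that appear in the events above (type, reason, count, lastTimestamp, involvedObject, message); it does not modify the cluster or depend on any Kubernetes client library.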