---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:29Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-2tcdd
  kind: Event
  lastTimestamp: "2026-04-21T14:56:29Z"
  message: CSR "csr-2tcdd" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:56:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:56:29Z"
    name: csr-2tcdd.18a86714274e3c02
    namespace: default
    resourceVersion: "6697"
    uid: 8ca275f4-9566-48db-a30e-2749ad583dba
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:25Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-4s2cf
  kind: Event
  lastTimestamp: "2026-04-21T14:56:25Z"
  message: CSR "csr-4s2cf" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:56:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:56:25Z"
    name: csr-4s2cf.18a86713571d0614
    namespace: default
    resourceVersion: "6627"
    uid: e6122acd-ca55-433b-a6e4-3cc7e0581a1d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:19Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-56lsz
  kind: Event
  lastTimestamp: "2026-04-21T14:56:19Z"
  message: CSR "csr-56lsz" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:56:19Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:56:19Z"
    name: csr-56lsz.18a86711f4b4b26b
    namespace: default
    resourceVersion: "6508"
    uid: 6f6d8304-43c6-4670-8582-6dc54538c6f3
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:22Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-9gwpv
  kind: Event
  lastTimestamp: "2026-04-21T14:56:22Z"
  message: CSR "csr-9gwpv" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:56:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:56:22Z"
    name: csr-9gwpv.18a86712b09f5efa
    namespace: default
    resourceVersion: "6581"
    uid: 36f89909-3da9-48f5-a733-dede0beb64a0
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:08Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-lsnsq
  kind: Event
  lastTimestamp: "2026-04-21T14:56:08Z"
  message: CSR "csr-lsnsq" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:56:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:56:08Z"
    name: csr-lsnsq.18a8670f4cd2015b
    namespace: default
    resourceVersion: "6155"
    uid: 3a7800da-dc4f-40f0-82b2-80e0dc8cc33d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:02Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-zdtqb
  kind: Event
  lastTimestamp: "2026-04-21T14:56:02Z"
  message: CSR "csr-zdtqb" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:56:02Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:56:02Z"
    name: csr-zdtqb.18a8670dd82e1021
    namespace: default
    resourceVersion: "6062"
    uid: 29934826-4069-45dd-83c8-0bb5e786c2e4
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T15:03:36Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14248"
    uid: 7f95c147-0efd-48ef-aea6-1d0639ec7f7f
  kind: Event
  lastTimestamp: "2026-04-21T15:03:36Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-21T15:03:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T15:03:36Z"
    name: default-gateway.18a86777835d6c7f
    namespace: default
    resourceVersion: "14265"
    uid: e61bf44f-ba5d-49ae-844b-438e4b9a9e8b
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-21T15:03:38Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14497"
    uid: 7f95c147-0efd-48ef-aea6-1d0639ec7f7f
  kind: Event
  lastTimestamp: "2026-04-21T15:03:45Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-21T15:03:38Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T15:03:45Z"
    name: default-gateway.18a867780c53300f
    namespace: default
    resourceVersion: "14700"
    uid: 3b25c18f-c3b9-4935-b868-bcd1844087cd
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-21T15:03:48Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14846"
    uid: c7587f04-f455-435c-99c1-e8ddc7ca31b1
  kind: Event
  lastTimestamp: "2026-04-21T15:04:17Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:c7587f04-f455-435c-99c1-e8ddc7ca31b1 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:c7587f04-f455-435c-99c1-e8ddc7ca31b1]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-21T15:03:48Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T15:04:17Z"
    name: default-kserve.18a8677a55ee3b38
    namespace: default
    resourceVersion: "16298"
    uid: 5560e74b-dac5-40ef-bb6a-d93fdc233e36
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 31
  eventTime: null
  firstTimestamp: "2026-04-21T15:03:35Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14218"
    uid: d1105f53-1fd7-4d46-8628-6024f4c4b46f
  kind: Event
  lastTimestamp: "2026-04-21T15:23:15Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-21T15:03:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T15:23:15Z"
    name: default-monitoring.18a867777dc2210a
    namespace: default
    resourceVersion: "35014"
    uid: dd08fd86-7047-4f04-be8d-4283ee57165a
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: ip-10-0-129-133.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:36Z"
  message: 'Node ip-10-0-129-133.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-21T14:55:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:36Z"
    name: ip-10-0-129-133.ec2.internal.18a86707dee5c2cf
    namespace: default
    resourceVersion: "5368"
    uid: 477d3168-2073-44cb-9df8-c605ff66d745
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-133.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-133.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: ip-10-0-129-133.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:36Z"
  message: 'Node ip-10-0-129-133.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-21T14:55:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:36Z"
    name: ip-10-0-129-133.ec2.internal.18a86707dee6107a
    namespace: default
    resourceVersion: "5369"
    uid: a3de831f-1b30-4ea9-ab5e-208cc6299fd3
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-133.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-133.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: ip-10-0-129-133.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:36Z"
  message: 'Node ip-10-0-129-133.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-21T14:55:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:36Z"
    name: ip-10-0-129-133.ec2.internal.18a86707dee673c5
    namespace: default
    resourceVersion: "5370"
    uid: 80fca3e8-13b7-4d24-a395-a4ad30626eba
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-133.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-133.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: ip-10-0-129-133.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:36Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-21T14:55:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:36Z"
    name: ip-10-0-129-133.ec2.internal.18a86707e23271d8
    namespace: default
    resourceVersion: "5330"
    uid: f0831722-0123-4f9b-9b6d-49586e8f6c9b
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-133.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-133.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:36Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    resourceVersion: "5332"
    uid: bee26194-3046-4362-b66b-5d5d6bc97260
  kind: Event
  lastTimestamp: "2026-04-21T14:55:36Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-21T14:55:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-21T14:55:36Z"
    name: ip-10-0-129-133.ec2.internal.18a86707f80cf248
    namespace: default
    resourceVersion: "5384"
    uid: 8973b84f-c231-49db-aec9-aa5040b06853
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:40Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: bee26194-3046-4362-b66b-5d5d6bc97260
  kind: Event
  lastTimestamp: "2026-04-21T14:55:40Z"
  message: 'Node ip-10-0-129-133.ec2.internal event: Registered Node ip-10-0-129-133.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:55:40Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:55:40Z"
    name: ip-10-0-129-133.ec2.internal.18a86708e203e5ec
    namespace: default
    resourceVersion: "5516"
    uid: 7b678c9e-6578-4fd6-b699-67a69e70f52e
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:03Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    resourceVersion: "6087"
    uid: bee26194-3046-4362-b66b-5d5d6bc97260
  kind: Event
  lastTimestamp: "2026-04-21T14:56:03Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-129-133.ec2.internal, error getting gateway config for node ip-10-0-129-133.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-129-133.ec2.internal", failed to update chassis to local for local node ip-10-0-129-133.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-129-133.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-129-133.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-21T14:56:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-129-133
      operation: Update
      time: "2026-04-21T14:56:03Z"
    name: ip-10-0-129-133.ec2.internal.18a8670e3471aeaf
    namespace: default
    resourceVersion: "6089"
    uid: 5df9f731-96bc-4569-9d3c-9de64af220b7
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:09Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: ip-10-0-129-133.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:56:09Z"
  message: 'Node ip-10-0-129-133.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-21T14:56:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:56:09Z"
    name: ip-10-0-129-133.ec2.internal.18a8670f9f6374f0
    namespace: default
    resourceVersion: "6173"
    uid: 00fce03d-d698-4437-a63c-7af4611ac3e0
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-133.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-133.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:58:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-133.ec2.internal
    uid: bee26194-3046-4362-b66b-5d5d6bc97260
  kind: Event
  lastTimestamp: "2026-04-21T14:58:21Z"
  message: 'Node ip-10-0-129-133.ec2.internal event: Registered Node ip-10-0-129-133.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:58:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:58:21Z"
    name: ip-10-0-129-133.ec2.internal.18a8672e460ea005
    namespace: default
    resourceVersion: "8114"
    uid: 254f46a7-6c99-440f-8f06-4acc99e7b030
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:54Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: ip-10-0-130-121.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:54Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-21T14:55:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:54Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c036791a6
    namespace: default
    resourceVersion: "5594"
    uid: 014631c8-45bd-42fa-b615-4aaf28c19a57
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-121.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-121.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:54Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: ip-10-0-130-121.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:54Z"
  message: 'Node ip-10-0-130-121.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-21T14:55:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:54Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c051ffb8b
    namespace: default
    resourceVersion: "5689"
    uid: daa897f7-f653-4807-82ec-f36c4eca86a0
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-121.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-121.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:54Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: ip-10-0-130-121.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:54Z"
  message: 'Node ip-10-0-130-121.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-21T14:55:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:54Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c052051d3
    namespace: default
    resourceVersion: "5691"
    uid: 419f1d61-3e72-4fc6-97f9-0a8a55691229
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-121.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-121.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:54Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: ip-10-0-130-121.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:54Z"
  message: 'Node ip-10-0-130-121.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-21T14:55:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:54Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c05207dc8
    namespace: default
    resourceVersion: "5693"
    uid: 12413be2-44d1-45e7-a48e-f6976bd9721f
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-121.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-121.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:54Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: ip-10-0-130-121.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:54Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-21T14:55:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:54Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c084490bb
    namespace: default
    resourceVersion: "5598"
    uid: 59888812-2d73-4a2a-a91d-789af204952e
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-121.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-121.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:54Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    resourceVersion: "5600"
    uid: 22d6deef-6e96-4409-81d0-dc7da2ebd0aa
  kind: Event
  lastTimestamp: "2026-04-21T14:55:54Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-21T14:55:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-21T14:55:54Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c1b30ddc7
    namespace: default
    resourceVersion: "5696"
    uid: b4b29359-1b50-4ea6-a833-08177b280f0f
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:55Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: 22d6deef-6e96-4409-81d0-dc7da2ebd0aa
  kind: Event
  lastTimestamp: "2026-04-21T14:55:55Z"
  message: 'Node ip-10-0-130-121.ec2.internal event: Registered Node ip-10-0-130-121.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:55:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:55:55Z"
    name: ip-10-0-130-121.ec2.internal.18a8670c6039578c
    namespace: default
    resourceVersion: "5744"
    uid: daa0bb53-c6f4-4c1a-bb8c-ae151da3dd09
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    resourceVersion: "6541"
    uid: 22d6deef-6e96-4409-81d0-dc7da2ebd0aa
  kind: Event
  lastTimestamp: "2026-04-21T14:56:21Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-130-121.ec2.internal, error getting gateway config for node ip-10-0-130-121.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-130-121.ec2.internal", failed to update chassis to local for local node ip-10-0-130-121.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-130-121.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-130-121.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-21T14:56:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-130-121
      operation: Update
      time: "2026-04-21T14:56:21Z"
    name: ip-10-0-130-121.ec2.internal.18a8671250e27bac
    namespace: default
    resourceVersion: "6544"
    uid: 3c71c818-de00-4712-b453-17c9d2add088
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:27Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: ip-10-0-130-121.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:56:27Z"
  message: 'Node ip-10-0-130-121.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-21T14:56:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:56:27Z"
    name: ip-10-0-130-121.ec2.internal.18a86713ae1f2e08
    namespace: default
    resourceVersion: "6648"
    uid: e9ab7cf2-e99a-4071-b43a-bdf1e0ed9432
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-121.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-121.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:58:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-121.ec2.internal
    uid: 22d6deef-6e96-4409-81d0-dc7da2ebd0aa
  kind: Event
  lastTimestamp: "2026-04-21T14:58:21Z"
  message: 'Node ip-10-0-130-121.ec2.internal event: Registered Node ip-10-0-130-121.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:58:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:58:21Z"
    name: ip-10-0-130-121.ec2.internal.18a8672e460f7e5f
    namespace: default
    resourceVersion: "8122"
    uid: a9d5e401-74be-433e-bb8a-fd101028fb32
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: ip-10-0-134-40.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:57Z"
  message: 'Node ip-10-0-134-40.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-21T14:55:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:58Z"
    name: ip-10-0-134-40.ec2.internal.18a8670cc87e3b1f
    namespace: default
    resourceVersion: "5909"
    uid: 033ef7db-3c76-489e-8f81-46f398b0e7df
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-40.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-40.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: ip-10-0-134-40.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:57Z"
  message: 'Node ip-10-0-134-40.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-21T14:55:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:58Z"
    name: ip-10-0-134-40.ec2.internal.18a8670cc87e7d7a
    namespace: default
    resourceVersion: "5921"
    uid: dc9b4cd0-f3e5-498e-9ceb-ba6389f6d8cc
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-40.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-40.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: ip-10-0-134-40.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:57Z"
  message: 'Node ip-10-0-134-40.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-21T14:55:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:58Z"
    name: ip-10-0-134-40.ec2.internal.18a8670cc87eabcc
    namespace: default
    resourceVersion: "5925"
    uid: e175ed04-6fa9-478a-ba2d-9aa95292444f
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-40.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-40.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: ip-10-0-134-40.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:55:57Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-21T14:55:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:55:57Z"
    name: ip-10-0-134-40.ec2.internal.18a8670ccb17cfb7
    namespace: default
    resourceVersion: "5802"
    uid: c672ee8a-7859-4935-bf74-9f5fc4673f80
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-40.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-40.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:55:58Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    resourceVersion: "5805"
    uid: 132ac2ee-f3ad-4f0a-99c6-c11ae6d1fbac
  kind: Event
  lastTimestamp: "2026-04-21T14:55:58Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-21T14:55:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-21T14:55:58Z"
    name: ip-10-0-134-40.ec2.internal.18a8670ce3deddf3
    namespace: default
    resourceVersion: "5918"
    uid: 2c9aa3ef-3745-4216-bfd5-3d4eebdfc8b7
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:00Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: 132ac2ee-f3ad-4f0a-99c6-c11ae6d1fbac
  kind: Event
  lastTimestamp: "2026-04-21T14:56:00Z"
  message: 'Node ip-10-0-134-40.ec2.internal event: Registered Node ip-10-0-134-40.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:56:00Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:56:00Z"
    name: ip-10-0-134-40.ec2.internal.18a8670d8a4e1053
    namespace: default
    resourceVersion: "6043"
    uid: 65f612bc-dfc2-4ec0-a1b2-c3ee318efafe
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:24Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    resourceVersion: "6599"
    uid: 132ac2ee-f3ad-4f0a-99c6-c11ae6d1fbac
  kind: Event
  lastTimestamp: "2026-04-21T14:56:24Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-134-40.ec2.internal, error getting gateway config for node ip-10-0-134-40.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-134-40.ec2.internal", failed to update chassis to local for local node ip-10-0-134-40.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-134-40.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-134-40.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-21T14:56:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-134-40
      operation: Update
      time: "2026-04-21T14:56:24Z"
    name: ip-10-0-134-40.ec2.internal.18a867130e3988a2
    namespace: default
    resourceVersion: "6603"
    uid: ceb2f9df-8964-4627-a845-945d81f856de
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:56:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: ip-10-0-134-40.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:56:30Z"
  message: 'Node ip-10-0-134-40.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-21T14:56:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:56:30Z"
    name: ip-10-0-134-40.ec2.internal.18a8671472153f8d
    namespace: default
    resourceVersion: "6715"
    uid: 21d269c7-1bdc-452f-9220-051378878bb4
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-40.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-40.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:58:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-40.ec2.internal
    uid: 132ac2ee-f3ad-4f0a-99c6-c11ae6d1fbac
  kind: Event
  lastTimestamp: "2026-04-21T14:58:21Z"
  message: 'Node ip-10-0-134-40.ec2.internal event: Registered Node ip-10-0-134-40.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:58:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:58:21Z"
    name: ip-10-0-134-40.ec2.internal.18a8672e460f97b9
    namespace: default
    resourceVersion: "8136"
    uid: ead9de42-c2e8-4b9b-a4c7-c2ed81cec459
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-21T15:04:48Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16525"
    uid: d93382d9-e86f-40ee-a1c6-a70a42b6fd3f
  kind: Event
  lastTimestamp: "2026-04-21T15:04:51Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-21T15:04:48Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-21T15:04:52Z"
    name: kuadrant-system.18a867885d9def5e
    namespace: default
    resourceVersion: "16747"
    uid: 79967fad-ef4e-4948-82e2-130cc0103571
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-21T14:58:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:58:01Z"
    name: kube-system.18a86729843fe5b3
    namespace: default
    resourceVersion: "7670"
    uid: 01d68f5c-3389-4f66-a3ed-57a52f74a8f7
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846979dcf4-pfqfs
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-21T14:58:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:58:16Z"
    name: kube-system.18a8672d02af7775
    namespace: default
    resourceVersion: "7964"
    uid: c699d767-dd67-4606-b7fb-65f65278e693
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846979dcf4-pfqfs
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-21T14:58:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:58:16Z"
    name: kube-system.18a8672d02e881e1
    namespace: default
    resourceVersion: "7965"
    uid: ec3ba76f-3742-4997-9ad6-e6e55160d22f
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846979dcf4-pfqfs
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-21T14:58:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:58:16Z"
    name: kube-system.18a8672d031fd5a7
    namespace: default
    resourceVersion: "7966"
    uid: ed249368-5527-4c4d-bd00-0e9942cdd1ae
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846979dcf4-pfqfs
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-21T14:59:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:59:16Z"
    name: kube-system.18a8673afb363533
    namespace: default
    resourceVersion: "10062"
    uid: 24f7f57f-06a0-402f-9e66-14ea48e4c029
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846979dcf4-pfqfs
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:50:57Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-21T14:50:57Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-21T14:50:57Z"
    name: openshift-kube-apiserver.18a866c6d95278a1
    namespace: default
    resourceVersion: "274"
    uid: 4a5f5be8-2c7d-4a9f-9e7d-be9db042f4d6
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-66dfcbd99-hcgbx
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46458"
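# A minimal sketch of pulling the failure signal back out of a dump like this one.
# Assumptions: kubectl access to the same cluster (the events above were captured
# from the default namespace), or this file saved locally as events.yaml and
# mikefarah yq v4 installed; adjust names to your environment.
#
# Live, from the cluster:
#   kubectl get events -n default --field-selector type=Warning
#
# Offline, from this file:
#   yq '.items[] | select(.type == "Warning") | .message' events.yaml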