---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:33Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-9cf2s
  kind: Event
  lastTimestamp: "2026-04-20T21:13:33Z"
  message: CSR "csr-9cf2s" has been approved
  metadata:
    creationTimestamp: "2026-04-20T21:13:33Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T21:13:33Z"
    name: csr-9cf2s.18a82d13266c46fa
    namespace: default
    resourceVersion: "6693"
    uid: 7102a45e-3c6c-427d-9224-54451674bdd5
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:26Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-f27tj
  kind: Event
  lastTimestamp: "2026-04-20T21:13:26Z"
  message: CSR "csr-f27tj" has been approved
  metadata:
    creationTimestamp: "2026-04-20T21:13:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T21:13:26Z"
    name: csr-f27tj.18a82d1194a75f03
    namespace: default
    resourceVersion: "6529"
    uid: 26b25b0b-3892-4a49-9936-4cba14fe7376
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:35Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-k6z2k
  kind: Event
  lastTimestamp: "2026-04-20T21:13:35Z"
  message: CSR "csr-k6z2k" has been approved
  metadata:
    creationTimestamp: "2026-04-20T21:13:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T21:13:35Z"
    name: csr-k6z2k.18a82d13b9003d86
    namespace: default
    resourceVersion: "6755"
    uid: 9e58b0e4-cdc9-43b0-86e4-6bd3824afde6
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:41Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-nvw87
  kind: Event
  lastTimestamp: "2026-04-20T21:13:41Z"
  message: CSR "csr-nvw87" has been approved
  metadata:
    creationTimestamp: "2026-04-20T21:13:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T21:13:41Z"
    name: csr-nvw87.18a82d1524af9351
    namespace: default
    resourceVersion: "6835"
    uid: 64a3cd77-9e35-48f0-b809-f4a6258c0ed2
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:56Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-pmbxr
  kind: Event
  lastTimestamp: "2026-04-20T21:12:56Z"
  message: CSR "csr-pmbxr" has been approved
  metadata:
    creationTimestamp: "2026-04-20T21:12:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T21:12:56Z"
    name: csr-pmbxr.18a82d0a93c737af
    namespace: default
    resourceVersion: "5799"
    uid: b9974abd-68d4-4aaa-9589-db756c1d5d9b
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:02Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-zp7kk
  kind: Event
  lastTimestamp: "2026-04-20T21:13:02Z"
  message: CSR "csr-zp7kk" has been approved
  metadata:
    creationTimestamp: "2026-04-20T21:13:02Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T21:13:02Z"
    name: csr-zp7kk.18a82d0c19272fb6
    namespace: default
    resourceVersion: "6021"
    uid: 65e702a3-cd5f-44c6-b8df-7dbd4dc6f936
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 12
  eventTime: null
  firstTimestamp: "2026-04-20T21:18:37Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14127"
    uid: 631609ae-9819-4429-a51f-1e0b78cb9897
  kind: Event
  lastTimestamp: "2026-04-20T21:18:49Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-20T21:18:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T21:18:49Z"
    name: default-gateway.18a82d59e44a7069
    namespace: default
    resourceVersion: "14542"
    uid: 3279474d-b453-4145-b2a8-6766308a2b37
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 25
  eventTime: null
  firstTimestamp: "2026-04-20T21:18:46Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14484"
    uid: 248b47ae-b064-4263-b0c8-6eb3bebc37e9
  kind: Event
  lastTimestamp: "2026-04-20T21:19:07Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:248b47ae-b064-4263-b0c8-6eb3bebc37e9 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:248b47ae-b064-4263-b0c8-6eb3bebc37e9]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-20T21:18:46Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T21:19:07Z"
    name: default-kserve.18a82d5c1bde7a51
    namespace: default
    resourceVersion: "15623"
    uid: ef9588d8-431d-493b-bf21-6734ca3fc9f3
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 31
  eventTime: null
  firstTimestamp: "2026-04-20T21:18:35Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "13946"
    uid: 12a2b5e2-e441-446d-9e79-6d66caf28971
  kind: Event
  lastTimestamp: "2026-04-20T21:38:13Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-20T21:18:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T21:38:13Z"
    name: default-monitoring.18a82d59998cdae3
    namespace: default
    resourceVersion: "38273"
    uid: 969cc518-468a-4248-a582-fb65ae6bd66b
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:10Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: ip-10-0-129-149.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:10Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T21:13:10Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:10Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0dc5ce7aeb
    namespace: default
    resourceVersion: "6139"
    uid: 11bb8e01-508a-43d6-92b5-1112aee4f5b1
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-149.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-149.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:10Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: ip-10-0-129-149.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:10Z"
  message: 'Node ip-10-0-129-149.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T21:13:10Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:10Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0dc78d5629
    namespace: default
    resourceVersion: "6192"
    uid: ee731887-3fb0-4a8d-ac53-f684784fd1e2
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-149.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-149.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:10Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: ip-10-0-129-149.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:10Z"
  message: 'Node ip-10-0-129-149.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T21:13:10Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:10Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0dc78dac8c
    namespace: default
    resourceVersion: "6193"
    uid: 47b65013-cb09-4d98-9fec-5b1f12911d0c
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-149.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-149.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:10Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: ip-10-0-129-149.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:10Z"
  message: 'Node ip-10-0-129-149.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T21:13:10Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:10Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0dc78dd4a0
    namespace: default
    resourceVersion: "6194"
    uid: ff73f6f5-adb2-4b60-b2ef-771fb2e1a89b
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-149.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-149.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:10Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: ip-10-0-129-149.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:10Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T21:13:10Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:10Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0dcacb842d
    namespace: default
    resourceVersion: "6143"
    uid: 63c89274-a54e-45b7-a519-139cf3fb9856
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-149.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-149.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:10Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    resourceVersion: "6146"
    uid: 7467041b-6f1c-4f37-808e-7f55e12d00d8
  kind: Event
  lastTimestamp: "2026-04-20T21:13:10Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T21:13:10Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T21:13:10Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0de0809c4c
    namespace: default
    resourceVersion: "6217"
    uid: b4913bc4-0284-43cc-8f17-a6c65a7bee75
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:15Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: 7467041b-6f1c-4f37-808e-7f55e12d00d8
  kind: Event
  lastTimestamp: "2026-04-20T21:13:15Z"
  message: 'Node ip-10-0-129-149.ec2.internal event: Registered Node ip-10-0-129-149.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T21:13:15Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T21:13:15Z"
    name: ip-10-0-129-149.ec2.internal.18a82d0ee621e76e
    namespace: default
    resourceVersion: "6377"
    uid: 5ce89363-fcd2-477e-984c-6ba197aba868
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:43Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: ip-10-0-129-149.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:43Z"
  message: 'Node ip-10-0-129-149.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T21:13:43Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:43Z"
    name: ip-10-0-129-149.ec2.internal.18a82d1571c3c11a
    namespace: default
    resourceVersion: "6884"
    uid: 78ee9d25-c4d6-4f97-bc8e-c3128c82bc47
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-149.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-149.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:14:34Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-149.ec2.internal
    uid: 7467041b-6f1c-4f37-808e-7f55e12d00d8
  kind: Event
  lastTimestamp: "2026-04-20T21:14:34Z"
  message: 'Node ip-10-0-129-149.ec2.internal event: Registered Node ip-10-0-129-149.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T21:14:34Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T21:14:34Z"
    name: ip-10-0-129-149.ec2.internal.18a82d2150cacbea
    namespace: default
    resourceVersion: "7863"
    uid: 95f6f0aa-5427-4fe9-aa89-81799e83b95f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: ip-10-0-129-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:30Z"
  message: 'Node ip-10-0-129-57.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T21:12:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:30Z"
    name: ip-10-0-129-57.ec2.internal.18a82d049609c6b1
    namespace: default
    resourceVersion: "5423"
    uid: 18b59702-6168-4032-a321-3f1775626a89
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: ip-10-0-129-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:30Z"
  message: 'Node ip-10-0-129-57.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T21:12:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:30Z"
    name: ip-10-0-129-57.ec2.internal.18a82d04960a0af1
    namespace: default
    resourceVersion: "5427"
    uid: 5f168125-70b9-44a0-a757-bcbad3865faa
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: ip-10-0-129-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:30Z"
  message: 'Node ip-10-0-129-57.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T21:12:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:31Z"
    name: ip-10-0-129-57.ec2.internal.18a82d04960a301b
    namespace: default
    resourceVersion: "5429"
    uid: 6afb4054-d1b1-4c03-87a6-701f234b02c6
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: ip-10-0-129-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:30Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T21:12:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:30Z"
    name: ip-10-0-129-57.ec2.internal.18a82d0498ce36d3
    namespace: default
    resourceVersion: "5378"
    uid: f0536790-d0fd-4348-b850-93367ae61d88
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:31Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    resourceVersion: "5379"
    uid: 1318f230-d035-4cf2-9c82-08e623a23863
  kind: Event
  lastTimestamp: "2026-04-20T21:12:31Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T21:12:31Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T21:12:31Z"
    name: ip-10-0-129-57.ec2.internal.18a82d04b1e8d5f4
    namespace: default
    resourceVersion: "5453"
    uid: a7c26517-e266-499e-b247-3452810d3401
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:35Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: 1318f230-d035-4cf2-9c82-08e623a23863
  kind: Event
  lastTimestamp: "2026-04-20T21:12:35Z"
  message: 'Node ip-10-0-129-57.ec2.internal event: Registered Node ip-10-0-129-57.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T21:12:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T21:12:35Z"
    name: ip-10-0-129-57.ec2.internal.18a82d0595a75227
    namespace: default
    resourceVersion: "5577"
    uid: 442dcf62-433f-48b4-9a14-df018d6f2500
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:57Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    resourceVersion: "5819"
    uid: 1318f230-d035-4cf2-9c82-08e623a23863
  kind: Event
  lastTimestamp: "2026-04-20T21:12:57Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-129-57.ec2.internal, error getting gateway config for node ip-10-0-129-57.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-129-57.ec2.internal", failed to update chassis to local for local node ip-10-0-129-57.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-129-57.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-129-57.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-20T21:12:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-129-57
      operation: Update
      time: "2026-04-20T21:12:57Z"
    name: ip-10-0-129-57.ec2.internal.18a82d0aef897943
    namespace: default
    resourceVersion: "5823"
    uid: 14043570-0d93-4ed3-b8e9-d090ee144cf6
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: ip-10-0-129-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:04Z"
  message: 'Node ip-10-0-129-57.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T21:13:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:04Z"
    name: ip-10-0-129-57.ec2.internal.18a82d0c790f779f
    namespace: default
    resourceVersion: "6046"
    uid: d8fa630b-6d4e-4280-9cab-3eae75d1b72a
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:14:34Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-57.ec2.internal
    uid: 1318f230-d035-4cf2-9c82-08e623a23863
  kind: Event
  lastTimestamp: "2026-04-20T21:14:34Z"
  message: 'Node ip-10-0-129-57.ec2.internal event: Registered Node ip-10-0-129-57.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T21:14:34Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T21:14:34Z"
    name: ip-10-0-129-57.ec2.internal.18a82d2150cc9081
    namespace: default
    resourceVersion: "7870"
    uid: 51f33574-2d0d-4018-b3e5-5f3086872019
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: ip-10-0-132-45.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:59Z"
  message: 'Node ip-10-0-132-45.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T21:12:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:59Z"
    name: ip-10-0-132-45.ec2.internal.18a82d0b2bf3cd31
    namespace: default
    resourceVersion: "5938"
    uid: 1b07ceef-88f2-4012-a6b4-aa024116dd7d
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-45.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-45.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: ip-10-0-132-45.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:59Z"
  message: 'Node ip-10-0-132-45.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T21:12:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:59Z"
    name: ip-10-0-132-45.ec2.internal.18a82d0b2bf40c2f
    namespace: default
    resourceVersion: "5939"
    uid: 25711445-e158-48ab-a902-8a5e2c9cb416
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-45.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-45.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: ip-10-0-132-45.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:59Z"
  message: 'Node ip-10-0-132-45.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T21:12:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:59Z"
    name: ip-10-0-132-45.ec2.internal.18a82d0b2bf42f28
    namespace: default
    resourceVersion: "5940"
    uid: 3c179f34-96e7-4c46-a5e4-d0ac78a3c839
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-45.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-45.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: ip-10-0-132-45.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:12:59Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T21:12:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:12:59Z"
    name: ip-10-0-132-45.ec2.internal.18a82d0b2dc2c3b5
    namespace: default
    resourceVersion: "5845"
    uid: 0b7ad634-54e9-4615-bed9-a6e3ea6ad29c
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-45.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-45.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:12:59Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    resourceVersion: "5847"
    uid: 59a511ba-1fb9-433b-92d8-088c16955df0
  kind: Event
  lastTimestamp: "2026-04-20T21:12:59Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T21:12:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T21:12:59Z"
    name: ip-10-0-132-45.ec2.internal.18a82d0b42aeaa44
    namespace: default
    resourceVersion: "5943"
    uid: bf6e2db5-36e7-40fb-8de2-99449812a6b3
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:00Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: 59a511ba-1fb9-433b-92d8-088c16955df0
  kind: Event
  lastTimestamp: "2026-04-20T21:13:00Z"
  message: 'Node ip-10-0-132-45.ec2.internal event: Registered Node ip-10-0-132-45.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T21:13:00Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T21:13:00Z"
    name: ip-10-0-132-45.ec2.internal.18a82d0b67f54eb8
    namespace: default
    resourceVersion: "5956"
    uid: 8b659cc6-316a-4d71-b81c-8727e5266bf7
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:28Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    resourceVersion: "6543"
    uid: 59a511ba-1fb9-433b-92d8-088c16955df0
  kind: Event
  lastTimestamp: "2026-04-20T21:13:28Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-45.ec2.internal, error getting gateway config for node ip-10-0-132-45.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-132-45.ec2.internal", failed to update chassis to local for local node ip-10-0-132-45.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-132-45.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-45.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-20T21:13:28Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-132-45
      operation: Update
      time: "2026-04-20T21:13:28Z"
    name: ip-10-0-132-45.ec2.internal.18a82d11f1bc21b1
    namespace: default
    resourceVersion: "6545"
    uid: 91d75a8c-2edd-4702-a604-c2b463a13cf2
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:13:34Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: ip-10-0-132-45.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T21:13:34Z"
  message: 'Node ip-10-0-132-45.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T21:13:34Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T21:13:34Z"
    name: ip-10-0-132-45.ec2.internal.18a82d136d6c10e1
    namespace: default
    resourceVersion: "6718"
    uid: eea5333a-5475-4def-907c-232e159a09f1
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-45.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-45.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:14:34Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-45.ec2.internal
    uid: 59a511ba-1fb9-433b-92d8-088c16955df0
  kind: Event
  lastTimestamp: "2026-04-20T21:14:34Z"
  message: 'Node ip-10-0-132-45.ec2.internal event: Registered Node ip-10-0-132-45.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T21:14:34Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T21:14:34Z"
    name: ip-10-0-132-45.ec2.internal.18a82d2150ccb09e
    namespace: default
    resourceVersion: "7878"
    uid: 0cd50ea9-fb61-498f-955a-0e7635eb5c09
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T21:19:48Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16517"
    uid: 02458f6a-f6a5-4971-ae71-1738c2a0bb3e
  kind: Event
  lastTimestamp: "2026-04-20T21:19:52Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-20T21:19:48Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-20T21:19:52Z"
    name: kuadrant-system.18a82d6a980b7359
    namespace: default
    resourceVersion: "16801"
    uid: 708a74cc-078f-4fd6-a62d-5776de027e38
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-20T21:14:15Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T21:14:15Z"
    name: kube-system.18a82d1d0dc3534f
    namespace: default
    resourceVersion: "7499"
    uid: c74e4cba-92bf-4220-8203-b09762a56556
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-7c5c8b67cb-cldb5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-20T21:14:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T21:14:30Z"
    name: kube-system.18a82d208c44bd61
    namespace: default
    resourceVersion: "7764"
    uid: c6982fe5-6a8a-421b-a5e6-f23f88f8d588
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-7c5c8b67cb-cldb5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-20T21:14:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T21:14:30Z"
    name: kube-system.18a82d208c7a802b
    namespace: default
    resourceVersion: "7765"
    uid: d8da91df-4c21-4ebf-ae60-06d252be034f
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-7c5c8b67cb-cldb5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-20T21:14:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T21:14:30Z"
    name: kube-system.18a82d208cba5e7f
    namespace: default
    resourceVersion: "7766"
    uid: b70bcc43-db55-47d7-bc16-ae8cfa4206a6
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-7c5c8b67cb-cldb5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-20T21:15:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T21:15:30Z"
    name: kube-system.18a82d2e84c9918f
    namespace: default
    resourceVersion: "10184"
    uid: 5516d8b0-1727-4989-96fc-82cbd062b19d
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-7c5c8b67cb-cldb5
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T21:07:32Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-20T21:07:32Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-20T21:07:32Z"
    name: openshift-kube-apiserver.18a82cbf0cfee26d
    namespace: default
    resourceVersion: "274"
    uid: 83340ef6-574d-499e-97db-b8f6ff3d220a
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-67f7b795d4-vhchw
  type: Warning
kind: EventList
metadata:
  resourceVersion: "47347"