---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:54Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-679kl
  kind: Event
  lastTimestamp: "2026-05-11T20:50:54Z"
  message: CSR "csr-679kl" has been approved
  metadata:
    creationTimestamp: "2026-05-11T20:50:54Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-05-11T20:50:54Z"
    name: csr-679kl.18ae9e06af335a37
    namespace: default
    resourceVersion: "6445"
    uid: c38037c2-3109-4190-aa23-d56625ed5687
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:51:27Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-dvbsc
  kind: Event
  lastTimestamp: "2026-05-11T20:51:27Z"
  message: CSR "csr-dvbsc" has been approved
  metadata:
    creationTimestamp: "2026-05-11T20:51:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-05-11T20:51:27Z"
    name: csr-dvbsc.18ae9e0e5c189a08
    namespace: default
    resourceVersion: "7735"
    uid: b3af6ce4-2cf0-44af-8a7c-9881300ad57e
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:51:21Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-jlbvh
  kind: Event
  lastTimestamp: "2026-05-11T20:51:21Z"
  message: CSR "csr-jlbvh" has been approved
  metadata:
    creationTimestamp: "2026-05-11T20:51:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-05-11T20:51:21Z"
    name: csr-jlbvh.18ae9e0cf1791a93
    namespace: default
    resourceVersion: "7389"
    uid: fff4fffe-44f2-4712-93d2-76bf0705f637
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:49Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-p7pwz
  kind: Event
  lastTimestamp: "2026-05-11T20:50:49Z"
  message: CSR "csr-p7pwz" has been approved
  metadata:
    creationTimestamp: "2026-05-11T20:50:49Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-05-11T20:50:49Z"
    name: csr-p7pwz.18ae9e05895721aa
    namespace: default
    resourceVersion: "6275"
    uid: 4edcf824-9b69-41df-b900-b84d96e0b42b
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:44Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-rqfjl
  kind: Event
  lastTimestamp: "2026-05-11T20:50:44Z"
  message: CSR "csr-rqfjl" has been approved
  metadata:
    creationTimestamp: "2026-05-11T20:50:44Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-05-11T20:50:44Z"
    name: csr-rqfjl.18ae9e04741d23df
    namespace: default
    resourceVersion: "6192"
    uid: c2327405-0a3f-47d6-8848-f4ef5683a824
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:48Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-s4r6l
  kind: Event
  lastTimestamp: "2026-05-11T20:50:48Z"
  message: CSR "csr-s4r6l" has been approved
  metadata:
    creationTimestamp: "2026-05-11T20:50:48Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-05-11T20:50:48Z"
    name: csr-s4r6l.18ae9e053fad9179
    namespace: default
    resourceVersion: "6254"
    uid: 26fded85-9fc8-4d5d-a35e-3c496004ca50
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-05-11T20:56:35Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14512"
    uid: 94a8c5b9-b5bb-463d-a382-6f5dbe7b05e5
  kind: Event
  lastTimestamp: "2026-05-11T20:56:42Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-05-11T20:56:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-05-11T20:56:42Z"
    name: default-gateway.18ae9e561d95b670
    namespace: default
    resourceVersion: "14749"
    uid: 64e0797f-7450-4889-a0b3-9bc2022e0d59
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-05-11T20:56:45Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "15125"
    uid: b54d918e-cc67-4176-9c77-b40ee83c414c
  kind: Event
  lastTimestamp: "2026-05-11T20:57:11Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:b54d918e-cc67-4176-9c77-b40ee83c414c platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:b54d918e-cc67-4176-9c77-b40ee83c414c]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number\n if [ ${#gid_index_count[@]} -gt 1 ]; then\n echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-05-11T20:56:45Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-05-11T20:57:11Z"
    name: default-kserve.18ae9e588833a915
    namespace: default
    resourceVersion: "16335"
    uid: 92c56890-80ab-4feb-9331-f265e8cee176
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 34
  eventTime: null
  firstTimestamp: "2026-05-11T20:56:34Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14247"
    uid: 1104f7a2-613a-4903-9141-94ac84287f62
  kind: Event
  lastTimestamp: "2026-05-11T21:16:13Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-05-11T20:56:34Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-05-11T21:16:13Z"
    name: default-monitoring.18ae9e55dee81ac7
    namespace: default
    resourceVersion: "38135"
    uid: 4af8dc2a-91a0-45a7-96ee-9a6110e71207
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:22Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: ip-10-0-128-58.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:22Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-05-11T20:50:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:22Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff3974ed9c
    namespace: default
    resourceVersion: "5671"
    uid: 82af2a25-d012-4720-bdb1-5914dc536a63
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-58.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-58.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:22Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: ip-10-0-128-58.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:22Z"
  message: 'Node ip-10-0-128-58.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-05-11T20:50:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:22Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff3b1212d2
    namespace: default
    resourceVersion: "5752"
    uid: 990e8d30-90a1-4424-aa66-45953bd837a1
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-58.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-58.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:22Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: ip-10-0-128-58.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:22Z"
  message: 'Node ip-10-0-128-58.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-05-11T20:50:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:22Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff3b12536d
    namespace: default
    resourceVersion: "5759"
    uid: 63e20222-4cf2-493f-879f-cceaf4467300
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-58.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-58.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:22Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: ip-10-0-128-58.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:22Z"
  message: 'Node ip-10-0-128-58.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-05-11T20:50:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:22Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff3b128445
    namespace: default
    resourceVersion: "5762"
    uid: e2db0363-8548-4c29-b04d-43d4d44c87b8
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-58.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-58.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:22Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: ip-10-0-128-58.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:22Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-05-11T20:50:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:22Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff3d6da035
    namespace: default
    resourceVersion: "5675"
    uid: f1dd200c-8727-4aac-9d59-2eac74bc7e67
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-58.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-58.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:22Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    resourceVersion: "5677"
    uid: bb5217be-a51b-4afa-b763-14be1a419b7f
  kind: Event
  lastTimestamp: "2026-05-11T20:50:22Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-05-11T20:50:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-05-11T20:50:22Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff534541b8
    namespace: default
    resourceVersion: "5769"
    uid: 7456b59c-4041-4231-8d42-de80585b30fa
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:23Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: bb5217be-a51b-4afa-b763-14be1a419b7f
  kind: Event
  lastTimestamp: "2026-05-11T20:50:23Z"
  message: 'Node ip-10-0-128-58.ec2.internal event: Registered Node ip-10-0-128-58.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-05-11T20:50:23Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-05-11T20:50:23Z"
    name: ip-10-0-128-58.ec2.internal.18ae9dff91c3367c
    namespace: default
    resourceVersion: "5789"
    uid: e557d3ee-3231-4981-aa68-523e730976fe
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:49Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    resourceVersion: "6280"
    uid: bb5217be-a51b-4afa-b763-14be1a419b7f
  kind: Event
  lastTimestamp: "2026-05-11T20:50:49Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-128-58.ec2.internal, error getting gateway config for node ip-10-0-128-58.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-128-58.ec2.internal", failed to update chassis to local for local node ip-10-0-128-58.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-128-58.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-128-58.ec2.internal]'
  metadata:
    creationTimestamp: "2026-05-11T20:50:49Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-128-58
      operation: Update
      time: "2026-05-11T20:50:49Z"
    name: ip-10-0-128-58.ec2.internal.18ae9e059cf299fb
    namespace: default
    resourceVersion: "6282"
    uid: 8e6c961f-797f-4733-8804-c23ad4775cef
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: ip-10-0-128-58.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: 'Node ip-10-0-128-58.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-128-58.ec2.internal.18ae9e06f95a0310
    namespace: default
    resourceVersion: "6549"
    uid: f92f22eb-68b6-4c0f-89a9-fbb03aad8a80
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-58.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-58.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:51:23Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-58.ec2.internal
    uid: bb5217be-a51b-4afa-b763-14be1a419b7f
  kind: Event
  lastTimestamp: "2026-05-11T20:51:23Z"
  message: 'Node ip-10-0-128-58.ec2.internal event: Registered Node ip-10-0-128-58.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-05-11T20:51:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-05-11T20:51:24Z"
    name: ip-10-0-128-58.ec2.internal.18ae9e0d92af4b72
    namespace: default
    resourceVersion: "7521"
    uid: 18217dab-7ead-44ae-901e-b614f8955200
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: ip-10-0-133-205.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e06df8a40e5
    namespace: default
    resourceVersion: "6462"
    uid: 016ec63b-71c8-4722-b7f8-0cbeb9beb487
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-205.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-205.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: ip-10-0-133-205.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: 'Node ip-10-0-133-205.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e06e18061d0
    namespace: default
    resourceVersion: "6514"
    uid: b8dba458-13af-4d19-8d46-327797130cb0
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-205.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-205.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: ip-10-0-133-205.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: 'Node ip-10-0-133-205.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e06e180a6bc
    namespace: default
    resourceVersion: "6519"
    uid: 27fb22b8-17e0-4440-81b9-e9b7752c8db3
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-205.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-205.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: ip-10-0-133-205.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: 'Node ip-10-0-133-205.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e06e180ca73
    namespace: default
    resourceVersion: "6527"
    uid: 75d19b67-19b6-4e9d-a340-628de01dd04a
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-205.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-205.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: ip-10-0-133-205.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e06e489e4c4
    namespace: default
    resourceVersion: "6466"
    uid: 429edc33-2b44-4891-ac98-8395008480c1
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-205.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-205.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:55Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    resourceVersion: "6468"
    uid: 81532a35-a04f-4940-b22f-52620bb49895
  kind: Event
  lastTimestamp: "2026-05-11T20:50:55Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-05-11T20:50:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-05-11T20:50:55Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e06f96c8eb7
    namespace: default
    resourceVersion: "6550"
    uid: 9cec8fd5-f584-40b7-8ac9-201090e7e1dc
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:58Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: 81532a35-a04f-4940-b22f-52620bb49895
  kind: Event
  lastTimestamp: "2026-05-11T20:50:58Z"
  message: 'Node ip-10-0-133-205.ec2.internal event: Registered Node ip-10-0-133-205.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-05-11T20:50:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-05-11T20:50:58Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e07b8518f11
    namespace: default
    resourceVersion: "6731"
    uid: 92c7706b-2b11-425c-a742-1c3caf941d67
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:51:23Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: 81532a35-a04f-4940-b22f-52620bb49895
  kind: Event
  lastTimestamp: "2026-05-11T20:51:23Z"
  message: 'Node ip-10-0-133-205.ec2.internal event: Registered Node ip-10-0-133-205.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-05-11T20:51:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-05-11T20:51:24Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e0d92af663e
    namespace: default
    resourceVersion: "7524"
    uid: 4924fef4-8cab-4492-8d0e-6434d1030878
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:51:28Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-205.ec2.internal
    uid: ip-10-0-133-205.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:51:28Z"
  message: 'Node ip-10-0-133-205.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-05-11T20:51:28Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:51:28Z"
    name: ip-10-0-133-205.ec2.internal.18ae9e0ea4f19848
    namespace: default
    resourceVersion: "7748"
    uid: 3f1d6648-37e2-4946-83ba-7f58ab9783ec
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-205.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-205.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: ip-10-0-135-190.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:17Z"
  message: 'Node ip-10-0-135-190.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-05-11T20:50:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:17Z"
    name: ip-10-0-135-190.ec2.internal.18ae9dfe076c7b28
    namespace: default
    resourceVersion: "5424"
    uid: 0dbecc5d-8c2b-417c-94ab-0eb6ed625f5b
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-190.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-190.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: ip-10-0-135-190.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:17Z"
  message: 'Node ip-10-0-135-190.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-05-11T20:50:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:17Z"
    name: ip-10-0-135-190.ec2.internal.18ae9dfe076cbef0
    namespace: default
    resourceVersion: "5425"
    uid: dcdcbc82-4c1f-44c3-9179-a5eb2d0dda9a
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-190.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-190.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: ip-10-0-135-190.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:17Z"
  message: 'Node ip-10-0-135-190.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-05-11T20:50:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:17Z"
    name: ip-10-0-135-190.ec2.internal.18ae9dfe076ce3ea
    namespace: default
    resourceVersion: "5426"
    uid: 769b2abc-2704-4f16-ac16-5552bdcbceb3
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-190.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-190.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: ip-10-0-135-190.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:17Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-05-11T20:50:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:17Z"
    name: ip-10-0-135-190.ec2.internal.18ae9dfe09f558ea
    namespace: default
    resourceVersion: "5384"
    uid: 767c5233-40a8-4144-b6f8-11fd6ae173be
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-190.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-190.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:17Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    resourceVersion: "5387"
    uid: 0fd1a0d9-30a3-401e-b97a-3bdf1f85b7b7
  kind: Event
  lastTimestamp: "2026-05-11T20:50:17Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-05-11T20:50:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-05-11T20:50:17Z"
    name: ip-10-0-135-190.ec2.internal.18ae9dfe1fa68228
    namespace: default
    resourceVersion: "5447"
    uid: 43b9f6f8-9ab7-492e-9df1-1cad910ee8d5
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:18Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: 0fd1a0d9-30a3-401e-b97a-3bdf1f85b7b7
  kind: Event
  lastTimestamp: "2026-05-11T20:50:18Z"
  message: 'Node ip-10-0-135-190.ec2.internal event: Registered Node ip-10-0-135-190.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-05-11T20:50:18Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-05-11T20:50:18Z"
    name: ip-10-0-135-190.ec2.internal.18ae9dfe67af8be2
    namespace: default
    resourceVersion: "5601"
    uid: 9060a1ea-c286-4b4c-9bfe-ec198ecd1e92
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:50:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: ip-10-0-135-190.ec2.internal
  kind: Event
  lastTimestamp: "2026-05-11T20:50:50Z"
  message: 'Node ip-10-0-135-190.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-05-11T20:50:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-05-11T20:50:50Z"
    name: ip-10-0-135-190.ec2.internal.18ae9e05e3cc817f
    namespace: default
    resourceVersion: "6298"
    uid: 041c2631-3f72-43ac-8330-41b9326258e5
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-190.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-190.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:51:23Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-190.ec2.internal
    uid: 0fd1a0d9-30a3-401e-b97a-3bdf1f85b7b7
  kind: Event
  lastTimestamp: "2026-05-11T20:51:23Z"
  message: 'Node ip-10-0-135-190.ec2.internal event: Registered Node ip-10-0-135-190.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-05-11T20:51:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-05-11T20:51:24Z"
    name: ip-10-0-135-190.ec2.internal.18ae9e0d92ae1c1a
    namespace: default
    resourceVersion: "7513"
    uid: fbfd00c1-eac0-4b44-8972-f98242a6c0ae
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 3
  eventTime: null
  firstTimestamp: "2026-05-11T20:57:46Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16592"
    uid: e07651e7-bdd5-4415-9123-74db504cb573
  kind: Event
  lastTimestamp: "2026-05-11T20:57:47Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-05-11T20:57:47Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-05-11T20:57:48Z"
    name: kuadrant-system.18ae9e66be09b6c8
    namespace: default
    resourceVersion: "16784"
    uid: fc7f6fec-946a-4b89-a0d2-bfd92c91ccb1
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-05-11T20:51:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-05-11T20:51:03Z"
    name: kube-system.18ae9e08d42a7df8
    namespace: default
    resourceVersion: "6999"
    uid: b22c9c88-8939-4e81-81a7-1e2639b2904e
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-64969b85d5-ffxsq
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-05-11T20:51:18Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-05-11T20:51:18Z"
    name: kube-system.18ae9e0c52b079b3
    namespace: default
    resourceVersion: "7357"
    uid: 85dcfe12-6cb4-4a56-b7ae-f6b6d81e43cb
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-64969b85d5-ffxsq
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-05-11T20:51:18Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-05-11T20:51:18Z"
    name: kube-system.18ae9e0c52e7e564
    namespace: default
    resourceVersion: "7358"
    uid: 86aee747-5e55-43ba-9ee0-a0199ad5333d
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-64969b85d5-ffxsq
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-05-11T20:51:18Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-05-11T20:51:18Z"
    name: kube-system.18ae9e0c532a7d8f
    namespace: default
    resourceVersion: "7359"
    uid: 6c7395a9-08a2-428a-b5ea-e464225912c6
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-64969b85d5-ffxsq
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-05-11T20:52:18Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-05-11T20:52:18Z"
    name: kube-system.18ae9e1a4b3f71a2
    namespace: default
    resourceVersion: "9805"
    uid: 25e87c8b-8f42-412a-b71c-f1f0ae4ad9ec
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-64969b85d5-ffxsq
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-05-11T20:45:54Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-05-11T20:45:54Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-05-11T20:45:54Z"
    name: openshift-kube-apiserver.18ae9dc0eca3ff6b
    namespace: default
    resourceVersion: "274"
    uid: b39c05e6-4293-494e-821e-e7405a340c35
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-7fdd8c4c6-2m8g4
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46039"