---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:23Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-g76xp
  kind: Event
  lastTimestamp: "2026-04-23T01:10:23Z"
  message: CSR "csr-g76xp" has been approved
  metadata:
    creationTimestamp: "2026-04-23T01:10:23Z"
    name: csr-g76xp.18a8d728eb6267c1
    namespace: default
    resourceVersion: "6303"
    uid: 3a819a5c-a22f-40f8-8b80-0cea73f33a22
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:17Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-gjclq
  kind: Event
  lastTimestamp: "2026-04-23T01:10:17Z"
  message: CSR "csr-gjclq" has been approved
  metadata:
    creationTimestamp: "2026-04-23T01:10:17Z"
    name: csr-gjclq.18a8d72785b72fe2
    namespace: default
    resourceVersion: "6145"
    uid: 21f18770-fbc1-46eb-b6e7-87a63b6c6bf5
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:43Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-smvkn
  kind: Event
  lastTimestamp: "2026-04-23T01:10:43Z"
  message: CSR "csr-smvkn" has been approved
  metadata:
    creationTimestamp: "2026-04-23T01:10:43Z"
    name: csr-smvkn.18a8d72d9cced967
    namespace: default
    resourceVersion: "6728"
    uid: 325aa5bd-68e0-4724-b096-bd51b10a1858
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:38Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-t2dn8
  kind: Event
  lastTimestamp: "2026-04-23T01:10:38Z"
  message: CSR "csr-t2dn8" has been approved
  metadata:
    creationTimestamp: "2026-04-23T01:10:38Z"
    name: csr-t2dn8.18a8d72c58239595
    namespace: default
    resourceVersion: "6658"
    uid: a851f6d0-e7ee-4e4f-876f-493c53279b73
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:33Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-tq2bm
  kind: Event
  lastTimestamp: "2026-04-23T01:10:33Z"
  message: CSR "csr-tq2bm" has been approved
  metadata:
    creationTimestamp: "2026-04-23T01:10:33Z"
    name: csr-tq2bm.18a8d72b463441c2
    namespace: default
    resourceVersion: "6550"
    uid: a0c815e4-c168-4770-9d1a-b462b568a454
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:26Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-xkbsr
  kind: Event
  lastTimestamp: "2026-04-23T01:10:26Z"
  message: CSR "csr-xkbsr" has been approved
  metadata:
    creationTimestamp: "2026-04-23T01:10:26Z"
    name: csr-xkbsr.18a8d7299f85e258
    namespace: default
    resourceVersion: "6380"
    uid: aeed7d2c-3ba3-4023-aea6-a9254e9509c3
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-23T01:16:12Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14390"
    uid: 24dee52d-57a6-42d2-ae7e-63d31ff37776
  kind: Event
  lastTimestamp: "2026-04-23T01:16:20Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-23T01:16:12Z"
    name: default-gateway.18a8d77a0550004e
    namespace: default
    resourceVersion: "14689"
    uid: 34fa4b95-a43e-4a3f-b5db-399519ee0bf4
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-23T01:16:20Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14782"
    uid: dfcef8d3-b35e-419c-bd14-f200ea57f881
  kind: Event
  lastTimestamp: "2026-04-23T01:16:48Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig
    metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:dfcef8d3-b35e-419c-bd14-f200ea57f881 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true]
    labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:dfcef8d3-b35e-419c-bd14-f200ea57f881]]]
    spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for
    each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and
    count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]]
    image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]]
    readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError
    volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]]
    initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]]
    image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]]
    readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError
    volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-23T01:16:20Z"
    name: default-kserve.18a8d77c19313310
    namespace: default
    resourceVersion: "16288"
    uid: 6a5aab40-a476-4259-b3b7-c47057abf6fb
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 30
  eventTime: null
  firstTimestamp: "2026-04-23T01:16:10Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14013"
    uid: f81c9d20-8a09-41c0-8b16-bc705b304c97
  kind: Event
  lastTimestamp: "2026-04-23T01:33:03Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-23T01:16:10Z"
    name: default-monitoring.18a8d7799ca3e86f
    namespace: default
    resourceVersion: "35260"
    uid: bdbaccf3-5c31-43d9-bc1e-b5c45ca41b17
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: ip-10-0-135-74.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d72684af04fb
    namespace: default
    resourceVersion: "5928"
    uid: 081f72fb-1b37-42d9-a3b4-60edfbeef991
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-74.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-74.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: ip-10-0-135-74.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: 'Node ip-10-0-135-74.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d72686437c35
    namespace: default
    resourceVersion: "6026"
    uid: 3f034e80-2c4b-40f0-9665-c7f4dd8f5b26
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-74.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-74.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: ip-10-0-135-74.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: 'Node ip-10-0-135-74.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d7268643f79a
    namespace: default
    resourceVersion: "6028"
    uid: 538a8e26-8a24-490e-a6a1-65ae202b1096
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-74.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-74.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: ip-10-0-135-74.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: 'Node ip-10-0-135-74.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d7268644275e
    namespace: default
    resourceVersion: "6029"
    uid: 9cf7669a-b7b5-4ebf-853f-350a3d98d22e
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-74.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-74.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: ip-10-0-135-74.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d7268971bae4
    namespace: default
    resourceVersion: "5933"
    uid: c63b3d8d-8348-4b01-b723-d3e39937275d
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-74.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-74.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: 8c627685-5396-4a35-b137-b5ed5f4e00d8
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: 'Node ip-10-0-135-74.ec2.internal event: Registered Node ip-10-0-135-74.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d7269a3480a4
    namespace: default
    resourceVersion: "6031"
    uid: e3db9667-4611-4cce-9358-648a17307003
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:13Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    resourceVersion: "5934"
    uid: 8c627685-5396-4a35-b137-b5ed5f4e00d8
  kind: Event
  lastTimestamp: "2026-04-23T01:10:13Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-23T01:10:13Z"
    name: ip-10-0-135-74.ec2.internal.18a8d7269d77a99b
    namespace: default
    resourceVersion: "6035"
    uid: c614c902-0bc9-4a7c-b012-38ed883b2a92
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: ip-10-0-135-74.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:45Z"
  message: 'Node ip-10-0-135-74.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-23T01:10:45Z"
    name: ip-10-0-135-74.ec2.internal.18a8d72df2be946c
    namespace: default
    resourceVersion: "6740"
    uid: 80d7e42c-f13b-40e7-aa4e-7fcc0e4314ae
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-74.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-74.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:12:20Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-74.ec2.internal
    uid: 8c627685-5396-4a35-b137-b5ed5f4e00d8
  kind: Event
  lastTimestamp: "2026-04-23T01:12:20Z"
  message: 'Node ip-10-0-135-74.ec2.internal event: Registered Node ip-10-0-135-74.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-23T01:12:20Z"
    name: ip-10-0-135-74.ec2.internal.18a8d744145d1c71
    namespace: default
    resourceVersion: "8098"
    uid: 30613b1a-6e70-47c6-968b-a22b54ff6bde
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:01Z"
  involvedObject:
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: ip-10-0-137-21.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:01Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-23T01:10:01Z"
    name: ip-10-0-137-21.ec2.internal.18a8d723ae221fc2
    namespace: default
    resourceVersion: "5605"
    uid: 97dc053e-c17e-4d88-929b-c1ad9649f84b
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-137-21.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-137-21.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:01Z"
  involvedObject:
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: ip-10-0-137-21.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:01Z"
  message: 'Node ip-10-0-137-21.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-23T01:10:01Z"
    name: ip-10-0-137-21.ec2.internal.18a8d723afc4dcf0
    namespace: default
    resourceVersion: "5697"
    uid: efb2fbc3-dafd-4632-86d6-453e7415ad40
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-137-21.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-137-21.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:01Z"
  involvedObject:
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: ip-10-0-137-21.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:01Z"
  message: 'Node ip-10-0-137-21.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-23T01:10:01Z"
    name: ip-10-0-137-21.ec2.internal.18a8d723afc525bd
    namespace: default
    resourceVersion: "5698"
    uid: 1b2ba8f9-d87b-4273-a627-4e18d4ce5548
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-137-21.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-137-21.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:01Z"
  involvedObject:
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: ip-10-0-137-21.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:01Z"
  message: 'Node ip-10-0-137-21.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-23T01:10:01Z"
    name: ip-10-0-137-21.ec2.internal.18a8d723afc54bf6
    namespace: default
    resourceVersion: "5699"
    uid: df149400-12dd-4f28-b750-f7cfe7e3a879
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-137-21.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-137-21.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:01Z"
  involvedObject:
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: ip-10-0-137-21.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:01Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-23T01:10:01Z"
    name: ip-10-0-137-21.ec2.internal.18a8d723b2a8bf90
    namespace: default
    resourceVersion: "5609"
    uid: 4b467e4c-d86e-4ee0-bc56-ab23336c9c6a
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-137-21.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-137-21.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:01Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    resourceVersion: "5610"
    uid: 532507a8-480b-40f2-bb93-d0b3ac85ebb8
  kind: Event
  lastTimestamp: "2026-04-23T01:10:01Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-23T01:10:01Z"
    name: ip-10-0-137-21.ec2.internal.18a8d723c7eb430b
    namespace: default
    resourceVersion: "5706"
    uid: 8492fc64-ef1c-4c33-bc98-fc709c265f0c
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:03Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: 532507a8-480b-40f2-bb93-d0b3ac85ebb8
  kind: Event
  lastTimestamp: "2026-04-23T01:10:03Z"
  message: 'Node ip-10-0-137-21.ec2.internal event: Registered Node ip-10-0-137-21.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-23T01:10:03Z"
    name: ip-10-0-137-21.ec2.internal.18a8d7244607340d
    namespace: default
    resourceVersion: "5754"
    uid: 4c9a6611-fa92-4106-bd49-76a34e4c8399
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:35Z"
  involvedObject:
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: ip-10-0-137-21.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:35Z"
  message: 'Node ip-10-0-137-21.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-23T01:10:35Z"
    name: ip-10-0-137-21.ec2.internal.18a8d72b937c3f34
    namespace: default
    resourceVersion: "6590"
    uid: a42a9622-9aec-440d-9bd8-5c263ddabef4
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-137-21.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-137-21.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:12:20Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-137-21.ec2.internal
    uid: 532507a8-480b-40f2-bb93-d0b3ac85ebb8
  kind: Event
  lastTimestamp: "2026-04-23T01:12:20Z"
  message: 'Node ip-10-0-137-21.ec2.internal event: Registered Node ip-10-0-137-21.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-23T01:12:20Z"
    name: ip-10-0-137-21.ec2.internal.18a8d744145c08a7
    namespace: default
    resourceVersion: "8091"
    uid: 50928918-1dbc-4e56-a0fd-bd108d4d2ed9
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:09:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: ip-10-0-138-235.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:09:52Z"
  message: 'Node ip-10-0-138-235.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-23T01:09:52Z"
    name: ip-10-0-138-235.ec2.internal.18a8d721ace3b030
    namespace: default
    resourceVersion: "5448"
    uid: d3ba7666-4e4f-43e9-abb3-0e9f66050a99
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-235.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-235.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:09:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: ip-10-0-138-235.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:09:52Z"
  message: 'Node ip-10-0-138-235.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-23T01:09:52Z"
    name: ip-10-0-138-235.ec2.internal.18a8d721ace3f16e
    namespace: default
    resourceVersion: "5450"
    uid: 8b7e1a14-7139-444a-adba-7b4a30eda385
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-235.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-235.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-23T01:09:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: ip-10-0-138-235.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:09:52Z"
  message: 'Node ip-10-0-138-235.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-23T01:09:52Z"
    name: ip-10-0-138-235.ec2.internal.18a8d721ace4197c
    namespace: default
    resourceVersion: "5451"
    uid: e278bd1e-8ff7-4030-b7a7-c911540e9199
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-235.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-235.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:09:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: ip-10-0-138-235.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:09:52Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-23T01:09:52Z"
    name: ip-10-0-138-235.ec2.internal.18a8d721afc471fd
    namespace: default
    resourceVersion: "5383"
    uid: 31763f54-05d3-4d38-be59-2c3c81325608
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-235.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-235.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:09:53Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    resourceVersion: "5384"
    uid: 98b5b32a-2fb7-4a8a-81c1-bc912f96cda5
  kind: Event
  lastTimestamp: "2026-04-23T01:09:53Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-23T01:09:53Z"
    name: ip-10-0-138-235.ec2.internal.18a8d721c90bff97
    namespace: default
    resourceVersion: "5476"
    uid: 821dabfd-b33b-414d-b540-2c396c000704
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:09:53Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: 98b5b32a-2fb7-4a8a-81c1-bc912f96cda5
  kind: Event
  lastTimestamp: "2026-04-23T01:09:53Z"
  message: 'Node ip-10-0-138-235.ec2.internal event: Registered Node ip-10-0-138-235.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-23T01:09:53Z"
    name: ip-10-0-138-235.ec2.internal.18a8d721f1d918c0
    namespace: default
    resourceVersion: "5515"
    uid: 885ec4ae-297c-4a2b-b0dc-9c922f8014ea
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:10:25Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: ip-10-0-138-235.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-23T01:10:25Z"
  message: 'Node ip-10-0-138-235.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-23T01:10:25Z"
    name: ip-10-0-138-235.ec2.internal.18a8d7293a9c03b6
    namespace: default
    resourceVersion: "6326"
    uid: ebc4dbf1-565e-42f6-80e0-5dc5e4e6ea93
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-235.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-235.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:12:20Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-235.ec2.internal
    uid: 98b5b32a-2fb7-4a8a-81c1-bc912f96cda5
  kind: Event
  lastTimestamp: "2026-04-23T01:12:20Z"
  message: 'Node ip-10-0-138-235.ec2.internal event: Registered Node ip-10-0-138-235.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-23T01:12:20Z"
    name: ip-10-0-138-235.ec2.internal.18a8d744145cfeb7
    namespace: default
    resourceVersion: "8093"
    uid: 572656c7-f8fb-4672-b5bb-bfa9bb6f6aa0
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 8
  eventTime: null
  firstTimestamp: "2026-04-23T01:17:22Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16540"
    uid: 00de1e64-a687-4d8b-80f4-4c1fe5ab62fe
  kind: Event
  lastTimestamp: "2026-04-23T01:17:25Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-23T01:17:22Z"
    name: kuadrant-system.18a8d78a74af9215
    namespace: default
    resourceVersion: "16748"
    uid: f224cdfc-e986-4373-a359-f43a91d34744
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-23T01:11:59Z"
    name: kube-system.18a8d73f26eaea10
    namespace: default
    resourceVersion: "7604"
    uid: 8cbe09ee-e1e4-4f2e-b9b0-fd763825668d
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6b94f887b9-ts48f
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-23T01:12:14Z"
    name: kube-system.18a8d742a54447f6
    namespace: default
    resourceVersion: "7873"
    uid: 73629ff9-4760-4c05-82a6-c81a562159fe
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6b94f887b9-ts48f
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-23T01:12:14Z"
    name: kube-system.18a8d742a574e041
    namespace: default
    resourceVersion: "7874"
    uid: 8046e0f5-9ac3-4520-b114-78a759ce8b4c
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6b94f887b9-ts48f
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-23T01:12:14Z"
    name: kube-system.18a8d742a5a81f84
    namespace: default
    resourceVersion: "7875"
    uid: 297ee886-55e1-45a6-881e-f1b89b5a9775
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6b94f887b9-ts48f
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-23T01:13:14Z"
    name: kube-system.18a8d7509dc1d6fd
    namespace: default
    resourceVersion: "9686"
    uid: e6b291db-f81b-421e-9b22-939a91c8215e
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6b94f887b9-ts48f
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-23T01:05:00Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-23T01:05:00Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-23T01:05:00Z"
    name: openshift-kube-apiserver.18a8d6ddbe5b9767
    namespace: default
    resourceVersion: "274"
    uid: 474532d9-f0a6-4709-a448-f425da2aa2b1
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-6d67c48d9-qmj46
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46189"