---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:33Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-jvlxc
  kind: Event
  lastTimestamp: "2026-04-20T20:11:33Z"
  message: CSR "csr-jvlxc" has been approved
  metadata:
    creationTimestamp: "2026-04-20T20:11:33Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T20:11:33Z"
    name: csr-jvlxc.18a829b109caab27
    namespace: default
    resourceVersion: "6195"
    uid: f1c8d042-58ef-47a6-a748-6392859d6435
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:40Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-l55pw
  kind: Event
  lastTimestamp: "2026-04-20T20:11:40Z"
  message: CSR "csr-l55pw" has been approved
  metadata:
    creationTimestamp: "2026-04-20T20:11:40Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T20:11:40Z"
    name: csr-l55pw.18a829b2a8ab6cc6
    namespace: default
    resourceVersion: "6323"
    uid: 21a925e0-55c9-4abc-80e2-fd474db05b81
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:35Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-thzch
  kind: Event
  lastTimestamp: "2026-04-20T20:11:35Z"
  message: CSR "csr-thzch" has been approved
  metadata:
    creationTimestamp: "2026-04-20T20:11:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T20:11:35Z"
    name: csr-thzch.18a829b17bb4875d
    namespace: default
    resourceVersion: "6240"
    uid: 86e416de-7b4a-46c9-8da9-b32ea6f5584b
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:56Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-xljlq
  kind: Event
  lastTimestamp: "2026-04-20T20:11:56Z"
  message: CSR "csr-xljlq" has been approved
  metadata:
    creationTimestamp: "2026-04-20T20:11:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T20:11:56Z"
    name: csr-xljlq.18a829b65dd2ba40
    namespace: default
    resourceVersion: "6663"
    uid: a57a2b89-5287-4d6f-8c43-0a0821005a4f
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:50Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-zpqzq
  kind: Event
  lastTimestamp: "2026-04-20T20:11:50Z"
  message: CSR "csr-zpqzq" has been approved
  metadata:
    creationTimestamp: "2026-04-20T20:11:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T20:11:50Z"
    name: csr-zpqzq.18a829b4f126651b
    namespace: default
    resourceVersion: "6597"
    uid: c059bcb4-a987-4c59-9371-d354070d6bb0
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:40Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-ztwq8
  kind: Event
  lastTimestamp: "2026-04-20T20:11:40Z"
  message: CSR "csr-ztwq8" has been approved
  metadata:
    creationTimestamp: "2026-04-20T20:11:40Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T20:11:40Z"
    name: csr-ztwq8.18a829b2c903d51b
    namespace: default
    resourceVersion: "6333"
    uid: 369849b6-259f-48b7-9a56-d6f184cae10e
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:17:27Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "13864"
    uid: 4ca3d39d-563a-45cd-9925-a63c8c333097
  kind: Event
  lastTimestamp: "2026-04-20T20:17:27Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds:
    Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-20T20:17:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T20:17:27Z"
    name: default-gateway.18a82a03801d8279
    namespace: default
    resourceVersion: "13869"
    uid: 4cbc291b-d20e-4ee7-ba43-dbd7fef9b48d
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-20T20:17:28Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14094"
    uid: 4ca3d39d-563a-45cd-9925-a63c8c333097
  kind: Event
  lastTimestamp: "2026-04-20T20:17:36Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches
    for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-20T20:17:28Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T20:17:36Z"
    name: default-gateway.18a82a03bc529b24
    namespace: default
    resourceVersion: "14335"
    uid: a73d68d4-b243-4f77-b103-8283dc53d1fa
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-20T20:17:36Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14419"
    uid: 08a5894b-37b1-4b54-bd05-893011811b8a
  kind: Event
  lastTimestamp: "2026-04-20T20:18:02Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:08a5894b-37b1-4b54-bd05-893011811b8a platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:08a5894b-37b1-4b54-bd05-893011811b8a]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-20T20:17:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T20:18:02Z"
    name: default-kserve.18a82a05b0a086dc
    namespace: default
    resourceVersion: "15885"
    uid: 92252585-b69b-454a-a6a7-8ae12efd9c39
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 31
  eventTime: null
  firstTimestamp: "2026-04-20T20:17:27Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "13822"
    uid: 086eeba3-716a-4b92-8cfc-807705d39e38
  kind: Event
  lastTimestamp: "2026-04-20T20:34:35Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring
    because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-20T20:17:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T20:34:35Z"
    name: default-monitoring.18a82a03777bddcd
    namespace: default
    resourceVersion: "35489"
    uid: fbdf886f-2a20-40ad-bd64-12736d3aa92f
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:24Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: ip-10-0-129-247.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:24Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T20:11:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:24Z"
    name: ip-10-0-129-247.ec2.internal.18a829af027b9a17
    namespace: default
    resourceVersion: "5775"
    uid: 5b603c0f-8ace-4489-9c40-cddfe520b022
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-247.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-247.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:24Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: ip-10-0-129-247.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:24Z"
  message: 'Node ip-10-0-129-247.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T20:11:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:24Z"
    name: ip-10-0-129-247.ec2.internal.18a829af0432f73d
    namespace: default
    resourceVersion: "5868"
    uid: 2258bb63-5042-451f-b259-7357719762a0
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-247.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-247.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:24Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: ip-10-0-129-247.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:24Z"
  message: 'Node ip-10-0-129-247.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T20:11:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:24Z"
    name: ip-10-0-129-247.ec2.internal.18a829af04333ab2
    namespace: default
    resourceVersion: "5869"
    uid: 52d236fb-7a45-4cd6-92c8-05fc34c76c0f
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-247.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-247.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:24Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: ip-10-0-129-247.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:24Z"
  message: 'Node ip-10-0-129-247.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T20:11:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:24Z"
    name: ip-10-0-129-247.ec2.internal.18a829af04335e2f
    namespace: default
    resourceVersion: "5871"
    uid: 84b3a0ca-a4e2-4d4a-9231-18e3a46996b6
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-247.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-247.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:24Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: ip-10-0-129-247.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:24Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T20:11:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:24Z"
    name: ip-10-0-129-247.ec2.internal.18a829af069c668c
    namespace: default
    resourceVersion: "5779"
    uid: 73c11bfb-678c-40d9-b447-c3838054922f
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-247.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-247.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:25Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    resourceVersion: "5780"
    uid: de6e9356-8894-41c5-b319-1de66c95630b
  kind: Event
  lastTimestamp: "2026-04-20T20:11:25Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T20:11:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T20:11:25Z"
    name: ip-10-0-129-247.ec2.internal.18a829af1b5b26e4
    namespace: default
    resourceVersion: "5879"
    uid: ab34810c-ddf7-4c7a-9614-8f4546a132d2
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:29Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: de6e9356-8894-41c5-b319-1de66c95630b
  kind: Event
  lastTimestamp: "2026-04-20T20:11:29Z"
  message: 'Node ip-10-0-129-247.ec2.internal event: Registered Node ip-10-0-129-247.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T20:11:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T20:11:29Z"
    name: ip-10-0-129-247.ec2.internal.18a829b02c2ad299
    namespace: default
    resourceVersion: "6068"
    uid: 0621dfb7-aa92-452c-ab48-b89d1914649c
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: ip-10-0-129-247.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:57Z"
  message: 'Node ip-10-0-129-247.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T20:11:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:57Z"
    name: ip-10-0-129-247.ec2.internal.18a829b6a4c61a24
    namespace: default
    resourceVersion: "6676"
    uid: b66e9951-12e7-4572-8ab3-5345f0a54b91
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-247.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-247.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:13:37Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-247.ec2.internal
    uid: de6e9356-8894-41c5-b319-1de66c95630b
  kind: Event
  lastTimestamp: "2026-04-20T20:13:37Z"
  message: 'Node ip-10-0-129-247.ec2.internal event: Registered Node ip-10-0-129-247.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T20:13:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T20:13:37Z"
    name: ip-10-0-129-247.ec2.internal.18a829ce08a479c7
    namespace: default
    resourceVersion: "8034"
    uid: d2ad0eb8-f7f6-41a5-825e-0db84cc2e699
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: ip-10-0-134-66.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:07Z"
  message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T20:11:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-134-66.ec2.internal.18a829ab14ccb103
    namespace: default
    resourceVersion: "5393"
    uid: 3ca12481-a224-4160-814a-9e850daa262d
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-66.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-66.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: ip-10-0-134-66.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:07Z"
  message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T20:11:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-134-66.ec2.internal.18a829ab14ccfe7d
    namespace: default
    resourceVersion: "5394"
    uid: a4a424d0-4081-418c-a2d7-4c60c7b6f976
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-66.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-66.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: ip-10-0-134-66.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:07Z"
  message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T20:11:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-134-66.ec2.internal.18a829ab14cd23c8
    namespace: default
    resourceVersion: "5396"
    uid: 79abf9d7-b64f-475b-8fdb-ceeb3cb304f1
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-66.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-66.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: ip-10-0-134-66.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:07Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T20:11:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:07Z"
    name: ip-10-0-134-66.ec2.internal.18a829ab16ada388
    namespace: default
    resourceVersion: "5347"
    uid: 80e5101a-2532-4a74-8c4a-4956a6f20db6
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-66.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-66.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:08Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    resourceVersion: "5348"
    uid: 87364306-284b-4d10-8ac5-ce814e890aee
  kind: Event
  lastTimestamp: "2026-04-20T20:11:08Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T20:11:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-134-66.ec2.internal.18a829ab2d041008
    namespace: default
    resourceVersion: "5420"
    uid: 68ca5cc3-0d03-44ce-9517-5a5b823dac44
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:09Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: 87364306-284b-4d10-8ac5-ce814e890aee
  kind: Event
  lastTimestamp: "2026-04-20T20:11:09Z"
  message: 'Node ip-10-0-134-66.ec2.internal event: Registered Node ip-10-0-134-66.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T20:11:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T20:11:09Z"
    name: ip-10-0-134-66.ec2.internal.18a829ab83e2d909
    namespace: default
    resourceVersion: "5574"
    uid: 58aa5b71-efa2-458d-acae-92f59d7d1ca9
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:41Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: ip-10-0-134-66.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:41Z"
  message: 'Node ip-10-0-134-66.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T20:11:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:41Z"
    name: ip-10-0-134-66.ec2.internal.18a829b2f8ba6c3e
    namespace: default
    resourceVersion: "6348"
    uid: e62a19d2-ba13-411b-ab74-7b7acb574b99
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-66.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-66.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:13:37Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-66.ec2.internal
    uid: 87364306-284b-4d10-8ac5-ce814e890aee
  kind: Event
  lastTimestamp: "2026-04-20T20:13:37Z"
  message: 'Node ip-10-0-134-66.ec2.internal event: Registered Node ip-10-0-134-66.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T20:13:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T20:13:37Z"
    name: ip-10-0-134-66.ec2.internal.18a829ce08a54165
    namespace: default
    resourceVersion: "8041"
    uid: f77025cc-a4b2-49f0-9295-4154e7c45300
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: ip-10-0-141-183.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:08Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T20:11:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab4b161ccc
    namespace: default
    resourceVersion: "5464"
    uid: 44898538-fbb6-44f6-b818-0820fbbfdf93
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-183.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-183.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: ip-10-0-141-183.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:08Z"
  message: 'Node ip-10-0-141-183.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T20:11:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab4d00f028
    namespace: default
    resourceVersion: "5477"
    uid: bd3ed9ba-5889-4746-af85-3a5ed7b2109b
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-183.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-183.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: ip-10-0-141-183.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:08Z"
  message: 'Node ip-10-0-141-183.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T20:11:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab4d014403
    namespace: default
    resourceVersion: "5479"
    uid: 4ca8aaf1-e090-4935-a2d9-b8ccf679045e
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-183.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-183.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: ip-10-0-141-183.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:08Z"
  message: 'Node ip-10-0-141-183.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T20:11:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab4d0174d0
    namespace: default
    resourceVersion: "5491"
    uid: bc61eb1f-a92b-4659-85b3-9248a9b950d5
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-183.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-183.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: ip-10-0-141-183.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:08Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T20:11:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:08Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab4fa85cf3
    namespace: default
    resourceVersion: "5471"
    uid: 37b786bc-8110-45c5-bcfa-e7fbc0fe90a7
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-183.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-183.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:09Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    resourceVersion: "5476"
    uid: 5dc71f44-2640-4bcc-afca-78f3743f79e2
  kind: Event
  lastTimestamp: "2026-04-20T20:11:09Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T20:11:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T20:11:09Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab64090f49
    namespace: default
    resourceVersion: "5566"
    uid: 0a004621-9ae0-430d-ba40-2b0019ecb765
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:09Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: 5dc71f44-2640-4bcc-afca-78f3743f79e2
  kind: Event
  lastTimestamp: "2026-04-20T20:11:09Z"
  message: 'Node ip-10-0-141-183.ec2.internal event: Registered Node ip-10-0-141-183.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T20:11:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T20:11:09Z"
    name: ip-10-0-141-183.ec2.internal.18a829ab83e51c95
    namespace: default
    resourceVersion: "5575"
    uid: 07e7b03b-c2c8-41ed-bbe6-6c8387002da7
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:11:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: ip-10-0-141-183.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T20:11:42Z"
  message: 'Node ip-10-0-141-183.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T20:11:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T20:11:42Z"
    name: ip-10-0-141-183.ec2.internal.18a829b311059d09
    namespace: default
    resourceVersion: "6379"
    uid: 266ee3b8-30e9-42ec-a82c-8ccf62920108
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-183.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-183.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:13:37Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-183.ec2.internal
    uid: 5dc71f44-2640-4bcc-afca-78f3743f79e2
  kind: Event
  lastTimestamp: "2026-04-20T20:13:37Z"
  message: 'Node ip-10-0-141-183.ec2.internal event: Registered Node ip-10-0-141-183.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T20:13:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T20:13:37Z"
    name: ip-10-0-141-183.ec2.internal.18a829ce08a55a29
    namespace: default
    resourceVersion: "8044"
    uid: 12b46c9b-db26-4639-9870-12b789b1e724
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 9
  eventTime: null
  firstTimestamp: "2026-04-20T20:18:39Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16162"
    uid: 58946444-1d33-4f6a-bbc4-23e6b2ddd5b0
  kind: Event
  lastTimestamp: "2026-04-20T20:18:42Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed
    to list bundles: rpc error: code = Unavailable desc = connection error: desc =
    "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-20T20:18:39Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-20T20:18:43Z"
    name: kuadrant-system.18a82a1451354d62
    namespace: default
    resourceVersion: "16380"
    uid: 88846988-4d95-4a27-9605-7eb4980db082
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-20T20:13:15Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T20:13:15Z"
    name: kube-system.18a829c8d4ee85bf
    namespace: default
    resourceVersion: "7567"
    uid: 7556773d-494a-440a-9c9e-8cc9954971c7
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6dd99b498c-2lvrl
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-20T20:13:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T20:13:30Z"
    name: kube-system.18a829cc5360a8a8
    namespace: default
    resourceVersion: "7831"
    uid: 2a88ac36-a272-460a-9eef-cb27f20b7306
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6dd99b498c-2lvrl
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-20T20:13:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T20:13:30Z"
    name: kube-system.18a829cc5399d8b2
    namespace: default
    resourceVersion: "7832"
    uid: faa89e5f-e25e-4d0a-8e20-2ac2ebc432c2
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6dd99b498c-2lvrl
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-20T20:13:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T20:13:30Z"
    name: kube-system.18a829cc53d40aab
    namespace: default
    resourceVersion: "7833"
    uid: 1f9bcd13-f004-4a8f-a90d-80a3643fc71c
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6dd99b498c-2lvrl
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-20T20:14:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T20:14:30Z"
    name: kube-system.18a829da4bea4a84
    namespace: default
    resourceVersion: "10188"
    uid: 79d54178-1ca3-4c1b-95f6-f64fd4681760
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6dd99b498c-2lvrl
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T20:06:22Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-20T20:06:22Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-20T20:06:22Z"
    name: openshift-kube-apiserver.18a829689e303c77
    namespace: default
    resourceVersion: "274"
    uid: d60138ba-1d02-4502-90a2-b9c4c208e90c
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-66f89db4b8-88x87
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46808"