---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:44:49Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-16T20:44:49Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-16T20:44:49Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: cert-manager-operator
      operation: Update
      time: "2026-04-16T20:44:49Z"
    name: 02140b14-1cac-4baf-89a6-da3e45d6d793
    namespace: default
    resourceVersion: "11981"
    uid: ead0b010-f2ec-418b-9d92-c533394b5702
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:20Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-5c9dv
  kind: Event
  lastTimestamp: "2026-04-16T20:38:20Z"
  message: CSR "csr-5c9dv" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:38:20Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T20:38:20Z"
    name: csr-5c9dv.18a6f0d4dab42a2a
    namespace: default
    resourceVersion: "6227"
    uid: f9b96b78-c60b-47a7-afed-3d995575ec3a
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:24Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-6lwd6
  kind: Event
  lastTimestamp: "2026-04-16T20:38:24Z"
  message: CSR "csr-6lwd6" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:38:24Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T20:38:24Z"
    name: csr-6lwd6.18a6f0d5d9ec5d48
    namespace: default
    resourceVersion: "6334"
    uid: 2617b84c-f9c1-4f7a-9947-f4600cf4565d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:09Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-dxmr9
  kind: Event
  lastTimestamp: "2026-04-16T20:38:09Z"
  message: CSR "csr-dxmr9" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:38:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T20:38:09Z"
    name: csr-dxmr9.18a6f0d25821bce9
    namespace: default
    resourceVersion: "5873"
    uid: bddaed6f-3b2f-463a-9490-46cd46f8ae43
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:30Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-fjtdx
  kind: Event
  lastTimestamp: "2026-04-16T20:38:30Z"
  message: CSR "csr-fjtdx" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:38:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T20:38:30Z"
    name: csr-fjtdx.18a6f0d7373e4124
    namespace: default
    resourceVersion: "6426"
    uid: cfb66589-2d4a-4db5-80e7-1d1ec7d106f6
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:14Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-k5jk7
  kind: Event
  lastTimestamp: "2026-04-16T20:38:14Z"
  message: CSR "csr-k5jk7" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:38:14Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T20:38:14Z"
    name: csr-k5jk7.18a6f0d38bfad519
    namespace: default
    resourceVersion: "6023"
    uid: 99a3f6a2-028e-4b7a-8bd2-815b46864a2a
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:16Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-nfpcs
  kind: Event
  lastTimestamp: "2026-04-16T20:38:16Z"
  message: CSR "csr-nfpcs" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:38:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T20:38:16Z"
    name: csr-nfpcs.18a6f0d40bc53309
    namespace: default
    resourceVersion: "6061"
    uid: cde48de7-3e9f-460c-bc59-960d4afb1811
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-16T20:45:50Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14312"
    uid: 82aec557-abe6-4e4a-ba3d-305d021ba0a0
  kind: Event
  lastTimestamp: "2026-04-16T20:45:57Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches
    for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-16T20:45:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-16T20:45:57Z"
    name: default-gateway.18a6f13dc91016e8
    namespace: default
    resourceVersion: "14528"
    uid: b88c62e9-0683-4dfa-ab92-f3e8f2ea8a2e
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-16T20:45:59Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "14641" uid: 27bb6ab2-d49d-4b89-8861-10d05296e91c kind: Event lastTimestamp: "2026-04-16T20:46:26Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:27bb6ab2-d49d-4b89-8861-10d05296e91c platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:27bb6ab2-d49d-4b89-8861-10d05296e91c]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. 
NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] 
image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-16T20:45:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: 
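# Note: the apply above failed because the LLMInferenceServiceConfig validating
# webhook had no ready endpoints yet; this is usually transient while the
# kserve webhook pods are still starting. Possible checks (commands are
# illustrative, not from this log; names come from the event message):
#
#   kubectl -n opendatahub get endpoints kserve-webhook-server-service
#   kubectl -n opendatahub get pods -o wide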
- apiVersion: v1
  count: 29
  eventTime: null
  firstTimestamp: "2026-04-16T20:45:49Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14050"
    uid: c3c0f203-1c77-4af2-b82a-8d83e981e8bb
  kind: Event
  lastTimestamp: "2026-04-16T21:02:57Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring
    because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-16T20:45:49Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-16T21:02:57Z"
    name: default-monitoring.18a6f13d7bdf8b80
    namespace: default
    resourceVersion: "35319"
    uid: 11ca98df-3582-4842-a173-3327b5379360
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
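# Note: "unknown namespace for the cache" above usually comes from a
# controller-runtime client whose informer cache is scoped to a namespace list
# that does not include opendatahub-monitoring. Confirming the namespace exists
# is a reasonable first check (illustrative command):
#
#   kubectl get namespace opendatahub-monitoring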
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:41Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: ip-10-0-129-199.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:41Z"
  message: 'Node ip-10-0-129-199.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T20:37:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:41Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0cbe167c5ef
    namespace: default
    resourceVersion: "5143"
    uid: 013a0575-9657-47d2-bc35-ab7692633ec1
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-199.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-199.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:41Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: ip-10-0-129-199.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:41Z"
  message: 'Node ip-10-0-129-199.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T20:37:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:41Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0cbe1680681
    namespace: default
    resourceVersion: "5144"
    uid: 13f92d5b-df90-4201-bfac-3df7e0098ba2
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-199.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-199.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:41Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: ip-10-0-129-199.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:41Z"
  message: 'Node ip-10-0-129-199.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T20:37:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:41Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0cbe1682c3c
    namespace: default
    resourceVersion: "5145"
    uid: faf4e09c-63ec-4def-8c3f-3699e077df84
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-199.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-199.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:41Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: ip-10-0-129-199.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:41Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T20:37:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:41Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0cbe49f6dc5
    namespace: default
    resourceVersion: "5079"
    uid: d0c3c818-e1c0-41f6-b805-699c71d5ac9c
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-199.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-199.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:41Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    resourceVersion: "5080"
    uid: eacb0efe-861c-4e7b-82aa-4a9ad6b5a466
  kind: Event
  lastTimestamp: "2026-04-16T20:37:41Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T20:37:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T20:37:41Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0cbfb5d1416
    namespace: default
    resourceVersion: "5172"
    uid: 84e9c43b-5dc1-4f29-a931-9a8f9d951536
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:43Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: eacb0efe-861c-4e7b-82aa-4a9ad6b5a466
  kind: Event
  lastTimestamp: "2026-04-16T20:37:43Z"
  message: 'Node ip-10-0-129-199.ec2.internal event: Registered Node ip-10-0-129-199.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:37:43Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T20:37:43Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0cc6daa598f
    namespace: default
    resourceVersion: "5245"
    uid: d32742f9-d9f5-4a90-8495-bd4c3517ee8e
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: ip-10-0-129-199.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:38:17Z"
  message: 'Node ip-10-0-129-199.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T20:38:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:38:17Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0d4585b8b14
    namespace: default
    resourceVersion: "6082"
    uid: 36f00287-735b-40ba-af01-189f29d91894
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-199.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-199.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:40:30Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-199.ec2.internal
    uid: eacb0efe-861c-4e7b-82aa-4a9ad6b5a466
  kind: Event
  lastTimestamp: "2026-04-16T20:40:30Z"
  message: 'Node ip-10-0-129-199.ec2.internal event: Registered Node ip-10-0-129-199.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:40:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T20:40:30Z"
    name: ip-10-0-129-199.ec2.internal.18a6f0f31d6ce837
    namespace: default
    resourceVersion: "7661"
    uid: d186687b-57a7-4d1d-9c7e-908063c959a2
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: ip-10-0-134-79.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:59Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-16T20:37:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:59Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d0055e8699
    namespace: default
    resourceVersion: "5560"
    uid: 0b9eddbc-6024-4f82-92a5-991b1b388f37
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-79.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-79.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: ip-10-0-134-79.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:59Z"
  message: 'Node ip-10-0-134-79.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T20:37:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:59Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d007215ccc
    namespace: default
    resourceVersion: "5568"
    uid: ec7382bb-2ca7-4f35-907c-5c1a7606dc62
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-79.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-79.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: ip-10-0-134-79.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:59Z"
  message: 'Node ip-10-0-134-79.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T20:37:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:59Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d00721a082
    namespace: default
    resourceVersion: "5570"
    uid: ac66ebd1-c732-4417-a260-8b535ae82168
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-79.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-79.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: ip-10-0-134-79.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:59Z"
  message: 'Node ip-10-0-134-79.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T20:37:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:59Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d00721c507
    namespace: default
    resourceVersion: "5578"
    uid: b17b1d05-6ea4-4034-893f-3d39c92c6a57
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-79.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-79.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: ip-10-0-134-79.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:59Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T20:37:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:59Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d009dad37a
    namespace: default
    resourceVersion: "5564"
    uid: 21e1fab6-db10-41a6-9c28-048a61aa0694
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-79.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-79.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:59Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    resourceVersion: "5567"
    uid: 9dd3a99e-8cdf-47dd-aeb9-e02f3b3f235d
  kind: Event
  lastTimestamp: "2026-04-16T20:37:59Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T20:37:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T20:37:59Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d01f43ee22
    namespace: default
    resourceVersion: "5667"
    uid: d99f1a7e-b0ee-49f2-9582-957d220a7642
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:03Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: 9dd3a99e-8cdf-47dd-aeb9-e02f3b3f235d
  kind: Event
  lastTimestamp: "2026-04-16T20:38:03Z"
  message: 'Node ip-10-0-134-79.ec2.internal event: Registered Node ip-10-0-134-79.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:38:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T20:38:03Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d115e784ac
    namespace: default
    resourceVersion: "5753"
    uid: 1c2deeb3-fb9a-453c-92e0-f801bb0d79af
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:31Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: ip-10-0-134-79.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:38:31Z"
  message: 'Node ip-10-0-134-79.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T20:38:31Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:38:31Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0d79054d59f
    namespace: default
    resourceVersion: "6439"
    uid: da6392dd-f84b-44d5-9794-4a9611200d4c
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-79.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-79.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:40:30Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-79.ec2.internal
    uid: 9dd3a99e-8cdf-47dd-aeb9-e02f3b3f235d
  kind: Event
  lastTimestamp: "2026-04-16T20:40:30Z"
  message: 'Node ip-10-0-134-79.ec2.internal event: Registered Node ip-10-0-134-79.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:40:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T20:40:30Z"
    name: ip-10-0-134-79.ec2.internal.18a6f0f31d6c2888
    namespace: default
    resourceVersion: "7656"
    uid: 21b1da47-bff0-4eaf-9e0e-df3f24b1cdcf
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: ip-10-0-142-90.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:50Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-16T20:37:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:50Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0cdebaf22da
    namespace: default
    resourceVersion: "5316"
    uid: 661f3dd5-dacf-47d6-b3a1-e7f779342dc3
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-90.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-90.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: ip-10-0-142-90.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:50Z"
  message: 'Node ip-10-0-142-90.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T20:37:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:50Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0cdf4a66655
    namespace: default
    resourceVersion: "5322"
    uid: 685dce55-19e8-45e4-bb04-d45df41961b4
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-90.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-90.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: ip-10-0-142-90.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:50Z"
  message: 'Node ip-10-0-142-90.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T20:37:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:50Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0cdf4a6aae1
    namespace: default
    resourceVersion: "5329"
    uid: 8d6e28ef-4e8b-44c5-adc7-608bb1000b1d
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-90.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-90.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: ip-10-0-142-90.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:50Z"
  message: 'Node ip-10-0-142-90.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T20:37:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:50Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0cdf4a6cf53
    namespace: default
    resourceVersion: "5341"
    uid: e00604d8-98ee-40be-9090-8ddbf203651b
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-90.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-90.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: ip-10-0-142-90.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:37:50Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T20:37:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:37:50Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0cdf71f5562
    namespace: default
    resourceVersion: "5320"
    uid: fe60d200-9cf4-4ddf-966c-3e41eab8a8a2
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-90.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-90.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:50Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    resourceVersion: "5321"
    uid: 00c81832-7906-4d55-b645-2c3256cf37e2
  kind: Event
  lastTimestamp: "2026-04-16T20:37:50Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T20:37:50Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T20:37:50Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0ce0c647509
    namespace: default
    resourceVersion: "5409"
    uid: d24ac265-6c22-4084-b7a9-4e5559170fa4
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:37:53Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: 00c81832-7906-4d55-b645-2c3256cf37e2
  kind: Event
  lastTimestamp: "2026-04-16T20:37:53Z"
  message: 'Node ip-10-0-142-90.ec2.internal event: Registered Node ip-10-0-142-90.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:37:53Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T20:37:53Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0cec1cc7b44
    namespace: default
    resourceVersion: "5511"
    uid: 29053a21-2a96-4871-8f96-19596df5b7cc
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:15Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    resourceVersion: "6045"
    uid: 00c81832-7906-4d55-b645-2c3256cf37e2
  kind: Event
  lastTimestamp: "2026-04-16T20:38:15Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-142-90.ec2.internal,
    error getting gateway config for node ip-10-0-142-90.ec2.internal: k8s.ovn.org/l3-gateway-config
    annotation not found for node "ip-10-0-142-90.ec2.internal", failed to update chassis
    to local for local node ip-10-0-142-90.ec2.internal, error: failed to parse node
    chassis-id for node - ip-10-0-142-90.ec2.internal, error: k8s.ovn.org/node-chassis-id
    annotation not found for node ip-10-0-142-90.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T20:38:15Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-142-90
      operation: Update
      time: "2026-04-16T20:38:15Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0d3e950afc3
    namespace: default
    resourceVersion: "6047"
    uid: c3f268ee-6451-4b5c-9ac1-89eb016e596b
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:38:21Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: ip-10-0-142-90.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:38:21Z"
  message: 'Node ip-10-0-142-90.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T20:38:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T20:38:21Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0d53b9b3f69
    namespace: default
    resourceVersion: "6280"
    uid: 12bcfba8-9a08-4673-a225-fdeafc607748
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-90.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-90.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:40:30Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-90.ec2.internal
    uid: 00c81832-7906-4d55-b645-2c3256cf37e2
  kind: Event
  lastTimestamp: "2026-04-16T20:40:30Z"
  message: 'Node ip-10-0-142-90.ec2.internal event: Registered Node ip-10-0-142-90.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:40:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T20:40:30Z"
    name: ip-10-0-142-90.ec2.internal.18a6f0f31d6ccff5
    namespace: default
    resourceVersion: "7660"
    uid: 9b31e8ac-635c-431b-adc1-40751064e89b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-16T20:47:01Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16426"
    uid: fafbf755-8b3a-4a94-bef4-91623303d407
  kind: Event
  lastTimestamp: "2026-04-16T20:47:04Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed
    to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error
    reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-16T20:47:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-16T20:47:05Z"
    name: kuadrant-system.18a6f14e554eea81
    namespace: default
    resourceVersion: "16650"
    uid: e8cca990-c73f-4bd0-bc21-a067c2dc488c
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
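# Note: "error reading server preface: http2: frame too large" above usually
# means the gRPC client reached a port that is not speaking plaintext gRPC (for
# example a TLS-serving or misconfigured registry pod). Possible checks
# (illustrative commands; olm.catalogSource is the label OLM normally applies
# to registry pods):
#
#   kubectl -n kuadrant-system get catalogsource kuadrant-operator-catalog -o yaml
#   kubectl -n kuadrant-system get pods -l olm.catalogSource=kuadrant-operator-catalog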
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-16T20:40:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T20:40:08Z"
    name: kube-system.18a6f0ee1599c64d
    namespace: default
    resourceVersion: "7334"
    uid: 82394859-3dc4-43ae-9f12-c093c80a6ce4
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-58498667bf-m2c7n
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-16T20:40:23Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T20:40:23Z"
    name: kube-system.18a6f0f19401f784
    namespace: default
    resourceVersion: "7585"
    uid: 87b8c9a6-a0f4-479a-afae-68428b5907a0
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-58498667bf-m2c7n
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-16T20:40:23Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T20:40:23Z"
    name: kube-system.18a6f0f194a0997c
    namespace: default
    resourceVersion: "7586"
    uid: 9c12a2a7-2c79-4c41-9cb9-d5d5fc242cbc
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-58498667bf-m2c7n
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-16T20:40:23Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T20:40:23Z"
    name: kube-system.18a6f0f1952bdb7f
    namespace: default
    resourceVersion: "7587"
    uid: 2be255d9-b7da-47a4-a112-446fd4818b50
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-58498667bf-m2c7n
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-16T20:41:23Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T20:41:23Z"
    name: kube-system.18a6f0ff8d0dc00a
    namespace: default
    resourceVersion: "9847"
    uid: 41f10127-ae05-47ff-9372-770c76c323ab
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-58498667bf-m2c7n
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:33:18Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-16T20:33:18Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-16T20:33:18Z"
    name: openshift-kube-apiserver.18a6f08e929c3be8
    namespace: default
    resourceVersion: "274"
    uid: 5c6ab2e4-146f-4b3d-863a-f3963dc63805
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-5cf4597bc8-ftscj
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46028"