---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:31Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-bfdc9
  kind: Event
  lastTimestamp: "2026-04-21T14:26:31Z"
  message: CSR "csr-bfdc9" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:26:31Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:26:31Z"
    name: csr-bfdc9.18a865719b8b18e4
    namespace: default
    resourceVersion: "6758"
    uid: 5e805849-3380-4d17-b29c-b1b914189a4e
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:06Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-bj8p8
  kind: Event
  lastTimestamp: "2026-04-21T14:26:06Z"
  message: CSR "csr-bj8p8" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:26:06Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:26:06Z"
    name: csr-bj8p8.18a8656bdfb6f0a8
    namespace: default
    resourceVersion: "6199"
    uid: cd7cb22d-9ee6-4958-81d0-7fa7fb67605d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:01Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-g7zcn
  kind: Event
  lastTimestamp: "2026-04-21T14:26:01Z"
  message: CSR "csr-g7zcn" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:26:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:26:01Z"
    name: csr-g7zcn.18a8656aa3ab7a7e
    namespace: default
    resourceVersion: "6147"
    uid: 469bc7f8-a925-4804-8b03-25ffcccdcf02
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:51Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-hzfml
  kind: Event
  lastTimestamp: "2026-04-21T14:25:51Z"
  message: CSR "csr-hzfml" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:25:51Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:25:51Z"
    name: csr-hzfml.18a865682abd60dc
    namespace: default
    resourceVersion: "5961"
    uid: 9af3c2ca-1ef3-46d4-ae56-8daa7bfa75cf
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:38Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-mdnz7
  kind: Event
  lastTimestamp: "2026-04-21T14:26:38Z"
  message: CSR "csr-mdnz7" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:26:38Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:26:38Z"
    name: csr-mdnz7.18a865732f21da89
    namespace: default
    resourceVersion: "6833"
    uid: b60c0203-ad53-4815-ba8a-0760a4562166
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:46Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-rj874
  kind: Event
  lastTimestamp: "2026-04-21T14:25:46Z"
  message: CSR "csr-rj874" has been approved
  metadata:
    creationTimestamp: "2026-04-21T14:25:46Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-21T14:25:46Z"
    name: csr-rj874.18a8656706b27c20
    namespace: default
    resourceVersion: "5859"
    uid: 2e3c1bbf-8d42-4b8f-8547-179007bd5a9c
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:37:44Z"
  involvedObject:
    apiVersion: v1
    kind: Endpoints
    name: data-science-gateway-data-science-gateway-class
  kind: Event
  lastTimestamp: "2026-04-21T14:37:44Z"
  message: 'Failed to create endpoint for service openshift-ingress/data-science-gateway-data-science-gateway-class: endpoints "data-science-gateway-data-science-gateway-class" already exists'
  metadata:
    creationTimestamp: "2026-04-21T14:37:44Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:37:44Z"
    name: data-science-gateway-data-science-gateway-class.18a8660e4fef1169
    namespace: default
    resourceVersion: "17143"
    uid: 7f047c59-a58e-4f24-b88c-b63d82602281
  reason: FailedToCreateEndpoint
  reportingComponent: endpoint-controller
  reportingInstance: ""
  source:
    component: endpoint-controller
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:37:07Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "15007"
    uid: 4894eb7f-880a-4a14-a70e-41639f245236
  kind: Event
  lastTimestamp: "2026-04-21T14:37:07Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-21T14:37:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T14:37:07Z"
    name: default-gateway.18a86605bbe9bb67
    namespace: default
    resourceVersion: "15010"
    uid: cfc1c781-9dce-4561-9ff8-cb3ccaec30cf
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-21T14:37:09Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "15339"
    uid: 4894eb7f-880a-4a14-a70e-41639f245236
  kind: Event
  lastTimestamp: "2026-04-21T14:37:21Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-21T14:37:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T14:37:21Z"
    name: default-gateway.18a8660616eba74d
    namespace: default
    resourceVersion: "15792"
    uid: 34f89d77-56be-4f5c-8170-60b898eaa42a
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-21T14:37:19Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "15759"
    uid: 391f2686-98cb-4da3-803b-0c963937ecb9
  kind: Event
  lastTimestamp: "2026-04-21T14:37:47Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:391f2686-98cb-4da3-803b-0c963937ecb9 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:391f2686-98cb-4da3-803b-0c963937ecb9]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-21T14:37:19Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T14:37:47Z"
    name: default-kserve.18a86608774e47e8
    namespace: default
    resourceVersion: "17233"
    uid: e4deabc1-795b-4a50-bedb-3e1c8cbb6efd
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 31
  eventTime: null
  firstTimestamp: "2026-04-21T14:37:07Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14991"
    uid: f9ccf2b5-9ffc-4a9d-921d-a0f1335203fe
  kind: Event
  lastTimestamp: "2026-04-21T14:56:46Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-21T14:37:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-21T14:56:46Z"
    name: default-monitoring.18a86605b7ff515a
    namespace: default
    resourceVersion: "37835"
    uid: 62020581-e769-439f-a47a-57ce6c97a3fb
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:35Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: ip-10-0-138-110.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:35Z"
  message: 'Node ip-10-0-138-110.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-21T14:25:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:36Z"
    name: ip-10-0-138-110.ec2.internal.18a8656497faecd6
    namespace: default
    resourceVersion: "5577"
    uid: 6a43ecca-54dc-414c-b1f9-920d0055b0b0
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-110.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-110.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:35Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: ip-10-0-138-110.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:35Z"
  message: 'Node ip-10-0-138-110.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-21T14:25:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:36Z"
    name: ip-10-0-138-110.ec2.internal.18a8656497fb47b0
    namespace: default
    resourceVersion: "5581"
    uid: 56fc330d-175e-40c2-8639-bedf70157517
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-110.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-110.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:35Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: ip-10-0-138-110.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:35Z"
  message: 'Node ip-10-0-138-110.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-21T14:25:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:36Z"
    name: ip-10-0-138-110.ec2.internal.18a8656497fb6e22
    namespace: default
    resourceVersion: "5584"
    uid: 4f706e1b-17b0-45e2-8148-7039f1b8aeb8
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-110.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-110.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:35Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: ip-10-0-138-110.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:35Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-21T14:25:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:35Z"
    name: ip-10-0-138-110.ec2.internal.18a865649b073955
    namespace: default
    resourceVersion: "5515"
    uid: 5872fdab-4d91-4d4d-8bf1-9290d2584c34
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-110.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-110.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:36Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    resourceVersion: "5517"
    uid: 667d9e23-5672-49bb-ae22-46861a9aac9d
  kind: Event
  lastTimestamp: "2026-04-21T14:25:36Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-21T14:25:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-21T14:25:36Z"
    name: ip-10-0-138-110.ec2.internal.18a86564b048d5ce
    namespace: default
    resourceVersion: "5609"
    uid: 943ee29f-55ef-40b5-988d-7cee41f4c3c4
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:37Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: 667d9e23-5672-49bb-ae22-46861a9aac9d
  kind: Event
  lastTimestamp: "2026-04-21T14:25:37Z"
  message: 'Node ip-10-0-138-110.ec2.internal event: Registered Node ip-10-0-138-110.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:25:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:25:37Z"
    name: ip-10-0-138-110.ec2.internal.18a86565100d2cb4
    namespace: default
    resourceVersion: "5644"
    uid: 690ffbb3-a4d5-4906-b9c6-5794c903eb75
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: ip-10-0-138-110.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:08Z"
  message: 'Node ip-10-0-138-110.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-21T14:26:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:08Z"
    name: ip-10-0-138-110.ec2.internal.18a8656c25618d15
    namespace: default
    resourceVersion: "6337"
    uid: 6b6d1d08-890d-4df4-9213-4a561e25b2c7
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-110.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-110.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:28:12Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-110.ec2.internal
    uid: 667d9e23-5672-49bb-ae22-46861a9aac9d
  kind: Event
  lastTimestamp: "2026-04-21T14:28:12Z"
  message: 'Node ip-10-0-138-110.ec2.internal event: Registered Node ip-10-0-138-110.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:28:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:28:12Z"
    name: ip-10-0-138-110.ec2.internal.18a86589119b1e3e
    namespace: default
    resourceVersion: "7982"
    uid: e69ac668-f4da-404d-813b-8240b990d03b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: ip-10-0-138-93.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:21Z"
  message: 'Node ip-10-0-138-93.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-21T14:25:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:21Z"
    name: ip-10-0-138-93.ec2.internal.18a865612339af74
    namespace: default
    resourceVersion: "5304"
    uid: 418f4808-ad22-48c5-ae60-989e3f192d73
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-93.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-93.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: ip-10-0-138-93.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:21Z"
  message: 'Node ip-10-0-138-93.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-21T14:25:20Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:21Z"
    name: ip-10-0-138-93.ec2.internal.18a86561233a0a59
    namespace: default
    resourceVersion: "5308"
    uid: 4c0dea4a-1193-47c1-99d0-294612aa23fa
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-93.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-93.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: ip-10-0-138-93.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:21Z"
  message: 'Node ip-10-0-138-93.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-21T14:25:20Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:21Z"
    name: ip-10-0-138-93.ec2.internal.18a86561233a3db9
    namespace: default
    resourceVersion: "5316"
    uid: a90d3aa4-9ad3-44bc-8d18-81be97ae5a72
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-93.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-93.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: ip-10-0-138-93.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:20Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-21T14:25:20Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:20Z"
    name: ip-10-0-138-93.ec2.internal.18a8656126669ee5
    namespace: default
    resourceVersion: "5276"
    uid: 379373d0-7ce4-458b-81b8-3e234549e90c
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-93.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-93.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    resourceVersion: "5278"
    uid: a1930672-4b86-49ea-a881-acbb0af16795
  kind: Event
  lastTimestamp: "2026-04-21T14:25:21Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-21T14:25:21Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-21T14:25:21Z"
    name: ip-10-0-138-93.ec2.internal.18a865613e59a634
    namespace: default
    resourceVersion: "5338"
    uid: d61941da-406f-44ad-956c-0186e1d67b94
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:22Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: a1930672-4b86-49ea-a881-acbb0af16795
  kind: Event
  lastTimestamp: "2026-04-21T14:25:22Z"
  message: 'Node ip-10-0-138-93.ec2.internal event: Registered Node ip-10-0-138-93.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:25:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:25:22Z"
    name: ip-10-0-138-93.ec2.internal.18a8656191d60848
    namespace: default
    resourceVersion: "5432"
    uid: 1105100e-3e44-40be-9671-4bf2e9b86563
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:25:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: ip-10-0-138-93.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:25:52Z"
  message: 'Node ip-10-0-138-93.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-21T14:25:52Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:25:52Z"
    name: ip-10-0-138-93.ec2.internal.18a86568754fe7ec
    namespace: default
    resourceVersion: "5978"
    uid: c5ede575-4137-4b5c-8e35-58eb4f477834
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-93.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-93.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:28:12Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-93.ec2.internal
    uid: a1930672-4b86-49ea-a881-acbb0af16795
  kind: Event
  lastTimestamp: "2026-04-21T14:28:12Z"
  message: 'Node ip-10-0-138-93.ec2.internal event: Registered Node ip-10-0-138-93.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:28:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:28:12Z"
    name: ip-10-0-138-93.ec2.internal.18a86589119d2b3b
    namespace: default
    resourceVersion: "7992"
    uid: bc3a7d7a-9122-4c41-aaa0-c9fadaf19482
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: ip-10-0-141-61.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656be5c3da9f
    namespace: default
    resourceVersion: "6206"
    uid: 87cf0e8c-348e-4ddc-906f-bd65d432f11f
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-61.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-61.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: ip-10-0-141-61.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: 'Node ip-10-0-141-61.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656be7929582
    namespace: default
    resourceVersion: "6212"
    uid: 85ddc0fb-6486-4f0e-94c6-81f59fd00418
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-61.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-61.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: ip-10-0-141-61.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: 'Node ip-10-0-141-61.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656be793130c
    namespace: default
    resourceVersion: "6213"
    uid: 8cc92680-7c84-4182-9ccc-3007ffbf0dd9
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-61.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-61.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: ip-10-0-141-61.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: 'Node ip-10-0-141-61.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656be7934733
    namespace: default
    resourceVersion: "6222"
    uid: 55afa92d-a0cf-4419-ac4f-aa08313f66c4
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-61.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-61.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: ip-10-0-141-61.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656bea59d57f
    namespace: default
    resourceVersion: "6210"
    uid: 46f45e01-c21e-4e95-9c32-d248a56d8461
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-61.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-61.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    resourceVersion: "6211"
    uid: cfa98bb6-8a54-489e-9ce9-1c05c41798f2
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656c01c929df
    namespace: default
    resourceVersion: "6262"
    uid: b4783533-dc94-4c86-aeef-6335b133502d
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:07Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: cfa98bb6-8a54-489e-9ce9-1c05c41798f2
  kind: Event
  lastTimestamp: "2026-04-21T14:26:07Z"
  message: 'Node ip-10-0-141-61.ec2.internal event: Registered Node ip-10-0-141-61.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:26:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:26:07Z"
    name: ip-10-0-141-61.ec2.internal.18a8656c0c5899dd
    namespace: default
    resourceVersion: "6304"
    uid: 3a7c662b-306c-4484-becc-77ce8c446614
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:33Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    resourceVersion: "6773"
    uid: cfa98bb6-8a54-489e-9ce9-1c05c41798f2
  kind: Event
  lastTimestamp: "2026-04-21T14:26:33Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-141-61.ec2.internal, error getting gateway config for node ip-10-0-141-61.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-141-61.ec2.internal", failed to update chassis to local for local node ip-10-0-141-61.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-141-61.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-141-61.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-21T14:26:33Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-141-61
      operation: Update
      time: "2026-04-21T14:26:33Z"
    name: ip-10-0-141-61.ec2.internal.18a86571f9d7134b
    namespace: default
    resourceVersion: "6774"
    uid: f7362ccd-be2e-4a32-aa94-4da8df9078cf
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:26:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: ip-10-0-141-61.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-21T14:26:39Z"
  message: 'Node ip-10-0-141-61.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-21T14:26:39Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-21T14:26:39Z"
    name: ip-10-0-141-61.ec2.internal.18a865738864d8dc
    namespace: default
    resourceVersion: "6850"
    uid: b3e613d7-dc6c-47a7-a5c3-f0ae8527bfdc
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-61.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-61.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:28:12Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-61.ec2.internal
    uid: cfa98bb6-8a54-489e-9ce9-1c05c41798f2
  kind: Event
  lastTimestamp: "2026-04-21T14:28:12Z"
  message: 'Node ip-10-0-141-61.ec2.internal event: Registered Node ip-10-0-141-61.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-21T14:28:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-21T14:28:12Z"
    name: ip-10-0-141-61.ec2.internal.18a86589119d4e3c
    namespace: default
    resourceVersion: "7996"
    uid: 5d647ce8-a87f-49b5-83e7-72a175e66e3b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-21T14:38:20Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "17478"
    uid: a40489a4-d317-4083-8015-4faec39ae79f
  kind: Event
  lastTimestamp: "2026-04-21T14:38:23Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-21T14:38:20Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-21T14:38:24Z"
    name: kuadrant-system.18a86616948d2310
    namespace: default
    resourceVersion: "17711"
    uid: 9ecbd4b5-4ad3-4335-872d-0d31f5f468ca
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-21T14:27:52Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:27:52Z"
    name: kube-system.18a86584709a4807
    namespace: default
    resourceVersion: "7619"
    uid: 7c17ca12-df33-4377-be9d-fdc0eb07b96e
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6458875d9-ntg9s
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-21T14:28:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:28:07Z"
    name: kube-system.18a86587ef063344
    namespace: default
    resourceVersion: "7864"
    uid: dca98831-b476-4f14-a9c8-b15e2ed31fc8
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6458875d9-ntg9s
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-21T14:28:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:28:07Z"
    name: kube-system.18a86587ef4d67b7
    namespace: default
    resourceVersion: "7867"
    uid: 89cce990-68e9-47ac-97da-8fa95dcbf60b
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6458875d9-ntg9s
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-21T14:28:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:28:07Z"
    name: kube-system.18a86587efc400a0
    namespace: default
    resourceVersion: "7868"
    uid: f7c02003-2d4c-4e91-9212-15c715897813
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6458875d9-ntg9s
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-21T14:29:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-21T14:29:07Z"
    name: kube-system.18a86595e7b74410
    namespace: default
    resourceVersion: "10154"
    uid: a32846f6-f29b-4ea1-a986-7bdfe193a4fa
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6458875d9-ntg9s
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-21T14:20:37Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-21T14:20:37Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-21T14:20:37Z"
    name: openshift-kube-apiserver.18a8651f0b891b9f
    namespace: default
    resourceVersion: "275"
    uid: e8e45600-0798-431b-879b-1d2646ac329f
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-69b8fb665b-72x5t
  type: Warning
kind: EventList
metadata:
  resourceVersion: "50788"