---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:57:19Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-20T07:57:19Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-20T07:57:19Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: cert-manager-operator
      operation: Update
      time: "2026-04-20T07:57:19Z"
    name: 249d207b-256d-45d2-ac88-c33ea401b3f3
    namespace: default
    resourceVersion: "12009"
    uid: 879d32b8-fa76-4aaf-bb1d-fcbf974e0359
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:35Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-bgnf6
  kind: Event
  lastTimestamp: "2026-04-20T07:50:35Z"
  message: CSR "csr-bgnf6" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:50:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T07:50:35Z"
    name: csr-bgnf6.18a80141c77cd8ca
    namespace: default
    resourceVersion: "6284"
    uid: eb79ec61-259d-46fc-b823-10ea2ef24377
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:31Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-dt5kf
  kind: Event
  lastTimestamp: "2026-04-20T07:50:31Z"
  message: CSR "csr-dt5kf" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:50:31Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T07:50:31Z"
    name: csr-dt5kf.18a80140f7bac239
    namespace: default
    resourceVersion: "6222"
    uid: 88b43ca8-6e81-4f31-971c-2e76484f10db
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:37Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-jqbd9
  kind: Event
  lastTimestamp: "2026-04-20T07:50:37Z"
  message: CSR "csr-jqbd9" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:50:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T07:50:37Z"
    name: csr-jqbd9.18a801424f727c31
    namespace: default
    resourceVersion: "6404"
    uid: 99d40610-8a01-4c28-9066-cb9a44202d91
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:48Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-n4gnd
  kind: Event
  lastTimestamp: "2026-04-20T07:50:48Z"
  message: CSR "csr-n4gnd" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:50:48Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T07:50:48Z"
    name: csr-n4gnd.18a80144ce7f6d12
    namespace: default
    resourceVersion: "6635"
    uid: 5e3ce1fd-9fc2-4f70-a776-de1621205b4b
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:42Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-rffwt
  kind: Event
  lastTimestamp: "2026-04-20T07:50:42Z"
  message: CSR "csr-rffwt" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:50:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T07:50:42Z"
    name: csr-rffwt.18a801437bd47e7a
    namespace: default
    resourceVersion: "6530"
    uid: a3989784-d88b-4ab6-af3d-c48065a167f0
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:29Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-vg8mr
  kind: Event
  lastTimestamp: "2026-04-20T07:50:29Z"
  message: CSR "csr-vg8mr" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:50:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-20T07:50:29Z"
    name: csr-vg8mr.18a801406cb48237
    namespace: default
    resourceVersion: "6173"
    uid: 896a0e7a-f48c-427a-80de-154a95e778c7
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-20T07:58:11Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14749"
    uid: 720e54ea-3269-47f3-a55e-eda62af615b0
  kind: Event
  lastTimestamp: "2026-04-20T07:58:19Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-20T07:58:11Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T07:58:19Z"
    name: default-gateway.18a801ac069f5b9c
    namespace: default
    resourceVersion: "14994"
    uid: 8c773db0-7999-482b-b4a0-97bf46473933
  reason: ProvisioningError
  reportingComponent: gatewayconfig
reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-20T07:58:20Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "15077" uid: b44b9d7d-8a64-47a1-9c03-5db8e52b0184 kind: Event lastTimestamp: "2026-04-20T07:58:47Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:b44b9d7d-8a64-47a1-9c03-5db8e52b0184 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:b44b9d7d-8a64-47a1-9c03-5db8e52b0184]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. 
NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] 
image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-20T07:58:20Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: 
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T07:58:47Z"
    name: default-kserve.18a801ae10fee1d1
    namespace: default
    resourceVersion: "16536"
    uid: 1968572a-7955-4e26-a50c-4e5de4df5daf
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 30
  eventTime: null
  firstTimestamp: "2026-04-20T07:58:09Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14448"
    uid: 228a9e38-7f3e-484a-81cf-7a03eb6a578b
  kind: Event
  lastTimestamp: "2026-04-20T08:15:04Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-20T07:58:09Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-20T08:15:04Z"
    name: default-monitoring.18a801aba6a809a4
    namespace: default
    resourceVersion: "35306"
    uid: daab1bf6-d984-4c5f-9c83-e598c2c01c2c
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: ip-10-0-129-24.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:03Z"
  message: 'Node ip-10-0-129-24.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T07:50:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:03Z"
    name: ip-10-0-129-24.ec2.internal.18a8013a60f8c780
    namespace: default
    resourceVersion: "5331"
    uid: c8e16b67-e36f-418a-92f4-747634dd3fda
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-24.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-24.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: ip-10-0-129-24.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:03Z"
  message: 'Node ip-10-0-129-24.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T07:50:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:03Z"
    name: ip-10-0-129-24.ec2.internal.18a8013a60f9120c
    namespace: default
    resourceVersion: "5332"
    uid: 80614e04-5a97-402d-9c53-dcfc9aef9059
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-24.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-24.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: ip-10-0-129-24.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:03Z"
  message: 'Node ip-10-0-129-24.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T07:50:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:03Z"
    name: ip-10-0-129-24.ec2.internal.18a8013a60f93b3a
    namespace: default
    resourceVersion: "5334"
    uid: 3436b6a1-e3e5-4961-9a21-c56af149f736
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-24.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-24.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: ip-10-0-129-24.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:03Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T07:50:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:03Z"
    name: ip-10-0-129-24.ec2.internal.18a8013a63d1a346
    namespace: default
    resourceVersion: "5296"
    uid: 77488390-8e6e-4d11-a0d2-02939e5a20fc
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-24.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-24.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:03Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    resourceVersion: "5297"
    uid: d90eb8d5-4e4a-4ccd-980d-8c8cb465bd72
  kind: Event
  lastTimestamp: "2026-04-20T07:50:03Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T07:50:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T07:50:03Z"
    name: ip-10-0-129-24.ec2.internal.18a8013a7bf004bb
    namespace: default
    resourceVersion: "5389"
    uid: 34bab7f6-d1e3-4471-82ad-ede6bda8f1c3
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: d90eb8d5-4e4a-4ccd-980d-8c8cb465bd72
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: 'Node ip-10-0-129-24.ec2.internal event: Registered Node ip-10-0-129-24.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-129-24.ec2.internal.18a8013b4f98e8ee
    namespace: default
    resourceVersion: "5484"
    uid: b56ba0b2-6260-42e9-bffc-390cade8af7b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: ip-10-0-129-24.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:36Z"
  message: 'Node ip-10-0-129-24.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T07:50:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:36Z"
    name: ip-10-0-129-24.ec2.internal.18a801422731acdf
    namespace: default
    resourceVersion: "6333"
    uid: 6de72fe2-d0da-45d9-aee8-06be4af0fdb0
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-24.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-24.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:52:46Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-24.ec2.internal
    uid: d90eb8d5-4e4a-4ccd-980d-8c8cb465bd72
  kind: Event
  lastTimestamp: "2026-04-20T07:52:46Z"
  message: 'Node ip-10-0-129-24.ec2.internal event: Registered Node ip-10-0-129-24.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:52:46Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T07:52:46Z"
    name: ip-10-0-129-24.ec2.internal.18a8016080b15c2b
    namespace: default
    resourceVersion: "7993"
    uid: 655ef6cf-50e7-4f0f-8eab-af433f726a72
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:16Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: ip-10-0-133-161.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:16Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T07:50:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:16Z"
    name: ip-10-0-133-161.ec2.internal.18a8013d78815853
    namespace: default
    resourceVersion: "5711"
    uid: ebbe354a-eac5-43ae-a8ae-e35591ed75e3
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-161.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-161.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:16Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: ip-10-0-133-161.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:16Z"
  message: 'Node ip-10-0-133-161.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T07:50:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:16Z"
    name: ip-10-0-133-161.ec2.internal.18a8013d7a5c8d0d
    namespace: default
    resourceVersion: "5782"
    uid: c1ff6c60-f004-4595-bedb-fdcd1311a2bb
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-161.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-161.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:16Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: ip-10-0-133-161.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:16Z"
  message: 'Node ip-10-0-133-161.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T07:50:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:16Z"
    name: ip-10-0-133-161.ec2.internal.18a8013d7a5cd41d
    namespace: default
    resourceVersion: "5789"
    uid: f4e530d9-0b01-47d2-97e2-72c63f4a9646
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-161.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-161.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:16Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: ip-10-0-133-161.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:16Z"
  message: 'Node ip-10-0-133-161.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T07:50:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:16Z"
    name: ip-10-0-133-161.ec2.internal.18a8013d7a5cfa12
    namespace: default
    resourceVersion: "5797"
    uid: 6847b259-d21b-48b0-a602-b6ade6b362a9
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-161.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-161.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:16Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: ip-10-0-133-161.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:16Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T07:50:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:16Z"
    name: ip-10-0-133-161.ec2.internal.18a8013d7d545090
    namespace: default
    resourceVersion: "5716"
    uid: ad420551-9ff6-4482-b8c3-6b0ed82f5d93
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-161.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-161.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:16Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    resourceVersion: "5718"
    uid: 4fa31c05-20ed-4eca-aa7c-988e723cd63f
  kind: Event
  lastTimestamp: "2026-04-20T07:50:16Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T07:50:16Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T07:50:16Z"
    name: ip-10-0-133-161.ec2.internal.18a8013d93e4b468
    namespace: default
    resourceVersion: "5819"
    uid: 6e9f8d27-07b2-4d34-9c7f-2f2ae82ece9b
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:17Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: 4fa31c05-20ed-4eca-aa7c-988e723cd63f
  kind: Event
  lastTimestamp: "2026-04-20T07:50:17Z"
  message: 'Node ip-10-0-133-161.ec2.internal event: Registered Node ip-10-0-133-161.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:50:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T07:50:17Z"
    name: ip-10-0-133-161.ec2.internal.18a8013da3c64bb6
    namespace: default
    resourceVersion: "5826"
    uid: 0e090ac1-b8ab-4041-bc8e-76dc1ebe9fc9
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:43Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    resourceVersion: "6565"
    uid: 4fa31c05-20ed-4eca-aa7c-988e723cd63f
  kind: Event
  lastTimestamp: "2026-04-20T07:50:43Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-133-161.ec2.internal, error getting gateway config for node ip-10-0-133-161.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-133-161.ec2.internal", failed to update chassis to local for local node ip-10-0-133-161.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-133-161.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-133-161.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-20T07:50:43Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-133-161
      operation: Update
      time: "2026-04-20T07:50:43Z"
    name: ip-10-0-133-161.ec2.internal.18a80143d848c41e
    namespace: default
    resourceVersion: "6568"
    uid: e6d0c636-d18d-4e65-91be-a049cd0e1719
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:49Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: ip-10-0-133-161.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:49Z"
  message: 'Node ip-10-0-133-161.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T07:50:49Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:49Z"
    name: ip-10-0-133-161.ec2.internal.18a80145238adc43
    namespace: default
    resourceVersion: "6677"
    uid: c73744d3-a1c2-428e-97a9-4828890cc7d1
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-161.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-161.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:52:46Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-161.ec2.internal
    uid: 4fa31c05-20ed-4eca-aa7c-988e723cd63f
  kind: Event
  lastTimestamp: "2026-04-20T07:52:46Z"
  message: 'Node ip-10-0-133-161.ec2.internal event: Registered Node ip-10-0-133-161.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:52:47Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T07:52:47Z"
    name: ip-10-0-133-161.ec2.internal.18a8016080b24471
    namespace: default
    resourceVersion: "8002"
    uid: 4dcb45c7-077b-436a-b9c5-45b91d607d3e
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: ip-10-0-138-4.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-138-4.ec2.internal.18a8013b4a5b71ee
    namespace: default
    resourceVersion: "5479"
    uid: da05c0c9-7e7d-4700-b563-8ffc626a8c1c
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-4.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-4.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: ip-10-0-138-4.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: 'Node ip-10-0-138-4.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-138-4.ec2.internal.18a8013b4bfe38ef
    namespace: default
    resourceVersion: "5489"
    uid: 44b37039-823e-4645-b4b9-0973d537b6d1
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-4.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-4.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: ip-10-0-138-4.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: 'Node ip-10-0-138-4.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-138-4.ec2.internal.18a8013b4bfe9de9
    namespace: default
    resourceVersion: "5491"
    uid: 30397630-ed30-4e1d-8c99-7e39b04eb7b0
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-4.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-4.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: ip-10-0-138-4.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: 'Node ip-10-0-138-4.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-138-4.ec2.internal.18a8013b4bfec299
    namespace: default
    resourceVersion: "5499"
    uid: e763203e-f109-48a5-ad02-59633e12abd1
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-4.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-4.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: ip-10-0-138-4.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-138-4.ec2.internal.18a8013b4edf6c67
    namespace: default
    resourceVersion: "5483"
    uid: f4f4f35a-fcbb-4fb6-b419-c135725a83cd
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-4.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-4.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:07Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    resourceVersion: "5490"
    uid: 509c0ad5-d8ef-4212-8b54-4da6ffa5ac00
  kind: Event
  lastTimestamp: "2026-04-20T07:50:07Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T07:50:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-20T07:50:07Z"
    name: ip-10-0-138-4.ec2.internal.18a8013b6444089a
    namespace: default
    resourceVersion: "5580"
    uid: 7d29fa79-82b3-43ec-9d9a-973e733f8741
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:12Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: 509c0ad5-d8ef-4212-8b54-4da6ffa5ac00
  kind: Event
  lastTimestamp: "2026-04-20T07:50:12Z"
  message: 'Node ip-10-0-138-4.ec2.internal event: Registered Node ip-10-0-138-4.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:50:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T07:50:12Z"
    name: ip-10-0-138-4.ec2.internal.18a8013c79b2235e
    namespace: default
    resourceVersion: "5668"
    uid: 5db19dc9-d233-48a6-9264-e013648fead0
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:50:38Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: ip-10-0-138-4.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:50:38Z"
  message: 'Node ip-10-0-138-4.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T07:50:38Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-20T07:50:38Z"
    name: ip-10-0-138-4.ec2.internal.18a8014299bf2b4c
    namespace: default
    resourceVersion: "6439"
    uid: abe8f37f-a10d-4aa4-a882-06849e85b2e4
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-4.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-4.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:52:46Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-4.ec2.internal
    uid: 509c0ad5-d8ef-4212-8b54-4da6ffa5ac00
  kind: Event
  lastTimestamp: "2026-04-20T07:52:46Z"
  message: 'Node ip-10-0-138-4.ec2.internal event: Registered Node ip-10-0-138-4.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:52:47Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-20T07:52:47Z"
    name: ip-10-0-138-4.ec2.internal.18a8016080b2599d
    namespace: default
    resourceVersion: "8008"
    uid: 46076c53-8d55-41cb-a7a6-10e592c982f4
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-20T07:59:22Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16829"
    uid: 2e7aef2a-3fe4-48b4-81b2-1c337eba3427
  kind: Event
  lastTimestamp: "2026-04-20T07:59:25Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-20T07:59:22Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-20T07:59:26Z"
    name: kuadrant-system.18a801bc8466807c
    namespace: default
    resourceVersion: "17070"
    uid: eed4f421-12de-46ea-99e8-ffca25b9f25c
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-20T07:52:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T07:52:27Z"
    name: kube-system.18a8015bdbf954b3
    namespace: default
    resourceVersion: "7569"
    uid: 0e2afe60-c39d-495a-b332-178f8468060a
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5db9f6ddd6-jpl4t
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-20T07:52:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T07:52:42Z"
    name: kube-system.18a8015f5a71933a
    namespace: default
    resourceVersion: "7816"
    uid: a818d51a-3d34-4072-947a-4dcf36bdcd7a
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5db9f6ddd6-jpl4t
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-20T07:52:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T07:52:42Z"
    name: kube-system.18a8015f5aa72fda
    namespace: default
    resourceVersion: "7817"
    uid: 329a3f35-ba4d-4945-aa5b-62ab18b60a54
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5db9f6ddd6-jpl4t
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-20T07:52:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T07:52:42Z"
    name: kube-system.18a8015f5ae10af3
    namespace: default
    resourceVersion: "7818"
    uid: b9c2915c-118e-4904-87bd-a86da00a0641
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5db9f6ddd6-jpl4t
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-20T07:53:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-20T07:53:42Z"
    name: kube-system.18a8016d53016964
    namespace: default
    resourceVersion: "10100"
    uid: 5ae7513b-70ca-4c0e-a12b-697669ce7fb0
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5db9f6ddd6-jpl4t
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:45:40Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-20T07:45:40Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-20T07:45:40Z"
    name: openshift-kube-apiserver.18a800fd2fb8ad52
    namespace: default
    resourceVersion: "274"
    uid: 5b7fcda7-c1f4-4f67-9f1d-75f3e92b955e
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-64b768959-ccnb9
  type: Warning
kind: EventList
metadata:
  resourceVersion: "47219"