---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T21:03:50Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-16T21:03:50Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-16T21:03:50Z"
    name: 955c8061-c418-417c-932f-314f8622db23
    namespace: default
    resourceVersion: "11680"
    uid: adfd63b0-d7e1-44c6-b8e5-6d0dde3a17b5
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:23Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-b89qh
  kind: Event
  lastTimestamp: "2026-04-16T20:58:23Z"
  message: CSR "csr-b89qh" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:58:23Z"
    name: csr-b89qh.18a6f1ed051e3072
    namespace: default
    resourceVersion: "6054"
    uid: cc775f56-b44e-48b3-a40e-2115dd84616e
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:43Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-gp4pv
  kind: Event
  lastTimestamp: "2026-04-16T20:58:43Z"
  message: CSR "csr-gp4pv" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:58:43Z"
    name: csr-gp4pv.18a6f1f19bab3f35
    namespace: default
    resourceVersion: "6599"
    uid: fe1f1767-e917-44d0-8124-924e5c4df858
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:49Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-hmdhr
  kind: Event
  lastTimestamp: "2026-04-16T20:58:49Z"
  message: CSR "csr-hmdhr" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:58:49Z"
    name: csr-hmdhr.18a6f1f303891720
    namespace: default
    resourceVersion: "6682"
    uid: 155bf5ec-e0a0-4872-98f6-932ff977627b
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:37Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-qxcfj
  kind: Event
  lastTimestamp: "2026-04-16T20:58:37Z"
  message: CSR "csr-qxcfj" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:58:37Z"
    name: csr-qxcfj.18a6f1f05e5118bb
    namespace: default
    resourceVersion: "6429"
    uid: c1e80631-7d04-4f3b-a95d-8767ee6162a8
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:18Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-v2nvx
  kind: Event
  lastTimestamp: "2026-04-16T20:58:18Z"
  message: CSR "csr-v2nvx" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:58:18Z"
    name: csr-v2nvx.18a6f1ebff96561f
    namespace: default
    resourceVersion: "5940"
    uid: dc9640c2-f4ed-4525-8a44-5bd7f96f5d3d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:31Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-xw9g7
  kind: Event
  lastTimestamp: "2026-04-16T20:58:31Z"
  message: CSR "csr-xw9g7" has been approved
  metadata:
    creationTimestamp: "2026-04-16T20:58:31Z"
    name: csr-xw9g7.18a6f1ef05202a0e
    namespace: default
    resourceVersion: "6291"
    uid: 9ca44e9d-135e-45f6-b292-1061550f763d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T21:04:40Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "13911"
    uid: 6f3aff95-6a1b-45e2-b65e-f342645d48fa
  kind: Event
  lastTimestamp: "2026-04-16T21:04:40Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret
    openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-16T21:04:40Z"
    name: default-gateway.18a6f244c7b85267
    namespace: default
    resourceVersion: "13918"
    uid: eb0cd96a-97f8-40b6-994a-eee7a4d2a6c0
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-16T21:04:42Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14218"
    uid: 6f3aff95-6a1b-45e2-b65e-f342645d48fa
  kind: Event
  lastTimestamp: "2026-04-16T21:04:49Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches
    for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-16T21:04:42Z"
    name: default-gateway.18a6f245430fe831
    namespace: default
    resourceVersion: "14463"
    uid: 9aa06f95-597b-4782-9c55-664373df8be0
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-16T21:04:53Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14834"
    uid: cc693b76-1ab8-4fde-ae35-f7bd4630448f
  kind: Event
  lastTimestamp: "2026-04-16T21:05:21Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:cc693b76-1ab8-4fde-ae35-f7bd4630448f platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:cc693b76-1ab8-4fde-ae35-f7bd4630448f]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-16T21:04:53Z"
    name: default-kserve.18a6f247c3d7e94f
    namespace: default
    resourceVersion: "15901"
    uid: 9d8e9a01-cfd1-494b-90d9-7ce9d7436260
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 30
  eventTime: null
  firstTimestamp: "2026-04-16T21:04:40Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "13877"
    uid: 21aa49b8-1f2a-45cc-b040-7fc168d8b144
  kind: Event
  lastTimestamp: "2026-04-16T21:24:19Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring
    because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-16T21:04:40Z"
    name: default-monitoring.18a6f244c10ff79a
    namespace: default
    resourceVersion: "37639"
    uid: 18376ab7-47fb-48b7-bd0f-8311cadf23f6
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:06Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: ip-10-0-138-120.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:06Z"
  message: 'Node ip-10-0-138-120.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T20:58:06Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1e9085b42a0
    namespace: default
    resourceVersion: "5569"
    uid: be87a898-2706-470b-8fc6-2b40c4dc6aed
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-120.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-120.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:06Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: ip-10-0-138-120.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:06Z"
  message: 'Node ip-10-0-138-120.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T20:58:06Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1e9085b8690
    namespace: default
    resourceVersion: "5570"
    uid: 4827dbf0-1e99-4cc8-8d6c-b0c6ff8f2d3c
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-120.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-120.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:06Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: ip-10-0-138-120.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:06Z"
  message: 'Node ip-10-0-138-120.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T20:58:06Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1e9085bbd6d
    namespace: default
    resourceVersion: "5572"
    uid: 4e44fd1e-16e0-4d0c-88af-10e768ea42e6
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-120.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-120.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:06Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: ip-10-0-138-120.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:06Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T20:58:06Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1e90b5bb5e9
    namespace: default
    resourceVersion: "5479"
    uid: 9564c6c4-357c-4481-9ba6-42873df61d83
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-120.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-120.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:06Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    resourceVersion: "5480"
    uid: 81939008-f547-4112-9cce-478d8a5d0310
  kind: Event
  lastTimestamp: "2026-04-16T20:58:06Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T20:58:06Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1e920b5797d
    namespace: default
    resourceVersion: "5576"
    uid: 2a9a07fc-a61a-41a8-9038-af3569ab61c0
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:11Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: 81939008-f547-4112-9cce-478d8a5d0310
  kind: Event
  lastTimestamp: "2026-04-16T20:58:11Z"
  message: 'Node ip-10-0-138-120.ec2.internal event: Registered Node ip-10-0-138-120.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:58:11Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1ea29c29e95
    namespace: default
    resourceVersion: "5669"
    uid: de90f838-2262-48e7-92dc-e8364a377404
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:33Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    resourceVersion: "6313"
    uid: 81939008-f547-4112-9cce-478d8a5d0310
  kind: Event
  lastTimestamp: "2026-04-16T20:58:33Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-138-120.ec2.internal,
    error getting gateway config for node ip-10-0-138-120.ec2.internal: k8s.ovn.org/l3-gateway-config
    annotation not found for node "ip-10-0-138-120.ec2.internal", failed to update
    chassis to local for local node ip-10-0-138-120.ec2.internal, error: failed to
    parse node chassis-id for node - ip-10-0-138-120.ec2.internal, error: k8s.ovn.org/node-chassis-id
    annotation not found for node ip-10-0-138-120.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T20:58:33Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1ef6099b76a
    namespace: default
    resourceVersion: "6318"
    uid: 3a56bbc5-a44c-49b8-b11b-d35da7ab8fd9
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: ip-10-0-138-120.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:39Z"
  message: 'Node ip-10-0-138-120.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T20:58:39Z"
    name: ip-10-0-138-120.ec2.internal.18a6f1f0b659af2a
    namespace: default
    resourceVersion: "6489"
    uid: 91d80f7e-2df2-4aac-bd49-6ab4a45366c8
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-120.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-120.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T21:00:50Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-120.ec2.internal
    uid: 81939008-f547-4112-9cce-478d8a5d0310
  kind: Event
  lastTimestamp: "2026-04-16T21:00:50Z"
  message: 'Node ip-10-0-138-120.ec2.internal event: Registered Node ip-10-0-138-120.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T21:00:50Z"
    name: ip-10-0-138-120.ec2.internal.18a6f20f581a350e
    namespace: default
    resourceVersion: "7961"
    uid: de034596-61b2-44b0-b338-7f98a42c9a84
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: ip-10-0-139-17.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:17Z"
  message: 'Node ip-10-0-139-17.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T20:58:17Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1eba65499c7
    namespace: default
    resourceVersion: "5883"
    uid: 8fbeec8a-5b93-4187-90a4-dc6f38e9d539
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-139-17.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-139-17.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: ip-10-0-139-17.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:17Z"
  message: 'Node ip-10-0-139-17.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T20:58:17Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1eba654de97
    namespace: default
    resourceVersion: "5891"
    uid: dddfd275-fbad-4833-ab77-5eb5f3d260bb
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-139-17.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-139-17.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: ip-10-0-139-17.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:17Z"
  message: 'Node ip-10-0-139-17.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T20:58:17Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1eba655026f
    namespace: default
    resourceVersion: "5898"
    uid: a4566301-56e6-4752-b331-3bc3265d96fc
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-139-17.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-139-17.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: ip-10-0-139-17.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:17Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T20:58:17Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1ebaaeadaf3
    namespace: default
    resourceVersion: "5803"
    uid: bbe1a1b5-5732-4237-8d1c-8edca10078a8
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-139-17.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-139-17.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:17Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    resourceVersion: "5804"
    uid: 083f9db4-af53-4399-9f5c-0e1f8488a78b
  kind: Event
  lastTimestamp: "2026-04-16T20:58:17Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T20:58:17Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1ebbf0de544
    namespace: default
    resourceVersion: "5905"
    uid: dac7b317-004f-49b4-b2d7-1bf696a9a4ef
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: 083f9db4-af53-4399-9f5c-0e1f8488a78b
  kind: Event
  lastTimestamp: "2026-04-16T20:58:21Z"
  message: 'Node ip-10-0-139-17.ec2.internal event: Registered Node ip-10-0-139-17.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:58:21Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1ec7ddb452c
    namespace: default
    resourceVersion: "6008"
    uid: acd3b2ec-ada4-4892-8e42-d951b43daefa
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:44Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    resourceVersion: "6624"
    uid: 083f9db4-af53-4399-9f5c-0e1f8488a78b
  kind: Event
  lastTimestamp: "2026-04-16T20:58:44Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-139-17.ec2.internal,
    error getting gateway config for node ip-10-0-139-17.ec2.internal: k8s.ovn.org/l3-gateway-config
    annotation not found for node "ip-10-0-139-17.ec2.internal", failed to update
    chassis to local for local node ip-10-0-139-17.ec2.internal, error: failed to
    parse node chassis-id for node - ip-10-0-139-17.ec2.internal, error: k8s.ovn.org/node-chassis-id
    annotation not found for node ip-10-0-139-17.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T20:58:44Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1f1f76b5f94
    namespace: default
    resourceVersion: "6627"
    uid: 0fa3e100-bbc8-4cb4-91ee-9d4b23f48a02
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: ip-10-0-139-17.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:50Z"
  message: 'Node ip-10-0-139-17.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T20:58:50Z"
    name: ip-10-0-139-17.ec2.internal.18a6f1f3495a5d14
    namespace: default
    resourceVersion: "6693"
    uid: 45dbcd79-d119-4cd9-842f-bf0d19eabaf1
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-139-17.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-139-17.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T21:00:50Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-139-17.ec2.internal
    uid: 083f9db4-af53-4399-9f5c-0e1f8488a78b
  kind: Event
  lastTimestamp: "2026-04-16T21:00:50Z"
  message: 'Node ip-10-0-139-17.ec2.internal event: Registered Node ip-10-0-139-17.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T21:00:50Z"
    name: ip-10-0-139-17.ec2.internal.18a6f20f581c0e81
    namespace: default
    resourceVersion: "7962"
    uid: a5a7358b-d7fa-4b3b-b6a2-22d568a38a75
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:57:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ip-10-0-141-171.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:57:52Z"
  message: 'Node ip-10-0-141-171.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T20:57:52Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1e5de7cdbba
    namespace: default
    resourceVersion: "5262"
    uid: ac901cbe-3860-4580-884d-ac6223f37997
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-171.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-171.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:57:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ip-10-0-141-171.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:57:52Z"
  message: 'Node ip-10-0-141-171.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T20:57:52Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1e5de7d20bb
    namespace: default
    resourceVersion: "5268"
    uid: ba98532b-bca9-450e-a4ac-84c2f0d398c6
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-171.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-171.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T20:57:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ip-10-0-141-171.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:57:52Z"
  message: 'Node ip-10-0-141-171.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T20:57:52Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1e5de7d46fa
    namespace: default
    resourceVersion: "5273"
    uid: 71b02a2c-80bf-43db-a9ea-cdc2017d2b8e
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-171.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-171.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:57:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ip-10-0-141-171.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:57:52Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T20:57:52Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1e5e181c94d
    namespace: default
    resourceVersion: "5224"
    uid: 4dd63778-27fe-487e-b310-b1e823594fa2
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-171.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-171.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:57:53Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    resourceVersion: "5225"
    uid: ee67f6b5-e3db-45a4-a18e-57c51ba5d1bd
  kind: Event
  lastTimestamp: "2026-04-16T20:57:53Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T20:57:53Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1e5f859f054
    namespace: default
    resourceVersion: "5309"
    uid: 342a80a5-8587-407b-96ee-971d34285094
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:57:56Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ee67f6b5-e3db-45a4-a18e-57c51ba5d1bd
  kind: Event
  lastTimestamp: "2026-04-16T20:57:56Z"
  message: 'Node ip-10-0-141-171.ec2.internal event: Registered Node ip-10-0-141-171.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T20:57:56Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1e6ab7d19fb
    namespace: default
    resourceVersion: "5405"
    uid: 04949f46-a4c1-4775-918d-29139bb318f7
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:58:24Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ip-10-0-141-171.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T20:58:24Z"
  message: 'Node ip-10-0-141-171.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T20:58:24Z"
    name: ip-10-0-141-171.ec2.internal.18a6f1ed4ec84090
    namespace: default
    resourceVersion: "6065"
    uid: 3c9063a8-08e5-450a-b6d6-0221eeb9c192
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-171.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-171.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T21:00:50Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-171.ec2.internal
    uid: ee67f6b5-e3db-45a4-a18e-57c51ba5d1bd
  kind: Event
  lastTimestamp: "2026-04-16T21:00:50Z"
  message: 'Node ip-10-0-141-171.ec2.internal event: Registered Node ip-10-0-141-171.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-16T21:00:50Z"
    name: ip-10-0-141-171.ec2.internal.18a6f20f581c2c1d
    namespace: default
    resourceVersion: "7963"
    uid: 06d25e32-9ee5-4e7b-ba72-57a10c7b0c02
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 9
  eventTime: null
  firstTimestamp: "2026-04-16T21:05:52Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16247"
    uid: 88928cc8-ea2d-4963-b6a7-7fb4c82a4232
  kind: Event
  lastTimestamp: "2026-04-16T21:05:55Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed
    to list bundles: rpc error: code = Unavailable desc = connection error: desc =
    "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-16T21:05:52Z"
    name: kuadrant-system.18a6f25599cfd9be
    namespace: default
    resourceVersion: "16464"
    uid: fc684f70-948b-4542-b818-2fb63023dae3
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-16T21:00:33Z"
    name: kube-system.18a6f20b36cc691b
    namespace: default
    resourceVersion: "7615"
    uid: 5799127f-cc41-47c5-a09e-a427f80af126
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-679f47587-qcg9h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-16T21:00:48Z"
    name: kube-system.18a6f20eb535d6bf
    namespace: default
    resourceVersion: "7935"
    uid: 25329268-8e42-4d64-9598-b25cee8a579c
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-679f47587-qcg9h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-16T21:00:48Z"
    name: kube-system.18a6f20eb56d6814
    namespace: default
    resourceVersion: "7936"
    uid: a57206ec-705e-4e7f-b129-aefbf666b518
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-679f47587-qcg9h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-16T21:00:48Z"
    name: kube-system.18a6f20eb5cd753a
    namespace: default
    resourceVersion: "7937"
    uid: 0db01e12-7706-4afc-b27b-2e8b8537f4ed
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-679f47587-qcg9h
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-16T21:01:48Z"
    name: kube-system.18a6f21cadc2b8fd
    namespace: default
    resourceVersion: "10199"
    uid: d087aeda-7dcf-417a-9365-c38c9b73314c
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-679f47587-qcg9h
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T20:53:40Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-16T20:53:40Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-16T20:53:40Z"
    name: openshift-kube-apiserver.18a6f1ab1315846e
    namespace: default
    resourceVersion: "274"
    uid: 615dbc23-8d50-42c2-a540-5cb622c173f9
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-565fd85576-znkqf
  type: Warning
kind: EventList
metadata:
  resourceVersion: "41773"