--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:28:11Z" involvedObject: apiVersion: operator.openshift.io/v1alpha1 kind: IstioCSR kind: Event lastTimestamp: "2026-04-17T14:28:11Z" message: controller is starting metadata: creationTimestamp: "2026-04-17T14:28:11Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: cert-manager-operator operation: Update time: "2026-04-17T14:28:11Z" name: 0b7c032a-f22b-44d8-9ba9-2f697e567292 namespace: default resourceVersion: "12123" uid: 885d7401-784f-4009-bd32-1b3a99b9ab5c reason: ControllerStarted reportingComponent: cert-manager-istio-csr-controller reportingInstance: "" source: component: cert-manager-istio-csr-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:16Z" involvedObject: kind: CertificateSigningRequest name: csr-4lxwn kind: Event lastTimestamp: "2026-04-17T14:21:16Z" message: CSR "csr-4lxwn" has been approved metadata: creationTimestamp: "2026-04-17T14:21:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:21:16Z" name: csr-4lxwn.18a72ad5e63693c2 namespace: default resourceVersion: "5664" uid: cd17b447-0dd0-4359-9c0e-c3909e0d35c1 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:22:10Z" involvedObject: kind: CertificateSigningRequest name: csr-8k98q kind: Event lastTimestamp: "2026-04-17T14:22:10Z" message: CSR "csr-8k98q" has been approved metadata: creationTimestamp: "2026-04-17T14:22:10Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:22:10Z" name: csr-8k98q.18a72ae2846809cb namespace: default resourceVersion: "6723" uid: 94587602-a920-4601-b399-3a383795a91e reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:22:15Z" involvedObject: kind: CertificateSigningRequest name: csr-9zl2b kind: Event lastTimestamp: "2026-04-17T14:22:15Z" message: CSR "csr-9zl2b" has been approved metadata: creationTimestamp: "2026-04-17T14:22:15Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:22:15Z" name: csr-9zl2b.18a72ae3b127dd51 namespace: default resourceVersion: "6776" uid: daea94cb-54ff-4992-9ab6-6f67e118cf32 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:53Z" involvedObject: kind: CertificateSigningRequest name: csr-bm5s5 kind: Event lastTimestamp: "2026-04-17T14:21:53Z" message: CSR "csr-bm5s5" has been approved metadata: creationTimestamp: "2026-04-17T14:21:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:21:53Z" name: csr-bm5s5.18a72ade85eeadb9 namespace: default resourceVersion: "6456" uid: 5b94a715-2c8b-4be4-9e7f-cd1a2300fe01 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:47Z" involvedObject: kind: CertificateSigningRequest name: csr-dn27t kind: Event lastTimestamp: "2026-04-17T14:21:47Z" message: CSR "csr-dn27t" has been approved metadata: creationTimestamp: "2026-04-17T14:21:47Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:21:47Z" name: csr-dn27t.18a72add3f0c83cf namespace: default resourceVersion: "6383" uid: ba2c1f6a-094e-4a94-9d69-41fed505e805 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:09Z" involvedObject: kind: CertificateSigningRequest name: csr-rjwlh kind: Event lastTimestamp: "2026-04-17T14:21:09Z" message: CSR "csr-rjwlh" has been approved metadata: creationTimestamp: "2026-04-17T14:21:09Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:21:09Z" name: csr-rjwlh.18a72ad471fe78b6 namespace: default resourceVersion: "5601" uid: afd86911-e7b5-4a75-ba7e-408c8bd9c691 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-17T14:29:05Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14387" uid: 758a033f-9553-4a4c-83ac-3ead70f9cee9 kind: Event lastTimestamp: "2026-04-17T14:29:12Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-17T14:29:05Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T14:29:12Z" name: default-gateway.18a72b432d88e676 namespace: default resourceVersion: "14640" uid: c648d5b1-4839-44af-a85b-3dd3a8a1711d reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-17T14:29:14Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "14748" uid: 6671ac98-25bf-4fac-9d0e-7f0b4a5ca234 kind: Event lastTimestamp: "2026-04-17T14:29:40Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:6671ac98-25bf-4fac-9d0e-7f0b4a5ca234 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:6671ac98-25bf-4fac-9d0e-7f0b4a5ca234]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-17T14:29:14Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T14:29:40Z" name: default-kserve.18a72b453396c587 namespace: default resourceVersion: "16219" uid: 572d4a6b-52e8-4b78-ae53-0c6b39547870 reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 32 eventTime: null firstTimestamp: "2026-04-17T14:29:04Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "14243" uid: cf0e48a4-2ca1-46b7-86a4-d928fae2456d kind: Event lastTimestamp: "2026-04-17T14:48:43Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-17T14:29:04Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T14:48:43Z" name: default-monitoring.18a72b42f09d1444 namespace: default resourceVersion: "37748" uid: 03d62700-3831-443f-be6f-46070328b380 reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:20:45Z" involvedObject: kind: Node name: ip-10-0-132-119.ec2.internal uid: ip-10-0-132-119.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:20:45Z" message: 'Node ip-10-0-132-119.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T14:20:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:20:45Z" name: ip-10-0-132-119.ec2.internal.18a72aceb30583eb namespace: default resourceVersion: "5194" uid: 1e02b506-809b-46e9-a0c6-1a805f184cff reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-132-119.ec2.internal source: component: kubelet host: ip-10-0-132-119.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:20:45Z" involvedObject: kind: Node name: ip-10-0-132-119.ec2.internal uid: ip-10-0-132-119.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:20:45Z" message: 'Node ip-10-0-132-119.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T14:20:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:20:45Z" name: ip-10-0-132-119.ec2.internal.18a72aceb305c48c namespace: default resourceVersion: "5195" uid: c0256be8-c343-43a1-a99a-7a1993f4bf7f reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-132-119.ec2.internal source: component: kubelet host: ip-10-0-132-119.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:20:45Z" involvedObject: kind: Node name: ip-10-0-132-119.ec2.internal uid: ip-10-0-132-119.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:20:45Z" message: 'Node ip-10-0-132-119.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T14:20:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:20:45Z" name: ip-10-0-132-119.ec2.internal.18a72aceb305ec68 namespace: default resourceVersion: "5196" uid: 9a207ee8-ddc2-4f8e-8467-71a619e67c41 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-132-119.ec2.internal source: component: kubelet host: ip-10-0-132-119.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:20:45Z" involvedObject: kind: Node name: ip-10-0-132-119.ec2.internal uid: ip-10-0-132-119.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:20:45Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T14:20:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:20:45Z" name: ip-10-0-132-119.ec2.internal.18a72aceb56593be namespace: default resourceVersion: "5165" uid: 655f4a64-9c8b-4d6e-b4e8-2011cae132b3 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-132-119.ec2.internal source: component: kubelet host: ip-10-0-132-119.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:20:45Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-119.ec2.internal resourceVersion: "5166" uid: d8546d01-653a-47ce-9243-a22d3032b92b kind: Event lastTimestamp: "2026-04-17T14:20:45Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T14:20:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T14:20:45Z" name: ip-10-0-132-119.ec2.internal.18a72acecb7d2063 namespace: default resourceVersion: "5233" uid: 2fb175e7-f6c1-45bb-ad43-ddb26e8adc7a reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:20:49Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-119.ec2.internal uid: d8546d01-653a-47ce-9243-a22d3032b92b kind: Event lastTimestamp: "2026-04-17T14:20:49Z" message: 'Node ip-10-0-132-119.ec2.internal event: Registered Node ip-10-0-132-119.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:20:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:20:49Z" name: ip-10-0-132-119.ec2.internal.18a72acfa90f0913 namespace: default resourceVersion: "5358" uid: 3cd6a886-7b05-46e0-9f8e-444b8403cd1c reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:11Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-119.ec2.internal resourceVersion: "5615" uid: d8546d01-653a-47ce-9243-a22d3032b92b kind: Event lastTimestamp: "2026-04-17T14:21:11Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-119.ec2.internal, error getting gateway config for node ip-10-0-132-119.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-132-119.ec2.internal", failed to update chassis to local for local node ip-10-0-132-119.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-132-119.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-119.ec2.internal]' metadata: creationTimestamp: "2026-04-17T14:21:11Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-132-119 operation: Update time: "2026-04-17T14:21:11Z" name: ip-10-0-132-119.ec2.internal.18a72ad4cd4e8bca namespace: default resourceVersion: "5616" uid: a9b6db43-9bfe-4eb5-90f6-b207cbc7571b reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:17Z" involvedObject: kind: Node name: ip-10-0-132-119.ec2.internal uid: ip-10-0-132-119.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:17Z" message: 'Node ip-10-0-132-119.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T14:21:17Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:17Z" name: ip-10-0-132-119.ec2.internal.18a72ad63eab148e namespace: default resourceVersion: "5675" uid: 63f65abb-c555-46c7-b797-dcbb7d182c11 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-132-119.ec2.internal source: component: kubelet host: ip-10-0-132-119.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:23:48Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-132-119.ec2.internal uid: d8546d01-653a-47ce-9243-a22d3032b92b kind: Event lastTimestamp: "2026-04-17T14:23:48Z" message: 'Node ip-10-0-132-119.ec2.internal event: Registered Node ip-10-0-132-119.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:23:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:23:48Z" name: ip-10-0-132-119.ec2.internal.18a72af946398699 namespace: default resourceVersion: "7849" uid: 4d46dcb4-40e7-4af9-ae2e-6631adfa6e51 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:22Z" involvedObject: kind: Node name: ip-10-0-138-3.ec2.internal uid: ip-10-0-138-3.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:22Z" message: Starting kubelet. metadata: creationTimestamp: "2026-04-17T14:21:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:22Z" name: ip-10-0-138-3.ec2.internal.18a72ad74e6abaa6 namespace: default resourceVersion: "5768" uid: 28f6871f-5f19-4e2f-8cb8-bad484bec06a reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-138-3.ec2.internal source: component: kubelet host: ip-10-0-138-3.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:21:22Z" involvedObject: kind: Node name: ip-10-0-138-3.ec2.internal uid: ip-10-0-138-3.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:22Z" message: 'Node ip-10-0-138-3.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T14:21:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:22Z" name: ip-10-0-138-3.ec2.internal.18a72ad7504bd100 namespace: default resourceVersion: "5775" uid: e16aebe4-d17c-4b9d-877a-8633702d45b5 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-138-3.ec2.internal source: component: kubelet host: ip-10-0-138-3.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:21:22Z" involvedObject: kind: Node name: ip-10-0-138-3.ec2.internal uid: ip-10-0-138-3.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:22Z" message: 'Node ip-10-0-138-3.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T14:21:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:22Z" name: ip-10-0-138-3.ec2.internal.18a72ad7504c1783 namespace: default resourceVersion: "5777" uid: 0d259d5e-d3ce-4e26-85a8-d4b553418c88 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-138-3.ec2.internal source: component: kubelet host: ip-10-0-138-3.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:21:22Z" involvedObject: kind: Node name: ip-10-0-138-3.ec2.internal uid: ip-10-0-138-3.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:22Z" message: 'Node ip-10-0-138-3.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T14:21:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:22Z" name: ip-10-0-138-3.ec2.internal.18a72ad7504c3b81 namespace: default resourceVersion: "5782" uid: 527b8750-e021-45c0-9531-77d17eee3035 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-138-3.ec2.internal source: component: kubelet host: ip-10-0-138-3.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:22Z" involvedObject: kind: Node name: ip-10-0-138-3.ec2.internal uid: ip-10-0-138-3.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:22Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T14:21:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:22Z" name: ip-10-0-138-3.ec2.internal.18a72ad7547522e8 namespace: default resourceVersion: "5772" uid: d454180c-f350-46bc-a946-15ae50c06d09 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-138-3.ec2.internal source: component: kubelet host: ip-10-0-138-3.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:22Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-3.ec2.internal resourceVersion: "5776" uid: 59a6a96d-b99f-4872-a99f-6c300fe28f69 kind: Event lastTimestamp: "2026-04-17T14:21:22Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T14:21:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T14:21:22Z" name: ip-10-0-138-3.ec2.internal.18a72ad7669ddf59 namespace: default resourceVersion: "5818" uid: e227b1e0-e4a1-4981-b1bb-9ba5c0af4952 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:24Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-3.ec2.internal uid: 59a6a96d-b99f-4872-a99f-6c300fe28f69 kind: Event lastTimestamp: "2026-04-17T14:21:24Z" message: 'Node ip-10-0-138-3.ec2.internal event: Registered Node ip-10-0-138-3.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:21:24Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:21:24Z" name: ip-10-0-138-3.ec2.internal.18a72ad7cf737d56 namespace: default resourceVersion: "5918" uid: 47c96199-9eb9-4bc5-8849-50fb6a284a16 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:49Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-3.ec2.internal resourceVersion: "6410" uid: 59a6a96d-b99f-4872-a99f-6c300fe28f69 kind: Event lastTimestamp: "2026-04-17T14:21:49Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-138-3.ec2.internal, error getting gateway config for node ip-10-0-138-3.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-138-3.ec2.internal", failed to update chassis to local for local node ip-10-0-138-3.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-138-3.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-138-3.ec2.internal]' metadata: creationTimestamp: "2026-04-17T14:21:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-138-3 operation: Update time: "2026-04-17T14:21:49Z" name: ip-10-0-138-3.ec2.internal.18a72addba097613 namespace: default resourceVersion: "6416" uid: b6daf9f0-a9a2-4d4e-85c3-aa401c15ac75 reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:54Z" involvedObject: kind: Node name: ip-10-0-138-3.ec2.internal uid: ip-10-0-138-3.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:54Z" message: 'Node ip-10-0-138-3.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T14:21:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:54Z" name: ip-10-0-138-3.ec2.internal.18a72adecf26bc8d namespace: default resourceVersion: "6470" uid: 781026a2-137f-4cf8-9d13-0f24c735f59d reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-138-3.ec2.internal source: component: kubelet host: ip-10-0-138-3.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:23:48Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-138-3.ec2.internal uid: 59a6a96d-b99f-4872-a99f-6c300fe28f69 kind: Event lastTimestamp: "2026-04-17T14:23:48Z" message: 'Node ip-10-0-138-3.ec2.internal event: Registered Node ip-10-0-138-3.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:23:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:23:48Z" name: ip-10-0-138-3.ec2.internal.18a72af94638a4f8 namespace: default resourceVersion: "7846" uid: 2c503237-f959-442a-b93b-1ed51d46c379 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:45Z" involvedObject: kind: Node name: ip-10-0-143-215.ec2.internal uid: ip-10-0-143-215.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:45Z" message: Starting kubelet. metadata: creationTimestamp: "2026-04-17T14:21:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:45Z" name: ip-10-0-143-215.ec2.internal.18a72adcb5c324c4 namespace: default resourceVersion: "6205" uid: 43a0df65-f787-48be-b24a-1fecae086e8e reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-143-215.ec2.internal source: component: kubelet host: ip-10-0-143-215.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:21:45Z" involvedObject: kind: Node name: ip-10-0-143-215.ec2.internal uid: ip-10-0-143-215.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:45Z" message: 'Node ip-10-0-143-215.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T14:21:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:45Z" name: ip-10-0-143-215.ec2.internal.18a72adcb78b0920 namespace: default resourceVersion: "6217" uid: aaaf8648-2640-49e9-a503-07be700fdba8 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-143-215.ec2.internal source: component: kubelet host: ip-10-0-143-215.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:21:45Z" involvedObject: kind: Node name: ip-10-0-143-215.ec2.internal uid: ip-10-0-143-215.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:45Z" message: 'Node ip-10-0-143-215.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T14:21:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:45Z" name: ip-10-0-143-215.ec2.internal.18a72adcb78b4fc8 namespace: default resourceVersion: "6219" uid: 592b655f-e883-460d-a054-bd6bd480155d reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-143-215.ec2.internal source: component: kubelet host: ip-10-0-143-215.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:21:45Z" involvedObject: kind: Node name: ip-10-0-143-215.ec2.internal uid: ip-10-0-143-215.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:45Z" message: 'Node ip-10-0-143-215.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T14:21:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:45Z" name: ip-10-0-143-215.ec2.internal.18a72adcb78b9cd3 namespace: default resourceVersion: "6232" uid: 6a1caac9-2c37-45bb-9d04-476dc80a2e00 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-143-215.ec2.internal source: component: kubelet host: ip-10-0-143-215.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:45Z" involvedObject: kind: Node name: ip-10-0-143-215.ec2.internal uid: ip-10-0-143-215.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:21:45Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T14:21:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:21:45Z" name: ip-10-0-143-215.ec2.internal.18a72adcbb35527d namespace: default resourceVersion: "6212" uid: 6bf3a659-c181-4c6f-87fb-24f3d59a9b60 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-143-215.ec2.internal source: component: kubelet host: ip-10-0-143-215.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:45Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-215.ec2.internal resourceVersion: "6216" uid: fe6db6bd-7632-4871-88b6-92beb86aa336 kind: Event lastTimestamp: "2026-04-17T14:21:45Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T14:21:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T14:21:45Z" name: ip-10-0-143-215.ec2.internal.18a72adcd1e6febc namespace: default resourceVersion: "6311" uid: 427b4dae-cf12-4480-a412-8b6ca472cf7f reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:21:49Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-215.ec2.internal uid: fe6db6bd-7632-4871-88b6-92beb86aa336 kind: Event lastTimestamp: "2026-04-17T14:21:49Z" message: 'Node ip-10-0-143-215.ec2.internal event: Registered Node ip-10-0-143-215.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:21:49Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:21:49Z" name: ip-10-0-143-215.ec2.internal.18a72adda1c0ce98 namespace: default resourceVersion: "6403" uid: 9c3dea30-2322-4082-a249-2eddbc84ce87 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:22:11Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-215.ec2.internal resourceVersion: "6738" uid: fe6db6bd-7632-4871-88b6-92beb86aa336 kind: Event lastTimestamp: "2026-04-17T14:22:11Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-143-215.ec2.internal, error getting gateway config for node ip-10-0-143-215.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-143-215.ec2.internal", failed to update chassis to local for local node ip-10-0-143-215.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-143-215.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-143-215.ec2.internal]' metadata: creationTimestamp: "2026-04-17T14:22:11Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-143-215 operation: Update time: "2026-04-17T14:22:11Z" name: ip-10-0-143-215.ec2.internal.18a72ae2e00fb859 namespace: default resourceVersion: "6745" uid: 7578f6a2-abf8-45a9-82d4-cfc8530d1b66 reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:22:16Z" involvedObject: kind: Node name: ip-10-0-143-215.ec2.internal uid: ip-10-0-143-215.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:22:16Z" message: 'Node ip-10-0-143-215.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T14:22:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:22:16Z" name: ip-10-0-143-215.ec2.internal.18a72ae3ff8fbe33 namespace: default resourceVersion: "6788" uid: e71bafee-30a4-4943-bc54-45c04352d915 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-143-215.ec2.internal source: component: kubelet host: ip-10-0-143-215.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:23:48Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-215.ec2.internal uid: fe6db6bd-7632-4871-88b6-92beb86aa336 kind: Event lastTimestamp: "2026-04-17T14:23:48Z" message: 'Node ip-10-0-143-215.ec2.internal event: Registered Node ip-10-0-143-215.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:23:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:23:48Z" name: ip-10-0-143-215.ec2.internal.18a72af946397127 namespace: default resourceVersion: "7847" uid: 220df59b-5530-4ab5-b246-e627de51ece1 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 10 eventTime: null firstTimestamp: "2026-04-17T14:30:16Z" involvedObject: apiVersion: v1 kind: Namespace name: kuadrant-system resourceVersion: "16492" uid: ed177c02-6394-4230-bcf2-7004697a638c kind: Event lastTimestamp: "2026-04-17T14:30:20Z" message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"' metadata: creationTimestamp: "2026-04-17T14:30:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: catalog operation: Update time: "2026-04-17T14:30:20Z" name: kuadrant-system.18a72b53c5ad160a namespace: default resourceVersion: "16717" uid: 7b76fb4e-927b-4f6b-9af3-0789dab888d6 reason: ResolutionFailed reportingComponent: operator-lifecycle-manager reportingInstance: "" source: component: operator-lifecycle-manager type: Warning - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-17T14:23:25Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:23:25Z" name: kube-system.18a72af3ee22498f namespace: default resourceVersion: "7513" uid: 1a61d145-681e-4595-82c4-04b391a4fd08 reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-7bc6b5978c-6kb95 type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-17T14:23:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:23:40Z" name: kube-system.18a72af76caf557f namespace: default resourceVersion: "7804" uid: f45972ae-34da-4056-b111-631f73b86d15 reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-7bc6b5978c-6kb95 type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-17T14:23:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:23:40Z" name: kube-system.18a72af76ce17cc0 namespace: default resourceVersion: "7805" uid: 45956854-92b9-4132-b5bd-63fd4f6b8a55 reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-7bc6b5978c-6kb95 type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-17T14:23:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:23:40Z" name: kube-system.18a72af76d1623f1 namespace: default resourceVersion: "7806" uid: 5bb48501-c216-4958-9414-55ba9cc12aef reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-7bc6b5978c-6kb95 type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-17T14:24:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:24:40Z" name: kube-system.18a72b0565370cf7 namespace: default resourceVersion: "9969" uid: e11a1ee7-9b69-48f4-be5f-3e7d3075154c reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-7bc6b5978c-6kb95 type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:16:49Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-17T14:16:49Z" message: readyz=true metadata: creationTimestamp: "2026-04-17T14:16:49Z" name: openshift-kube-apiserver.18a72a97db7ee8db namespace: default resourceVersion: "274" uid: 53b3a996-2006-4daf-9985-258d698e6103 reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: kube-apiserver-dcdf47d86-shr44 type: Warning kind: EventList metadata: resourceVersion: "45932"