---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:28:58Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-16T04:28:58Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-16T04:28:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: cert-manager-operator
      operation: Update
      time: "2026-04-16T04:28:58Z"
    name: b8abc673-0127-4b7b-8a21-0b46eaa933d2
    namespace: default
    resourceVersion: "11493"
    uid: 2e563438-1258-4fde-95ed-0b681d956b11
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:30Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-4gdlz
  kind: Event
  lastTimestamp: "2026-04-16T04:24:30Z"
  message: CSR "csr-4gdlz" has been approved
  metadata:
    creationTimestamp: "2026-04-16T04:24:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T04:24:30Z"
    name: csr-4gdlz.18a6bbb0915b0021
    namespace: default
    resourceVersion: "6280"
    uid: 8129052a-89b7-43ce-9df7-ae90678f5726
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:44Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-4nz42
  kind: Event
  lastTimestamp: "2026-04-16T04:24:44Z"
  message: CSR "csr-4nz42" has been approved
  metadata:
    creationTimestamp: "2026-04-16T04:24:44Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T04:24:44Z"
    name: csr-4nz42.18a6bbb3dc7a982c
    namespace: default
    resourceVersion: "6670"
    uid: a2bcd196-9bc7-464d-8860-d818ab9699de
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:35Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-66rsz
  kind: Event
  lastTimestamp: "2026-04-16T04:24:35Z"
  message: CSR "csr-66rsz" has been approved
  metadata:
    creationTimestamp: "2026-04-16T04:24:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T04:24:35Z"
    name: csr-66rsz.18a6bbb1ee9347b7
    namespace: default
    resourceVersion: "6481"
    uid: 178f5f54-913d-4408-87ad-208844276f20
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:35Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-fzrbp
  kind: Event
  lastTimestamp: "2026-04-16T04:24:35Z"
  message: CSR "csr-fzrbp" has been approved
  metadata:
    creationTimestamp: "2026-04-16T04:24:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T04:24:35Z"
    name: csr-fzrbp.18a6bbb1e77c7c21
    namespace: default
    resourceVersion: "6477"
    uid: 8afd6c7a-e76b-4259-9132-e1817d8f0041
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:37Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-h97fj
  kind: Event
  lastTimestamp: "2026-04-16T04:24:37Z"
  message: CSR "csr-h97fj" has been approved
  metadata:
    creationTimestamp: "2026-04-16T04:24:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T04:24:37Z"
    name: csr-h97fj.18a6bbb26472a9af
    namespace: default
    resourceVersion: "6562"
    uid: ce2f6344-2fc1-40c0-b80a-83fe898e9536
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:29Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-mlkvs
  kind: Event
  lastTimestamp: "2026-04-16T04:24:29Z"
  message: CSR "csr-mlkvs" has been approved
  metadata:
    creationTimestamp: "2026-04-16T04:24:29Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T04:24:29Z"
    name: csr-mlkvs.18a6bbb06bafbe43
    namespace: default
    resourceVersion: "6258"
    uid: 407c7dcc-cbfa-4be2-a1ee-73093104d3e5
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-16T04:29:57Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14266"
    uid: 9fd95f32-250d-4383-980b-2ca7db04506e
  kind: Event
  lastTimestamp: "2026-04-16T04:30:05Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-16T04:29:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: manager
      operation: Update
      time: "2026-04-16T04:30:05Z"
    name: default-gateway.18a6bbfcbd16f907
    namespace: default
    resourceVersion: "14474"
    uid: cb6d495e-61aa-4ce3-bf20-c5294f16a5d5
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-16T04:30:06Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14577"
    uid: 0868963e-85d4-4c7b-9811-13b2b48301d7
  kind: Event
  lastTimestamp: "2026-04-16T04:30:33Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:0868963e-85d4-4c7b-9811-13b2b48301d7 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:0868963e-85d4-4c7b-9811-13b2b48301d7]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-16T04:30:06Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: manager
      operation: Update
      time: "2026-04-16T04:30:33Z"
    name: default-kserve.18a6bbfef52fb5e8
    namespace: default
    resourceVersion: "16022"
    uid: d0dfcc9e-41ff-4644-ace1-a5e17bba30ab
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 29
  eventTime: null
  firstTimestamp: "2026-04-16T04:29:55Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14049"
    uid: 89990be8-d974-4bb4-b7cc-a397427db6b4
  kind: Event
  lastTimestamp: "2026-04-16T04:46:50Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-16T04:29:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: manager
      operation: Update
      time: "2026-04-16T04:46:50Z"
    name: default-monitoring.18a6bbfc72e80f88
    namespace: default
    resourceVersion: "35572"
    uid: a3ed8c39-aace-4b90-b4a3-9de3a95395e1
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:12Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: ip-10-0-133-103.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:12Z"
  message: 'Node ip-10-0-133-103.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T04:24:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:12Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbac7e5d9dc4
    namespace: default
    resourceVersion: "5862"
    uid: f1dddbd3-c6e5-4cc1-8280-fa70d96a89fe
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-103.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-103.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:12Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: ip-10-0-133-103.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:12Z"
  message: 'Node ip-10-0-133-103.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T04:24:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:12Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbac7e5e04cb
    namespace: default
    resourceVersion: "5863"
    uid: 1cf7f2f6-18ec-4747-81b7-a587a44f969f
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-103.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-103.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:12Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: ip-10-0-133-103.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:12Z"
  message: 'Node ip-10-0-133-103.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T04:24:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:12Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbac7e5e2982
    namespace: default
    resourceVersion: "5865"
    uid: 2c855bf0-6c73-4e6e-9f97-9bc8489ffc53
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-103.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-103.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:12Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: ip-10-0-133-103.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:12Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T04:24:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:12Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbac81bbc95c
    namespace: default
    resourceVersion: "5767"
    uid: 729a40d5-3097-4dae-a3e9-30d22ef8014f
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-103.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-103.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:13Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    resourceVersion: "5770"
    uid: 2fb8238c-5380-4af6-ad90-7023e8271a8d
  kind: Event
  lastTimestamp: "2026-04-16T04:24:13Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T04:24:13Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T04:24:13Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbac984ee822
    namespace: default
    resourceVersion: "5871"
    uid: 53380822-a0c1-4d7e-9955-4ccedd1aea6e
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:15Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: 2fb8238c-5380-4af6-ad90-7023e8271a8d
  kind: Event
  lastTimestamp: "2026-04-16T04:24:15Z"
  message: 'Node ip-10-0-133-103.ec2.internal event: Registered Node ip-10-0-133-103.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T04:24:15Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T04:24:15Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbad46771570
    namespace: default
    resourceVersion: "5948"
    uid: 37d762e4-15a4-4a07-9589-d2f043320458
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:39Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    resourceVersion: "6595"
    uid: 2fb8238c-5380-4af6-ad90-7023e8271a8d
  kind: Event
  lastTimestamp: "2026-04-16T04:24:39Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-133-103.ec2.internal, error getting gateway config for node ip-10-0-133-103.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-133-103.ec2.internal", failed to update chassis to local for local node ip-10-0-133-103.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-133-103.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-133-103.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T04:24:39Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ip-10-0-133-103
      operation: Update
      time: "2026-04-16T04:24:39Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbb2c1685872
    namespace: default
    resourceVersion: "6600"
    uid: 771fffe2-5ee4-471f-a56a-98d6dbb313ed
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: ip-10-0-133-103.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:45Z"
  message: 'Node ip-10-0-133-103.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T04:24:45Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:45Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbb422f1324d
    namespace: default
    resourceVersion: "6686"
    uid: d67b7c68-d659-4c2d-9038-4f7efb9d941b
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-103.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-103.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:26:01Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-103.ec2.internal
    uid: 2fb8238c-5380-4af6-ad90-7023e8271a8d
  kind: Event
  lastTimestamp: "2026-04-16T04:26:01Z"
  message: 'Node ip-10-0-133-103.ec2.internal event: Registered Node ip-10-0-133-103.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T04:26:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T04:26:01Z"
    name: ip-10-0-133-103.ec2.internal.18a6bbc5c63db044
    namespace: default
    resourceVersion: "7821"
    uid: 55c691e2-9af5-4f7e-89f2-18a8ff5c8eb3
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: ip-10-0-133-81.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: 'Node ip-10-0-133-81.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbaa9d31b1fc
    namespace: default
    resourceVersion: "5451"
    uid: 3da68f7d-0c86-4490-a275-116bf9d485bc
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-81.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-81.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: ip-10-0-133-81.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: 'Node ip-10-0-133-81.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbaa9d3207fa
    namespace: default
    resourceVersion: "5452"
    uid: efb09bf1-d737-484b-b739-00c9e2e6bf9b
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-81.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-81.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: ip-10-0-133-81.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: 'Node ip-10-0-133-81.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbaa9d323259
    namespace: default
    resourceVersion: "5453"
    uid: f132511c-d402-4fde-9d65-c4fc15b0cfd8
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-81.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-81.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: ip-10-0-133-81.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbaaa077b731
    namespace: default
    resourceVersion: "5395"
    uid: ad721850-9588-477d-af0e-76214b0f16f1
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-81.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-81.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    resourceVersion: "5400"
    uid: 29e27961-be7f-4bb1-a5da-4de596c531e1
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbaab6b2fbec
    namespace: default
    resourceVersion: "5500"
    uid: 8ad60442-b8c2-498b-a47a-33414bbfd208
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:05Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: 29e27961-be7f-4bb1-a5da-4de596c531e1
  kind: Event
  lastTimestamp: "2026-04-16T04:24:05Z"
  message: 'Node ip-10-0-133-81.ec2.internal event: Registered Node ip-10-0-133-81.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T04:24:05Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T04:24:05Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbaaf2527665
    namespace: default
    resourceVersion: "5639"
    uid: 973e7e1f-6144-4d89-ad04-999bf055563a
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:37Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: ip-10-0-133-81.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:37Z"
  message: 'Node ip-10-0-133-81.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T04:24:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:37Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbb240a3b54b
    namespace: default
    resourceVersion: "6500"
    uid: 13eb88df-adc7-40e7-89f8-8321f185f250
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-81.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-81.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:26:01Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-81.ec2.internal
    uid: 29e27961-be7f-4bb1-a5da-4de596c531e1
  kind: Event
  lastTimestamp: "2026-04-16T04:26:01Z"
  message: 'Node ip-10-0-133-81.ec2.internal event: Registered Node ip-10-0-133-81.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T04:26:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T04:26:01Z"
    name: ip-10-0-133-81.ec2.internal.18a6bbc5c63c2b00
    namespace: default
    resourceVersion: "7810"
    uid: 0ec59145-83b8-47de-8653-fd7d5e5f8f4f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: ip-10-0-140-211.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaaa642ae0b
    namespace: default
    resourceVersion: "5403"
    uid: ef32cd18-8f78-4f15-8d36-bd8224701db5
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-211.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-211.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: ip-10-0-140-211.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: 'Node ip-10-0-140-211.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaaa7d39645
    namespace: default
    resourceVersion: "5454"
    uid: 81cfb9cb-3bae-4b2c-9842-1c27491ee670
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-211.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-211.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: ip-10-0-140-211.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: 'Node ip-10-0-140-211.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaaa7d3d7e8
    namespace: default
    resourceVersion: "5458"
    uid: c88e5161-c444-4392-b250-f3fce31fd2fc
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-211.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-211.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: ip-10-0-140-211.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: 'Node ip-10-0-140-211.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaaa7d3fc64
    namespace: default
    resourceVersion: "5462"
    uid: 1ef2e584-891c-43ff-a3e3-e52dbeec40be
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-211.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-211.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:04Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: ip-10-0-140-211.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:04Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T04:24:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:04Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaaaac7b74b
    namespace: default
    resourceVersion: "5427"
    uid: 5ef6e6a9-fec7-4db1-b70a-82274e76eec8
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-211.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-211.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:05Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    resourceVersion: "5474"
    uid: 15f6f7dd-de4f-4104-a46c-25d0dd598df5
  kind: Event
  lastTimestamp: "2026-04-16T04:24:05Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T04:24:05Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T04:24:05Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaac4a4293a
    namespace: default
    resourceVersion: "5588"
    uid: 52f31c9e-8323-4fe1-affc-bae69da90d38
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:05Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: 15f6f7dd-de4f-4104-a46c-25d0dd598df5
  kind: Event
  lastTimestamp: "2026-04-16T04:24:05Z"
  message: 'Node ip-10-0-140-211.ec2.internal event: Registered Node ip-10-0-140-211.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T04:24:05Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T04:24:05Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbaaf254099c
    namespace: default
    resourceVersion: "5640"
    uid: b6be2469-2718-4384-94db-54197b491b7f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:31Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    resourceVersion: "6320"
    uid: 15f6f7dd-de4f-4104-a46c-25d0dd598df5
  kind: Event
  lastTimestamp: "2026-04-16T04:24:31Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-140-211.ec2.internal, error getting gateway config for node ip-10-0-140-211.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-140-211.ec2.internal", failed to update chassis to local for local node ip-10-0-140-211.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-140-211.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-140-211.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T04:24:31Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: ip-10-0-140-211
      operation: Update
      time: "2026-04-16T04:24:31Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbb0ee962f26
    namespace: default
    resourceVersion: "6322"
    uid: 91842292-f2c6-4c18-ac3d-e79e8ecef08f
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:24:37Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: ip-10-0-140-211.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T04:24:37Z"
  message: 'Node ip-10-0-140-211.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T04:24:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:reportingInstance: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: kubelet
      operation: Update
      time: "2026-04-16T04:24:37Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbb24cb48531
    namespace: default
    resourceVersion: "6525"
    uid: e8a1df2f-5af1-4ec6-8a40-a5aaffe75d0d
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-211.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-211.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:26:01Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-211.ec2.internal
    uid: 15f6f7dd-de4f-4104-a46c-25d0dd598df5
  kind: Event
  lastTimestamp: "2026-04-16T04:26:01Z"
  message: 'Node ip-10-0-140-211.ec2.internal event: Registered Node ip-10-0-140-211.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T04:26:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T04:26:01Z"
    name: ip-10-0-140-211.ec2.internal.18a6bbc5c63d92da
    namespace: default
    resourceVersion: "7815"
    uid: 3ffdcb46-7ddc-4304-a1f7-194e45434d8b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-16T04:31:08Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16311"
    uid: 9ce0949b-30fe-4eaf-b7a6-49c71bd0a801
  kind: Event
  lastTimestamp: "2026-04-16T04:31:12Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-16T04:31:08Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:count: {}, f:firstTimestamp: {}, f:involvedObject: {}, f:lastTimestamp: {}, f:message: {}, f:reason: {}, f:reportingComponent: {}, f:source: {f:component: {}}, f:type: {}}
      manager: catalog
      operation: Update
      time: "2026-04-16T04:31:12Z"
    name: kuadrant-system.18a6bc0d53313432
    namespace: default
    resourceVersion: "16542"
    uid: 2e1a1c10-d613-48fb-b14d-105b5521e089
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-16T04:25:40Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:involvedObject: {}, f:message: {}, f:reason: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T04:25:40Z"
    name: kube-system.18a6bbc10c480259
    namespace: default
    resourceVersion: "7466"
    uid: 934bc113-77ee-45d4-adb3-bd584650a5ec
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846c4c797c-xzksb
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-16T04:25:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:involvedObject: {}, f:message: {}, f:reason: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T04:25:55Z"
    name: kube-system.18a6bbc48ad0c21e
    namespace: default
    resourceVersion: "7757"
    uid: 061695da-ffd3-451f-8c73-0d366302cfee
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846c4c797c-xzksb
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-16T04:25:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:involvedObject: {}, f:message: {}, f:reason: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T04:25:55Z"
    name: kube-system.18a6bbc48b00c4d6
    namespace: default
    resourceVersion: "7758"
    uid: 3661c232-c282-43c6-96cc-4d1fa424a18b
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846c4c797c-xzksb
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-16T04:25:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:involvedObject: {}, f:message: {}, f:reason: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T04:25:55Z"
    name: kube-system.18a6bbc48b4ea9f1
    namespace: default
    resourceVersion: "7759"
    uid: 986a24aa-00d5-4519-9e86-edb3eb2a4e5a
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846c4c797c-xzksb
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-16T04:26:55Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1: {f:involvedObject: {}, f:message: {}, f:reason: {}, f:source: {f:component: {}, f:host: {}}, f:type: {}}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T04:26:55Z"
    name: kube-system.18a6bbd28353b791
    namespace: default
    resourceVersion: "9994"
    uid: 6d22c99f-21bc-43f1-af8c-913c5cf0f254
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-846c4c797c-xzksb
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T04:18:57Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-16T04:18:57Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-16T04:18:57Z"
    name: openshift-kube-apiserver.18a6bb63312a5627
    namespace: default
    resourceVersion: "274"
    uid: a62231e7-117d-4553-a310-570f2cd0d8bc
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-75f7564b9b-pkh9h
  type: Warning
kind: EventList
metadata:
  resourceVersion: "43534"