--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:41:22Z" involvedObject: apiVersion: operator.openshift.io/v1alpha1 kind: IstioCSR kind: Event lastTimestamp: "2026-04-17T14:41:22Z" message: controller is starting metadata: creationTimestamp: "2026-04-17T14:41:22Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: cert-manager-operator operation: Update time: "2026-04-17T14:41:22Z" name: 4087a4b4-774c-400e-8fc8-34c681e15d85 namespace: default resourceVersion: "12126" uid: b1e33639-1406-4a2b-963b-800989f36b4e reason: ControllerStarted reportingComponent: cert-manager-istio-csr-controller reportingInstance: "" source: component: cert-manager-istio-csr-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:27Z" involvedObject: kind: CertificateSigningRequest name: csr-58m8v kind: Event lastTimestamp: "2026-04-17T14:34:27Z" message: CSR "csr-58m8v" has been approved metadata: creationTimestamp: "2026-04-17T14:34:27Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:34:27Z" name: csr-58m8v.18a72b8e17b83e7a namespace: default resourceVersion: "6275" uid: 18c9583a-cdc4-463d-a20b-4a5e9989fc4e reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:38Z" involvedObject: kind: CertificateSigningRequest name: csr-lf6t2 kind: Event lastTimestamp: "2026-04-17T14:34:38Z" message: CSR "csr-lf6t2" has been approved metadata: creationTimestamp: "2026-04-17T14:34:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:34:38Z" name: csr-lf6t2.18a72b90c0a07c65 namespace: default resourceVersion: "6633" uid: c47784b0-45ce-4eb8-a4ab-1e97204c9944 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:19Z" involvedObject: kind: CertificateSigningRequest name: csr-rr2rl kind: Event lastTimestamp: "2026-04-17T14:34:19Z" message: CSR "csr-rr2rl" has been approved metadata: creationTimestamp: "2026-04-17T14:34:19Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:34:19Z" name: csr-rr2rl.18a72b8c30716e53 namespace: default resourceVersion: "6120" uid: 3aa60821-4406-4841-b51c-b405e1767cca reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:20Z" involvedObject: kind: CertificateSigningRequest name: csr-v7h8s kind: Event lastTimestamp: "2026-04-17T14:34:20Z" message: CSR "csr-v7h8s" has been approved metadata: creationTimestamp: "2026-04-17T14:34:20Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:34:20Z" name: csr-v7h8s.18a72b8c69aeb862 namespace: default resourceVersion: "6143" uid: d2fd335b-a5b9-494f-852b-72991a38347c reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:26Z" involvedObject: kind: CertificateSigningRequest name: csr-vmbqb kind: Event lastTimestamp: "2026-04-17T14:34:26Z" message: CSR "csr-vmbqb" has been approved metadata: creationTimestamp: "2026-04-17T14:34:26Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:34:26Z" name: csr-vmbqb.18a72b8dd0474e69 namespace: default resourceVersion: "6237" uid: 33351e99-f643-4a54-a4db-a6a41e8787f9 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:32Z" involvedObject: kind: CertificateSigningRequest name: csr-w58ps kind: Event lastTimestamp: "2026-04-17T14:34:32Z" message: CSR "csr-w58ps" has been approved metadata: creationTimestamp: "2026-04-17T14:34:32Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-17T14:34:32Z" name: csr-w58ps.18a72b8f37db0b18 namespace: default resourceVersion: "6525" uid: 607c5e13-f6f4-43f1-92c4-ed5fb60ea5c6 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-17T14:42:13Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14740" uid: 1b040f5f-d4f1-4448-984e-238391d36ead kind: Event lastTimestamp: "2026-04-17T14:42:21Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-17T14:42:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T14:42:21Z" name: default-gateway.18a72bfabec243f7 namespace: default resourceVersion: "15037" uid: c3ea0ead-53e7-40da-b2b5-c6a0344bec31 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-17T14:42:24Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "15190" uid: 81d3f3a1-239b-4c53-92f7-377bdadc2d0c kind: Event lastTimestamp: "2026-04-17T14:42:51Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:81d3f3a1-239b-4c53-92f7-377bdadc2d0c platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:81d3f3a1-239b-4c53-92f7-377bdadc2d0c]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-17T14:42:24Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T14:42:51Z" name: default-kserve.18a72bfd41fbd7ed namespace: default resourceVersion: "16645" uid: 605203c9-bef3-4118-8a1d-0cfa94f8d037 reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 25 eventTime: null firstTimestamp: "2026-04-17T14:42:12Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "14515" uid: 5e9a3d3e-7cf1-4a17-8f12-264ed8bb54dd kind: Event lastTimestamp: "2026-04-17T14:42:59Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-17T14:42:12Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-17T14:42:59Z" name: default-monitoring.18a72bfa755fda38 namespace: default resourceVersion: "16774" uid: 7d7aea2a-aeb9-4029-a5e8-a993032e9a25 reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:33:54Z" involvedObject: kind: Node name: ip-10-0-129-134.ec2.internal uid: ip-10-0-129-134.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:54Z" message: 'Node ip-10-0-129-134.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T14:33:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:54Z" name: ip-10-0-129-134.ec2.internal.18a72b865e1f6d7c namespace: default resourceVersion: "5463" uid: a6bb4f20-5a3a-48f9-8dc1-e4dd9ed175dd reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-129-134.ec2.internal source: component: kubelet host: ip-10-0-129-134.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:33:54Z" involvedObject: kind: Node name: ip-10-0-129-134.ec2.internal uid: ip-10-0-129-134.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:54Z" message: 'Node ip-10-0-129-134.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T14:33:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:54Z" name: ip-10-0-129-134.ec2.internal.18a72b865e1fc4f2 namespace: default resourceVersion: "5464" uid: 9d7686d4-3d06-436c-9ee4-94bfdbaa4414 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-129-134.ec2.internal source: component: kubelet host: ip-10-0-129-134.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:33:54Z" involvedObject: kind: Node name: ip-10-0-129-134.ec2.internal uid: ip-10-0-129-134.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:54Z" message: 'Node ip-10-0-129-134.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T14:33:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:54Z" name: ip-10-0-129-134.ec2.internal.18a72b865e1ff070 namespace: default resourceVersion: "5466" uid: 03fc0cb9-4bac-4115-96a2-37eb98a37b3c reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-129-134.ec2.internal source: component: kubelet host: ip-10-0-129-134.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:33:54Z" involvedObject: kind: Node name: ip-10-0-129-134.ec2.internal uid: ip-10-0-129-134.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:54Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T14:33:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:54Z" name: ip-10-0-129-134.ec2.internal.18a72b86612457ba namespace: default resourceVersion: "5374" uid: f9eb63c3-6c5a-4da7-acd5-5d02142b74cc reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-129-134.ec2.internal source: component: kubelet host: ip-10-0-129-134.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:33:54Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-134.ec2.internal resourceVersion: "5375" uid: 92c07820-51da-4cf6-8e73-5b84c3a70d6a kind: Event lastTimestamp: "2026-04-17T14:33:54Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T14:33:54Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T14:33:54Z" name: ip-10-0-129-134.ec2.internal.18a72b86764e175a namespace: default resourceVersion: "5473" uid: 42ce122b-0ff4-457f-ba64-58fd0c821181 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:33:58Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-134.ec2.internal uid: 92c07820-51da-4cf6-8e73-5b84c3a70d6a kind: Event lastTimestamp: "2026-04-17T14:33:58Z" message: 'Node ip-10-0-129-134.ec2.internal event: Registered Node ip-10-0-129-134.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:33:58Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:33:58Z" name: ip-10-0-129-134.ec2.internal.18a72b87655bab6b namespace: default resourceVersion: "5586" uid: 5bbdde26-5149-4b14-9d84-747db16249e9 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:27Z" involvedObject: kind: Node name: ip-10-0-129-134.ec2.internal uid: ip-10-0-129-134.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:27Z" message: 'Node ip-10-0-129-134.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T14:34:27Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:27Z" name: ip-10-0-129-134.ec2.internal.18a72b8e1f6c5dae namespace: default resourceVersion: "6283" uid: 40b70beb-5794-43bc-894d-a52a396c7f8d reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-129-134.ec2.internal source: component: kubelet host: ip-10-0-129-134.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:36:41Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-129-134.ec2.internal uid: 92c07820-51da-4cf6-8e73-5b84c3a70d6a kind: Event lastTimestamp: "2026-04-17T14:36:41Z" message: 'Node ip-10-0-129-134.ec2.internal event: Registered Node ip-10-0-129-134.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:36:41Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:36:41Z" name: ip-10-0-129-134.ec2.internal.18a72bad69e3cb77 namespace: default resourceVersion: "7917" uid: 62c09eb3-75aa-4b4f-a70d-dc0afd9ddf81 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:33:51Z" involvedObject: kind: Node name: ip-10-0-135-180.ec2.internal uid: ip-10-0-135-180.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:51Z" message: 'Node ip-10-0-135-180.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T14:33:51Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:51Z" name: ip-10-0-135-180.ec2.internal.18a72b85d07dfbcb namespace: default resourceVersion: "5249" uid: e80a241c-9bbe-4cd9-aacd-84d8e57c44d4 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-135-180.ec2.internal source: component: kubelet host: ip-10-0-135-180.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:33:51Z" involvedObject: kind: Node name: ip-10-0-135-180.ec2.internal uid: ip-10-0-135-180.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:51Z" message: 'Node ip-10-0-135-180.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T14:33:51Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:52Z" name: ip-10-0-135-180.ec2.internal.18a72b85d07e5b91 namespace: default resourceVersion: "5256" uid: 69c88be6-8615-445f-a085-f96f0222f281 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-135-180.ec2.internal source: component: kubelet host: ip-10-0-135-180.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-17T14:33:51Z" involvedObject: kind: Node name: ip-10-0-135-180.ec2.internal uid: ip-10-0-135-180.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:51Z" message: 'Node ip-10-0-135-180.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T14:33:51Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:52Z" name: ip-10-0-135-180.ec2.internal.18a72b85d07e7ff2 namespace: default resourceVersion: "5264" uid: 2226cdb7-667b-4b7d-89e6-000ace8588db reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-135-180.ec2.internal source: component: kubelet host: ip-10-0-135-180.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:33:51Z" involvedObject: kind: Node name: ip-10-0-135-180.ec2.internal uid: ip-10-0-135-180.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:33:51Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T14:33:51Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:33:51Z" name: ip-10-0-135-180.ec2.internal.18a72b85d2be44bb namespace: default resourceVersion: "5202" uid: 99e13282-aae6-4258-a7d9-4a84aa90a11a reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-135-180.ec2.internal source: component: kubelet host: ip-10-0-135-180.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:33:52Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-135-180.ec2.internal resourceVersion: "5203" uid: 2b2259b1-0863-48b3-b423-a79ee9659f27 kind: Event lastTimestamp: "2026-04-17T14:33:52Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T14:33:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T14:33:52Z" name: ip-10-0-135-180.ec2.internal.18a72b85ef3a3859 namespace: default resourceVersion: "5303" uid: b5c8b0fb-e7a5-4c71-8e68-76fb0e37e939 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:33:53Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-135-180.ec2.internal uid: 2b2259b1-0863-48b3-b423-a79ee9659f27 kind: Event lastTimestamp: "2026-04-17T14:33:53Z" message: 'Node ip-10-0-135-180.ec2.internal event: Registered Node ip-10-0-135-180.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:33:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:33:53Z" name: ip-10-0-135-180.ec2.internal.18a72b863b4717c2 namespace: default resourceVersion: "5344" uid: 67c60e22-96a1-4d9d-814e-178453686426 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:21Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-135-180.ec2.internal resourceVersion: "6173" uid: 2b2259b1-0863-48b3-b423-a79ee9659f27 kind: Event lastTimestamp: "2026-04-17T14:34:21Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-135-180.ec2.internal, error getting gateway config for node ip-10-0-135-180.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-135-180.ec2.internal", failed to update chassis to local for local node ip-10-0-135-180.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-135-180.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-135-180.ec2.internal]' metadata: creationTimestamp: "2026-04-17T14:34:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-135-180 operation: Update time: "2026-04-17T14:34:21Z" name: ip-10-0-135-180.ec2.internal.18a72b8cc563906f namespace: default resourceVersion: "6176" uid: e0acc0ed-16b6-4067-a440-8a3dbf3d277e reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:28Z" involvedObject: kind: Node name: ip-10-0-135-180.ec2.internal uid: ip-10-0-135-180.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:28Z" message: 'Node ip-10-0-135-180.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T14:34:28Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:28Z" name: ip-10-0-135-180.ec2.internal.18a72b8e664e2f02 namespace: default resourceVersion: "6368" uid: df5c7dd9-4379-4c8f-9b17-60375e5f7d0b reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-135-180.ec2.internal source: component: kubelet host: ip-10-0-135-180.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:36:41Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-135-180.ec2.internal uid: 2b2259b1-0863-48b3-b423-a79ee9659f27 kind: Event lastTimestamp: "2026-04-17T14:36:41Z" message: 'Node ip-10-0-135-180.ec2.internal event: Registered Node ip-10-0-135-180.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:36:41Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:36:41Z" name: ip-10-0-135-180.ec2.internal.18a72bad69e4b77d namespace: default resourceVersion: "7920" uid: 67bfce68-9371-452a-9d96-4bfaea794463 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:07Z" involvedObject: kind: Node name: ip-10-0-143-171.ec2.internal uid: ip-10-0-143-171.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:07Z" message: Starting kubelet. metadata: creationTimestamp: "2026-04-17T14:34:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:07Z" name: ip-10-0-143-171.ec2.internal.18a72b8980f0ffa4 namespace: default resourceVersion: "5715" uid: 6f18325b-a49c-4883-9fdb-08d1692153ef reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-143-171.ec2.internal source: component: kubelet host: ip-10-0-143-171.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:34:07Z" involvedObject: kind: Node name: ip-10-0-143-171.ec2.internal uid: ip-10-0-143-171.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:07Z" message: 'Node ip-10-0-143-171.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-17T14:34:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:07Z" name: ip-10-0-143-171.ec2.internal.18a72b8982ac45f2 namespace: default resourceVersion: "5721" uid: 88c5783d-f19c-4c58-9e94-6c152d22c98c reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-143-171.ec2.internal source: component: kubelet host: ip-10-0-143-171.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:34:07Z" involvedObject: kind: Node name: ip-10-0-143-171.ec2.internal uid: ip-10-0-143-171.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:07Z" message: 'Node ip-10-0-143-171.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-17T14:34:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:07Z" name: ip-10-0-143-171.ec2.internal.18a72b8982acde08 namespace: default resourceVersion: "5723" uid: 47c5fac6-4ba8-40fd-a70f-42dca006c726 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-143-171.ec2.internal source: component: kubelet host: ip-10-0-143-171.ec2.internal type: Normal - apiVersion: v1 count: 2 eventTime: null firstTimestamp: "2026-04-17T14:34:07Z" involvedObject: kind: Node name: ip-10-0-143-171.ec2.internal uid: ip-10-0-143-171.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:07Z" message: 'Node ip-10-0-143-171.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-17T14:34:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:07Z" name: ip-10-0-143-171.ec2.internal.18a72b8982ad0a22 namespace: default resourceVersion: "5727" uid: 6ba9fc9b-2a74-434c-a72a-c6d91a52534b reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-143-171.ec2.internal source: component: kubelet host: ip-10-0-143-171.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:07Z" involvedObject: kind: Node name: ip-10-0-143-171.ec2.internal uid: ip-10-0-143-171.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:07Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-17T14:34:07Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:07Z" name: ip-10-0-143-171.ec2.internal.18a72b89856e7217 namespace: default resourceVersion: "5720" uid: faeeff27-33e8-444b-b24b-05c25b35d26a reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-143-171.ec2.internal source: component: kubelet host: ip-10-0-143-171.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:08Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-171.ec2.internal resourceVersion: "5722" uid: 8d293e4d-1426-48f7-a2e6-d5e81028546b kind: Event lastTimestamp: "2026-04-17T14:34:08Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-17T14:34:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-17T14:34:08Z" name: ip-10-0-143-171.ec2.internal.18a72b899a95467e namespace: default resourceVersion: "5813" uid: 010ee4df-56b1-4276-84a5-a5699043aadd reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:08Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-171.ec2.internal uid: 8d293e4d-1426-48f7-a2e6-d5e81028546b kind: Event lastTimestamp: "2026-04-17T14:34:08Z" message: 'Node ip-10-0-143-171.ec2.internal event: Registered Node ip-10-0-143-171.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:34:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:34:08Z" name: ip-10-0-143-171.ec2.internal.18a72b89b97ac82f namespace: default resourceVersion: "5822" uid: 1f1816b9-bee6-464d-8b10-213466d142a5 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:33Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-171.ec2.internal resourceVersion: "6541" uid: 8d293e4d-1426-48f7-a2e6-d5e81028546b kind: Event lastTimestamp: "2026-04-17T14:34:33Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-143-171.ec2.internal, error getting gateway config for node ip-10-0-143-171.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-143-171.ec2.internal", failed to update chassis to local for local node ip-10-0-143-171.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-143-171.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-143-171.ec2.internal]' metadata: creationTimestamp: "2026-04-17T14:34:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-143-171 operation: Update time: "2026-04-17T14:34:33Z" name: ip-10-0-143-171.ec2.internal.18a72b8f95526ebb namespace: default resourceVersion: "6543" uid: 7989abbc-3f3a-4169-bd73-6e77e62e0497 reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:34:39Z" involvedObject: kind: Node name: ip-10-0-143-171.ec2.internal uid: ip-10-0-143-171.ec2.internal kind: Event lastTimestamp: "2026-04-17T14:34:39Z" message: 'Node ip-10-0-143-171.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-17T14:34:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-17T14:34:39Z" name: ip-10-0-143-171.ec2.internal.18a72b910a1570a8 namespace: default resourceVersion: "6643" uid: 4c854368-88c8-4158-a629-b4e7ce638388 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-143-171.ec2.internal source: component: kubelet host: ip-10-0-143-171.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:36:41Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-143-171.ec2.internal uid: 8d293e4d-1426-48f7-a2e6-d5e81028546b kind: Event lastTimestamp: "2026-04-17T14:36:41Z" message: 'Node ip-10-0-143-171.ec2.internal event: Registered Node ip-10-0-143-171.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-17T14:36:41Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-17T14:36:41Z" name: ip-10-0-143-171.ec2.internal.18a72bad69e4e546 namespace: default resourceVersion: "7921" uid: 8f22639e-6bd9-4054-8171-0541737363c2 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 9 eventTime: null firstTimestamp: "2026-04-17T14:43:25Z" involvedObject: apiVersion: v1 kind: Namespace name: kuadrant-system resourceVersion: "16909" uid: 628ec934-2a1e-4b04-bb7e-9e915285bf06 kind: Event lastTimestamp: "2026-04-17T14:43:28Z" message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"' metadata: creationTimestamp: "2026-04-17T14:43:25Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: catalog operation: Update time: "2026-04-17T14:43:29Z" name: kuadrant-system.18a72c0b4fbaa407 namespace: default resourceVersion: "17133" uid: 95a68ae0-cd2b-4190-9916-6eafe9a1182c reason: ResolutionFailed reportingComponent: operator-lifecycle-manager reportingInstance: "" source: component: operator-lifecycle-manager type: Warning - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-17T14:36:24Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:36:24Z" name: kube-system.18a72ba9445936cf namespace: default resourceVersion: "7581" uid: bf41e618-543b-41b4-9b95-4ada16fdd80f reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-654c68ff59-k6jcx type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-17T14:36:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:36:39Z" name: kube-system.18a72bacc2bedf3b namespace: default resourceVersion: "7892" uid: b9011996-562d-4e17-b63c-87d625dfcf29 reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-654c68ff59-k6jcx type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-17T14:36:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:36:39Z" name: kube-system.18a72bacc2efbb42 namespace: default resourceVersion: "7893" uid: 99c79bc6-fb58-4bd7-b34d-2866f6a06139 reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-654c68ff59-k6jcx type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-17T14:36:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:36:39Z" name: kube-system.18a72bacc3251d5e namespace: default resourceVersion: "7894" uid: 55a67eda-fb57-4eb6-b5f4-8f9c33b47067 reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-654c68ff59-k6jcx type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-17T14:37:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-17T14:37:39Z" name: kube-system.18a72bbabb41819a namespace: default resourceVersion: "10105" uid: 314c7666-40d7-4760-b16d-c1cef0484f3a reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-654c68ff59-k6jcx type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-17T14:29:43Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-17T14:29:43Z" message: readyz=true metadata: creationTimestamp: "2026-04-17T14:29:43Z" name: openshift-kube-apiserver.18a72b4bf3d0d7f1 namespace: default resourceVersion: "274" uid: 54f77bbd-e302-4e5a-a708-5ce8abf213e9 reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: kube-apiserver-86c75d5cc7-9kjmp type: Warning kind: EventList metadata: resourceVersion: "27463"