--- apiVersion: v1 items: - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:26:58Z" involvedObject: apiVersion: operator.openshift.io/v1alpha1 kind: IstioCSR kind: Event lastTimestamp: "2026-04-16T19:26:58Z" message: controller is starting metadata: creationTimestamp: "2026-04-16T19:26:58Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: cert-manager-operator operation: Update time: "2026-04-16T19:26:58Z" name: c505856b-2489-4714-97f8-a204ed16e271 namespace: default resourceVersion: "12431" uid: a1415908-d1e8-433a-84f9-e93c9232e951 reason: ControllerStarted reportingComponent: cert-manager-istio-csr-controller reportingInstance: "" source: component: cert-manager-istio-csr-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:44Z" involvedObject: kind: CertificateSigningRequest name: csr-8jnhx kind: Event lastTimestamp: "2026-04-16T19:18:44Z" message: CSR "csr-8jnhx" has been approved metadata: creationTimestamp: "2026-04-16T19:18:44Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T19:18:44Z" name: csr-8jnhx.18a6ec7ce0385610 namespace: default resourceVersion: "6877" uid: e711294c-9978-4b9e-b255-490545f122c6 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:21Z" involvedObject: kind: CertificateSigningRequest name: csr-96m8h kind: Event lastTimestamp: "2026-04-16T19:18:21Z" message: CSR "csr-96m8h" has been approved metadata: creationTimestamp: "2026-04-16T19:18:21Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T19:18:21Z" name: csr-96m8h.18a6ec778c0e5c46 namespace: default resourceVersion: "6130" uid: 2b96838d-2c40-4526-9c8a-f614f5d859e8 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:27Z" involvedObject: kind: CertificateSigningRequest name: csr-qm79v kind: Event lastTimestamp: "2026-04-16T19:18:27Z" message: CSR "csr-qm79v" has been approved metadata: creationTimestamp: "2026-04-16T19:18:27Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T19:18:27Z" name: csr-qm79v.18a6ec7912877821 namespace: default resourceVersion: "6206" uid: 23552658-556a-4367-8bf9-159563cd697b reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:41Z" involvedObject: kind: CertificateSigningRequest name: csr-qvhbc kind: Event lastTimestamp: "2026-04-16T19:18:41Z" message: CSR "csr-qvhbc" has been approved metadata: creationTimestamp: "2026-04-16T19:18:41Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T19:18:41Z" name: csr-qvhbc.18a6ec7c53653f12 namespace: default resourceVersion: "6743" uid: f3362479-6468-4c2b-853d-f2d9ce939aa0 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:38Z" involvedObject: kind: CertificateSigningRequest name: csr-r8rld kind: Event lastTimestamp: "2026-04-16T19:18:38Z" message: CSR "csr-r8rld" has been approved metadata: creationTimestamp: "2026-04-16T19:18:38Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T19:18:38Z" name: csr-r8rld.18a6ec7b7859c01d namespace: default resourceVersion: "6676" uid: 633a9b93-1440-4550-9371-b2a8da4309fe reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:34Z" involvedObject: kind: CertificateSigningRequest name: csr-vz2rd kind: Event lastTimestamp: "2026-04-16T19:18:34Z" message: CSR "csr-vz2rd" has been approved metadata: creationTimestamp: "2026-04-16T19:18:34Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T19:18:34Z" name: csr-vz2rd.18a6ec7ab2d2a516 namespace: default resourceVersion: "6470" uid: 8c7679ff-f5e9-41fe-b7cc-cd87d057e3fc reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 11 eventTime: null firstTimestamp: "2026-04-16T19:27:48Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: GatewayConfig name: default-gateway resourceVersion: "14683" uid: eb94d78f-68f9-4aca-a7cf-1c0ea336ffdb kind: Event lastTimestamp: "2026-04-16T19:27:56Z" message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"' metadata: creationTimestamp: "2026-04-16T19:27:48Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T19:27:56Z" name: default-gateway.18a6ecfb8f905456 namespace: default resourceVersion: "15065" uid: 3750ecb0-bb33-4705-8b2c-f905af34c8e8 reason: ProvisioningError reportingComponent: gatewayconfig reportingInstance: "" source: component: gatewayconfig type: Warning - apiVersion: v1 count: 13 eventTime: null firstTimestamp: "2026-04-16T19:27:58Z" involvedObject: apiVersion: components.platform.opendatahub.io/v1alpha1 kind: Kserve name: default-kserve resourceVersion: "15160" uid: 0a3fe2ea-51ba-41a8-947d-820afd9b13a6 kind: Event lastTimestamp: "2026-04-16T19:28:25Z" message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:0a3fe2ea-51ba-41a8-947d-820afd9b13a6 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:0a3fe2ea-51ba-41a8-947d-820afd9b13a6]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\"" metadata: creationTimestamp: "2026-04-16T19:27:58Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T19:28:25Z" name: default-kserve.18a6ecfde110bb34 namespace: default resourceVersion: "16457" uid: 86d7b12e-84e8-42c5-a70b-54d05586b69b reason: ProvisioningError reportingComponent: kserve reportingInstance: "" source: component: kserve type: Warning - apiVersion: v1 count: 30 eventTime: null firstTimestamp: "2026-04-16T19:27:47Z" involvedObject: apiVersion: services.platform.opendatahub.io/v1alpha1 kind: Monitoring name: default-monitoring resourceVersion: "14367" uid: d422f51e-99b1-4668-8607-397ec0e2be37 kind: Event lastTimestamp: "2026-04-16T19:47:25Z" message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache' metadata: creationTimestamp: "2026-04-16T19:27:47Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: manager operation: Update time: "2026-04-16T19:47:25Z" name: default-monitoring.18a6ecfb533e6aa7 namespace: default resourceVersion: "37864" uid: e22a7c00-62dd-4afa-ba57-2447a4a8bbb6 reason: ProvisioningError reportingComponent: monitoring reportingInstance: "" source: component: monitoring type: Warning - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:17:52Z" involvedObject: kind: Node name: ip-10-0-128-123.ec2.internal uid: ip-10-0-128-123.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:17:53Z" message: 'Node ip-10-0-128-123.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T19:17:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:17:53Z" name: ip-10-0-128-123.ec2.internal.18a6ec70f6ebcd0e namespace: default resourceVersion: "5378" uid: fd0cfb26-a6aa-4a79-86c2-8e4c1cc46325 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-128-123.ec2.internal source: component: kubelet host: ip-10-0-128-123.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:17:52Z" involvedObject: kind: Node name: ip-10-0-128-123.ec2.internal uid: ip-10-0-128-123.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:17:53Z" message: 'Node ip-10-0-128-123.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T19:17:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:17:53Z" name: ip-10-0-128-123.ec2.internal.18a6ec70f6ec0f13 namespace: default resourceVersion: "5379" uid: 27a78c81-4e39-44cf-a5f4-ed2cf0c65359 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-128-123.ec2.internal source: component: kubelet host: ip-10-0-128-123.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:17:52Z" involvedObject: kind: Node name: ip-10-0-128-123.ec2.internal uid: ip-10-0-128-123.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:17:53Z" message: 'Node ip-10-0-128-123.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T19:17:52Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:17:53Z" name: ip-10-0-128-123.ec2.internal.18a6ec70f6ec329a namespace: default resourceVersion: "5380" uid: bb105730-dbbd-41f8-a442-6379f4befadf reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-128-123.ec2.internal source: component: kubelet host: ip-10-0-128-123.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:17:53Z" involvedObject: kind: Node name: ip-10-0-128-123.ec2.internal uid: ip-10-0-128-123.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:17:53Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-16T19:17:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:17:53Z" name: ip-10-0-128-123.ec2.internal.18a6ec70f9af9cec namespace: default resourceVersion: "5318" uid: 8297308f-f52b-4d76-a580-2f288d13c750 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-128-123.ec2.internal source: component: kubelet host: ip-10-0-128-123.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:17:53Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-128-123.ec2.internal resourceVersion: "5319" uid: 233caef2-43ab-40b1-88d3-e328daadfea6 kind: Event lastTimestamp: "2026-04-16T19:17:53Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T19:17:53Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T19:17:53Z" name: ip-10-0-128-123.ec2.internal.18a6ec71151c9f15 namespace: default resourceVersion: "5394" uid: 6c312f87-cb30-4b1b-b227-7948efd0e0ca reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:17:56Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-128-123.ec2.internal uid: 233caef2-43ab-40b1-88d3-e328daadfea6 kind: Event lastTimestamp: "2026-04-16T19:17:56Z" message: 'Node ip-10-0-128-123.ec2.internal event: Registered Node ip-10-0-128-123.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T19:17:56Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T19:17:56Z" name: ip-10-0-128-123.ec2.internal.18a6ec71bf6fb063 namespace: default resourceVersion: "5499" uid: f856c82f-9155-468b-ab3f-8cfe00f8498b reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:29Z" involvedObject: kind: Node name: ip-10-0-128-123.ec2.internal uid: ip-10-0-128-123.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:29Z" message: 'Node ip-10-0-128-123.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T19:18:29Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:29Z" name: ip-10-0-128-123.ec2.internal.18a6ec79709022bf namespace: default resourceVersion: "6237" uid: 4acb6ac8-cd79-46cc-87ec-02dc05791063 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-128-123.ec2.internal source: component: kubelet host: ip-10-0-128-123.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:19:13Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-128-123.ec2.internal uid: 233caef2-43ab-40b1-88d3-e328daadfea6 kind: Event lastTimestamp: "2026-04-16T19:19:13Z" message: 'Node ip-10-0-128-123.ec2.internal event: Registered Node ip-10-0-128-123.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T19:19:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T19:19:13Z" name: ip-10-0-128-123.ec2.internal.18a6ec839c5124df namespace: default resourceVersion: "7608" uid: e45ebf33-aa6f-4cf8-863f-368195fe3ff5 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:08Z" involvedObject: kind: Node name: ip-10-0-130-163.ec2.internal uid: ip-10-0-130-163.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:08Z" message: Starting kubelet. metadata: creationTimestamp: "2026-04-16T19:18:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:08Z" name: ip-10-0-130-163.ec2.internal.18a6ec74a2f48ac5 namespace: default resourceVersion: "5634" uid: b4322205-3ca8-42f2-9836-826d1d9a52c3 reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-130-163.ec2.internal source: component: kubelet host: ip-10-0-130-163.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:18:08Z" involvedObject: kind: Node name: ip-10-0-130-163.ec2.internal uid: ip-10-0-130-163.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:08Z" message: 'Node ip-10-0-130-163.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T19:18:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:09Z" name: ip-10-0-130-163.ec2.internal.18a6ec74a4dfd52a namespace: default resourceVersion: "5732" uid: fc7798b2-e99e-4463-ab8d-12cb4beaeee0 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-130-163.ec2.internal source: component: kubelet host: ip-10-0-130-163.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:18:08Z" involvedObject: kind: Node name: ip-10-0-130-163.ec2.internal uid: ip-10-0-130-163.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:08Z" message: 'Node ip-10-0-130-163.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T19:18:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:09Z" name: ip-10-0-130-163.ec2.internal.18a6ec74a4e03287 namespace: default resourceVersion: "5734" uid: 66da1e6f-dc83-4501-89e5-1c881290c7de reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-130-163.ec2.internal source: component: kubelet host: ip-10-0-130-163.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:18:08Z" involvedObject: kind: Node name: ip-10-0-130-163.ec2.internal uid: ip-10-0-130-163.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:08Z" message: 'Node ip-10-0-130-163.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T19:18:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:09Z" name: ip-10-0-130-163.ec2.internal.18a6ec74a4e06b17 namespace: default resourceVersion: "5735" uid: a1d32c50-2768-4095-ab16-5f145f2ebddd reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-130-163.ec2.internal source: component: kubelet host: ip-10-0-130-163.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:08Z" involvedObject: kind: Node name: ip-10-0-130-163.ec2.internal uid: ip-10-0-130-163.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:08Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-16T19:18:08Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:08Z" name: ip-10-0-130-163.ec2.internal.18a6ec74a76eba8e namespace: default resourceVersion: "5638" uid: 30f9acb0-d1de-4c42-8f04-66120d680dbc reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-130-163.ec2.internal source: component: kubelet host: ip-10-0-130-163.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:09Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-163.ec2.internal resourceVersion: "5640" uid: a39bb71d-87c9-41f7-90e4-076597ec46cd kind: Event lastTimestamp: "2026-04-16T19:18:09Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T19:18:09Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T19:18:09Z" name: ip-10-0-130-163.ec2.internal.18a6ec74bcdeb566 namespace: default resourceVersion: "5738" uid: 519019ad-e7c5-4359-b27c-1ee08a3b4438 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:11Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-163.ec2.internal uid: a39bb71d-87c9-41f7-90e4-076597ec46cd kind: Event lastTimestamp: "2026-04-16T19:18:11Z" message: 'Node ip-10-0-130-163.ec2.internal event: Registered Node ip-10-0-130-163.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T19:18:11Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T19:18:11Z" name: ip-10-0-130-163.ec2.internal.18a6ec753d9e06ff namespace: default resourceVersion: "5794" uid: 19024b29-a623-45d9-b174-e4c19bcc9dbc reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:36Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-163.ec2.internal resourceVersion: "6634" uid: a39bb71d-87c9-41f7-90e4-076597ec46cd kind: Event lastTimestamp: "2026-04-16T19:18:36Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-130-163.ec2.internal, error getting gateway config for node ip-10-0-130-163.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-130-163.ec2.internal", failed to update chassis to local for local node ip-10-0-130-163.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-130-163.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-130-163.ec2.internal]' metadata: creationTimestamp: "2026-04-16T19:18:36Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-130-163 operation: Update time: "2026-04-16T19:18:36Z" name: ip-10-0-130-163.ec2.internal.18a6ec7b10b81e5a namespace: default resourceVersion: "6638" uid: ef739a9b-d21a-428d-b616-d61fb04ea88f reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:43Z" involvedObject: kind: Node name: ip-10-0-130-163.ec2.internal uid: ip-10-0-130-163.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:43Z" message: 'Node ip-10-0-130-163.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T19:18:43Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:43Z" name: ip-10-0-130-163.ec2.internal.18a6ec7ca4f81e0b namespace: default resourceVersion: "6789" uid: b921df03-d707-461a-bdb3-d381233a1c32 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-130-163.ec2.internal source: component: kubelet host: ip-10-0-130-163.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:19:13Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-163.ec2.internal uid: a39bb71d-87c9-41f7-90e4-076597ec46cd kind: Event lastTimestamp: "2026-04-16T19:19:13Z" message: 'Node ip-10-0-130-163.ec2.internal event: Registered Node ip-10-0-130-163.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T19:19:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T19:19:13Z" name: ip-10-0-130-163.ec2.internal.18a6ec839c5178db namespace: default resourceVersion: "7617" uid: 534cb9b8-ada6-4ced-b0d9-e174f42a0d4c reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:18:12Z" involvedObject: kind: Node name: ip-10-0-130-83.ec2.internal uid: ip-10-0-130-83.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:13Z" message: 'Node ip-10-0-130-83.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T19:18:12Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:13Z" name: ip-10-0-130-83.ec2.internal.18a6ec759fa3c383 namespace: default resourceVersion: "5924" uid: 95d87dfa-8298-4d82-a17f-177596df5695 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-130-83.ec2.internal source: component: kubelet host: ip-10-0-130-83.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:18:12Z" involvedObject: kind: Node name: ip-10-0-130-83.ec2.internal uid: ip-10-0-130-83.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:13Z" message: 'Node ip-10-0-130-83.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T19:18:12Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:13Z" name: ip-10-0-130-83.ec2.internal.18a6ec759fa40b26 namespace: default resourceVersion: "5926" uid: 7f9bb85e-7cfa-4557-963e-02c5b4b06e38 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-130-83.ec2.internal source: component: kubelet host: ip-10-0-130-83.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T19:18:12Z" involvedObject: kind: Node name: ip-10-0-130-83.ec2.internal uid: ip-10-0-130-83.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:13Z" message: 'Node ip-10-0-130-83.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T19:18:12Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:13Z" name: ip-10-0-130-83.ec2.internal.18a6ec759fa42ec5 namespace: default resourceVersion: "5931" uid: e3022b64-cb07-4180-94cf-1074a6cd4859 reason: NodeHasSufficientPID reportingComponent: kubelet reportingInstance: ip-10-0-130-83.ec2.internal source: component: kubelet host: ip-10-0-130-83.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:13Z" involvedObject: kind: Node name: ip-10-0-130-83.ec2.internal uid: ip-10-0-130-83.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:13Z" message: Updated Node Allocatable limit across pods metadata: creationTimestamp: "2026-04-16T19:18:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:13Z" name: ip-10-0-130-83.ec2.internal.18a6ec75a22789da namespace: default resourceVersion: "5832" uid: 30044b9a-5544-42fc-b4f2-02edd825bb42 reason: NodeAllocatableEnforced reportingComponent: kubelet reportingInstance: ip-10-0-130-83.ec2.internal source: component: kubelet host: ip-10-0-130-83.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:13Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-83.ec2.internal resourceVersion: "5835" uid: 861f0726-925b-43ea-a73a-6871578acb44 kind: Event lastTimestamp: "2026-04-16T19:18:13Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T19:18:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T19:18:13Z" name: ip-10-0-130-83.ec2.internal.18a6ec75b8c52614 namespace: default resourceVersion: "5939" uid: 79b2a6d7-c357-47db-8c7b-4e4fe114ddac reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:16Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-83.ec2.internal uid: 861f0726-925b-43ea-a73a-6871578acb44 kind: Event lastTimestamp: "2026-04-16T19:18:16Z" message: 'Node ip-10-0-130-83.ec2.internal event: Registered Node ip-10-0-130-83.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T19:18:16Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T19:18:16Z" name: ip-10-0-130-83.ec2.internal.18a6ec7667b18095 namespace: default resourceVersion: "6015" uid: 2fe3a148-384f-41fd-8d14-8c32ee27c114 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:39Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-83.ec2.internal resourceVersion: "6711" uid: 861f0726-925b-43ea-a73a-6871578acb44 kind: Event lastTimestamp: "2026-04-16T19:18:39Z" message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-130-83.ec2.internal, error getting gateway config for node ip-10-0-130-83.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-130-83.ec2.internal", failed to update chassis to local for local node ip-10-0-130-83.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-130-83.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-130-83.ec2.internal]' metadata: creationTimestamp: "2026-04-16T19:18:39Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ip-10-0-130-83 operation: Update time: "2026-04-16T19:18:39Z" name: ip-10-0-130-83.ec2.internal.18a6ec7bd3f21821 namespace: default resourceVersion: "6713" uid: a81b37cf-13fa-466d-bad1-3a308706f8e4 reason: ErrorAddingResource reportingComponent: ovnk-controlplane reportingInstance: "" source: component: ovnk-controlplane type: Warning - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:18:45Z" involvedObject: kind: Node name: ip-10-0-130-83.ec2.internal uid: ip-10-0-130-83.ec2.internal kind: Event lastTimestamp: "2026-04-16T19:18:45Z" message: 'Node ip-10-0-130-83.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T19:18:45Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T19:18:45Z" name: ip-10-0-130-83.ec2.internal.18a6ec7d2a092a7d namespace: default resourceVersion: "7004" uid: 30ecbbc4-8832-4114-8228-90507db61e12 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-130-83.ec2.internal source: component: kubelet host: ip-10-0-130-83.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:19:13Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-130-83.ec2.internal uid: 861f0726-925b-43ea-a73a-6871578acb44 kind: Event lastTimestamp: "2026-04-16T19:19:13Z" message: 'Node ip-10-0-130-83.ec2.internal event: Registered Node ip-10-0-130-83.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T19:19:13Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T19:19:13Z" name: ip-10-0-130-83.ec2.internal.18a6ec839c51874f namespace: default resourceVersion: "7628" uid: 18ed6036-ccae-4ff1-a257-4e10545067dc reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 10 eventTime: null firstTimestamp: "2026-04-16T19:28:59Z" involvedObject: apiVersion: v1 kind: Namespace name: kuadrant-system resourceVersion: "16689" uid: 1579968d-87f6-41d4-a5c6-a9a9fda7013d kind: Event lastTimestamp: "2026-04-16T19:29:03Z" message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"' metadata: creationTimestamp: "2026-04-16T19:28:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: catalog operation: Update time: "2026-04-16T19:29:03Z" name: kuadrant-system.18a6ed0c2d6fa9ce namespace: default resourceVersion: "17148" uid: 63735752-089b-46fb-b008-2d83a6f444ce reason: ResolutionFailed reportingComponent: operator-lifecycle-manager reportingInstance: "" source: component: operator-lifecycle-manager type: Warning - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Received signal to terminate, becoming unready, but keeping serving metadata: creationTimestamp: "2026-04-16T19:18:46Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T19:18:46Z" name: kube-system.18a6ec7d86351cd5 namespace: default resourceVersion: "7073" uid: b1c8b31b-dbd1-4ff0-9963-fa286688f95d reason: TerminationStart reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-64d5c7f6bf-bv9cv type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: The minimal shutdown duration of 15s finished metadata: creationTimestamp: "2026-04-16T19:19:01Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T19:19:01Z" name: kube-system.18a6ec8104a0996e namespace: default resourceVersion: "7392" uid: ab268200-0aea-47a6-bdbf-0162e9a3d05b reason: TerminationMinimalShutdownDurationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-64d5c7f6bf-bv9cv type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: Server has stopped listening metadata: creationTimestamp: "2026-04-16T19:19:01Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T19:19:01Z" name: kube-system.18a6ec8104deee8b namespace: default resourceVersion: "7393" uid: 7bd6360e-6570-43a1-b463-7e58d4e95dbd reason: TerminationStoppedServing reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-64d5c7f6bf-bv9cv type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pre-shutdown hooks have been finished metadata: creationTimestamp: "2026-04-16T19:19:01Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T19:19:01Z" name: kube-system.18a6ec810554592a namespace: default resourceVersion: "7394" uid: 2eae775b-0784-420e-80be-07d8f29fc389 reason: TerminationPreShutdownHooksFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-64d5c7f6bf-bv9cv type: Normal - apiVersion: v1 eventTime: null firstTimestamp: null involvedObject: apiVersion: v1 kind: Namespace name: kube-system namespace: default kind: Event lastTimestamp: null message: All pending requests processed metadata: creationTimestamp: "2026-04-16T19:20:01Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:involvedObject: {} f:message: {} f:reason: {} f:source: f:component: {} f:host: {} f:type: {} manager: openshift-apiserver operation: Update time: "2026-04-16T19:20:01Z" name: kube-system.18a6ec8efd368bc8 namespace: default resourceVersion: "9795" uid: c686f56a-d18a-47c7-841b-e9c879cb47bb reason: TerminationGracefulTerminationFinished reportingComponent: "" reportingInstance: "" source: component: apiserver host: openshift-apiserver-64d5c7f6bf-bv9cv type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T19:13:35Z" involvedObject: apiVersion: v1 kind: Namespace name: openshift-kube-apiserver namespace: default kind: Event lastTimestamp: "2026-04-16T19:13:35Z" message: readyz=true metadata: creationTimestamp: "2026-04-16T19:13:35Z" name: openshift-kube-apiserver.18a6ec34eda6b8b6 namespace: default resourceVersion: "274" uid: 6962bed9-6b4f-460e-8026-d17f71d27111 reason: KubeAPIReadyz reportingComponent: "" reportingInstance: "" source: component: apiserver host: kube-apiserver-559ff448c4-4jwtn type: Warning kind: EventList metadata: resourceVersion: "46668"