---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:09:28Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-20T15:09:28Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-20T15:09:28Z"
    name: ba8d41b1-cad1-4271-9d69-e866dc08df28
    namespace: default
    resourceVersion: "12062"
    uid: 5b4b63bd-6038-41a0-9b52-ab87da49b960
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:03:01Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-22xtc
  kind: Event
  lastTimestamp: "2026-04-20T15:03:01Z"
  message: CSR "csr-22xtc" has been approved
  metadata:
    creationTimestamp: "2026-04-20T15:03:01Z"
    name: csr-22xtc.18a818dae3e25c39
    namespace: default
    resourceVersion: "6622"
    uid: 2fbd43d7-6540-4939-82b6-ead77e9ad133
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:50Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-chzgf
  kind: Event
  lastTimestamp: "2026-04-20T15:02:50Z"
  message: CSR "csr-chzgf" has been approved
  metadata:
    creationTimestamp: "2026-04-20T15:02:50Z"
    name: csr-chzgf.18a818d8490c66cc
    namespace: default
    resourceVersion: "6282"
    uid: 386fa127-0723-4569-9579-66df0e923102
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:54Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-hjjx4
  kind: Event
  lastTimestamp: "2026-04-20T15:02:54Z"
  message: CSR "csr-hjjx4" has been approved
  metadata:
    creationTimestamp: "2026-04-20T15:02:54Z"
    name: csr-hjjx4.18a818d937a3736b
    namespace: default
    resourceVersion: "6421"
    uid: 0afa9549-0dad-433c-91a4-0ab627979953
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:47Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-mj9bp
  kind: Event
  lastTimestamp: "2026-04-20T15:02:47Z"
  message: CSR "csr-mj9bp" has been approved
  metadata:
    creationTimestamp: "2026-04-20T15:02:47Z"
    name: csr-mj9bp.18a818d7bc504b0b
    namespace: default
    resourceVersion: "6203"
    uid: 32da787e-c38a-4b3e-b03e-17585aaa2fa8
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:54Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-v45dc
  kind: Event
  lastTimestamp: "2026-04-20T15:02:54Z"
  message: CSR "csr-v45dc" has been approved
  metadata:
    creationTimestamp: "2026-04-20T15:02:54Z"
    name: csr-v45dc.18a818d963014917
    namespace: default
    resourceVersion: "6437"
    uid: 9f81e08e-e0ac-4462-8104-33f484f2b2da
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:45Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-xqwwp
  kind: Event
  lastTimestamp: "2026-04-20T15:02:45Z"
  message: CSR "csr-xqwwp" has been approved
  metadata:
    creationTimestamp: "2026-04-20T15:02:45Z"
    name: csr-xqwwp.18a818d747895445
    namespace: default
    resourceVersion: "6160"
    uid: 48be396e-2ce4-4e5d-a788-b2b25e6d36e2
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 12
  eventTime: null
  firstTimestamp: "2026-04-20T15:10:21Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14645"
    uid: 11c0b986-c957-4388-9fce-fd2d18d85e6a
  kind: Event
  lastTimestamp: "2026-04-20T15:10:34Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches
    for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-20T15:10:21Z"
    name: default-gateway.18a819415ffe3284
    namespace: default
    resourceVersion: "15025"
    uid: aadd40b5-2fe7-4389-bdca-be66468f1494
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-20T15:10:31Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14967"
    uid: 80e4620e-0b66-4cfa-bb3a-03b87a1c639a
  kind: Event
  lastTimestamp: "2026-04-20T15:10:59Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig
    metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig
    internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template
    internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1
    platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:80e4620e-0b66-4cfa-bb3a-03b87a1c639a
    platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1
    serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve
    app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template
    namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1
    blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve
    uid:80e4620e-0b66-4cfa-bb3a-03b87a1c639a]]] spec:map[template:map[containers:[map[args:[if
    [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs
    ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep
    -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n
    \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n
    \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop
    through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*;
    do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n
    hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\"
    # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo
    \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains
    'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\"
    && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n
    echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n
    else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n
    fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n
    done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt
    0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n
    echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n
    \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n
    \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n
    else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will
    not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer
    RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For
    SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n
    declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\";
    do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2
    IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*;
    do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null;
    then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n
    \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q
    \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null ||
    echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx},
    gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]}
    + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n
    # Find the most common GID index (most likely to be consistent across nodes)\n
    best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\";
    do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx}
    found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n
    best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts
    are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ];
    then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common:
    ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common
    in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\"
    -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using
    deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check
    if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\"
    ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX}
    from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n
    \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer
    RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n
    elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX:
    ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n
    export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export
    UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE]
    Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo
    \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX
    found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found,
    skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n
    --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS}
    \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n
    --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME
    value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]]
    image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3
    imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health
    port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10)
    timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001)
    protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health
    port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10)
    timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false)
    capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)
    seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log
    terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home
    name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache]
    map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000
    --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls
    --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true
    --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE
    valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR
    value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]]
    image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent
    livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000)
    scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)]
    name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]]
    readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000)
    scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)]
    resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false)
    capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)]
    terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError
    volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]]
    terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home]
    map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache]
    map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs`
    }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig:
    unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template:
    Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\":
    failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\":
    no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-20T15:10:31Z"
    name: default-kserve.18a81943a854030a
    namespace: default
    resourceVersion: "16295"
    uid: 1f4c9514-0a9e-48f8-8661-07db5b59a1f4
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 31
  eventTime: null
  firstTimestamp: "2026-04-20T15:10:20Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14399"
    uid: ba308165-5b23-4f4f-8b09-4d5b936f5473
  kind: Event
  lastTimestamp: "2026-04-20T15:29:59Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring
    because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-20T15:10:20Z"
    name: default-monitoring.18a8194116826268
    namespace: default
    resourceVersion: "37680"
    uid: 21d4efbd-acec-4818-8cda-aeeb4fc122d4
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: ip-10-0-129-115.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:30Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T15:02:30Z"
    name: ip-10-0-129-115.ec2.internal.18a818d396c39706
    namespace: default
    resourceVersion: "5642"
    uid: c8bae533-cbe7-4eca-ba2b-da221c3a4965
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-115.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-115.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: ip-10-0-129-115.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:30Z"
  message: 'Node ip-10-0-129-115.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T15:02:30Z"
    name: ip-10-0-129-115.ec2.internal.18a818d398dac3e5
    namespace: default
    resourceVersion: "5737"
    uid: f2efee6f-4ffc-4ceb-886b-db4b85a89a29
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-115.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-115.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: ip-10-0-129-115.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:30Z"
  message: 'Node ip-10-0-129-115.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T15:02:30Z"
    name: ip-10-0-129-115.ec2.internal.18a818d398db39a8
    namespace: default
    resourceVersion: "5738"
    uid: 91d3ee16-c5c0-4923-a68f-853f07587d85
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-115.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-115.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: ip-10-0-129-115.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:30Z"
  message: 'Node ip-10-0-129-115.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T15:02:30Z"
    name: ip-10-0-129-115.ec2.internal.18a818d398db6229
    namespace: default
    resourceVersion: "5740"
    uid: b4156a9b-7be6-4daa-ae6f-1f8d0415e4b1
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-115.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-115.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:30Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: ip-10-0-129-115.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:30Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T15:02:30Z"
    name: ip-10-0-129-115.ec2.internal.18a818d39b874d22
    namespace: default
    resourceVersion: "5648"
    uid: f77c8e78-554e-41c6-8057-26f964439acc
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-115.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-115.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:30Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    resourceVersion: "5651"
    uid: 3477c30b-2a6f-44e8-9f8b-294a62b68366
  kind: Event
  lastTimestamp: "2026-04-20T15:02:30Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T15:02:30Z"
    name: ip-10-0-129-115.ec2.internal.18a818d3b1784d6f
    namespace: default
    resourceVersion: "5747"
    uid: c8c71ed8-7d4a-4a31-a1aa-6555e8f85063
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:34Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: 3477c30b-2a6f-44e8-9f8b-294a62b68366
  kind: Event
  lastTimestamp: "2026-04-20T15:02:34Z"
  message: 'Node ip-10-0-129-115.ec2.internal event: Registered Node ip-10-0-129-115.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T15:02:34Z"
    name: ip-10-0-129-115.ec2.internal.18a818d495913ac2
    namespace: default
    resourceVersion: "5834"
    uid: c1520c86-9e42-44d8-a7f7-ff4e384bc0ce
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:03:02Z"
  involvedObject:
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: ip-10-0-129-115.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:03:02Z"
  message: 'Node ip-10-0-129-115.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T15:03:02Z"
    name: ip-10-0-129-115.ec2.internal.18a818db3d183dc4
    namespace: default
    resourceVersion: "6635"
    uid: 3d8aa29e-f29d-4924-af0e-ced2efa90910
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-129-115.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-129-115.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:05:28Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-129-115.ec2.internal
    uid: 3477c30b-2a6f-44e8-9f8b-294a62b68366
  kind: Event
  lastTimestamp: "2026-04-20T15:05:28Z"
  message: 'Node ip-10-0-129-115.ec2.internal event: Registered Node ip-10-0-129-115.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T15:05:28Z"
    name: ip-10-0-129-115.ec2.internal.18a818fd0d634dcc
    namespace: default
    resourceVersion: "8155"
    uid: e6ba5472-4acb-4bf8-a89a-7b4cbe7e897f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:23Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: ip-10-0-133-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:23Z"
  message: 'Node ip-10-0-133-198.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T15:02:23Z"
    name: ip-10-0-133-198.ec2.internal.18a818d1f703a6ba
    namespace: default
    resourceVersion: "5510"
    uid: 36d6e5f1-7161-40c8-b7b2-03e216a026a8
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:23Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: ip-10-0-133-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:23Z"
  message: 'Node ip-10-0-133-198.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T15:02:23Z"
    name: ip-10-0-133-198.ec2.internal.18a818d1f703ece3
    namespace: default
    resourceVersion: "5511"
    uid: 2f4babb0-5eec-41db-9aba-42103bc51271
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:23Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: ip-10-0-133-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:23Z"
  message: 'Node ip-10-0-133-198.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T15:02:23Z"
    name: ip-10-0-133-198.ec2.internal.18a818d1f7041126
    namespace: default
    resourceVersion: "5512"
    uid: 85e0dbd4-fb3e-4f82-a345-965765e3a31d
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:23Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: ip-10-0-133-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:23Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T15:02:23Z"
    name: ip-10-0-133-198.ec2.internal.18a818d1fa45af12
    namespace: default
    resourceVersion: "5425"
    uid: 2b630dae-48bc-4828-a59b-5be4e5b1a5ec
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:23Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    resourceVersion: "5427"
    uid: 699c6dd6-4b4d-4b92-af9f-99429bf9fdae
  kind: Event
  lastTimestamp: "2026-04-20T15:02:23Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T15:02:23Z"
    name: ip-10-0-133-198.ec2.internal.18a818d20f63295f
    namespace: default
    resourceVersion: "5516"
    uid: 8fd3ef0e-dcc6-4705-927a-bbafb06d6a95
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:24Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: 699c6dd6-4b4d-4b92-af9f-99429bf9fdae
  kind: Event
  lastTimestamp: "2026-04-20T15:02:24Z"
  message: 'Node ip-10-0-133-198.ec2.internal event: Registered Node ip-10-0-133-198.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T15:02:24Z"
    name: ip-10-0-133-198.ec2.internal.18a818d2416f91f4
    namespace: default
    resourceVersion: "5536"
    uid: 81ca7701-74d9-40f6-b5f0-930acc3b0547
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:49Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    resourceVersion: "6228"
    uid: 699c6dd6-4b4d-4b92-af9f-99429bf9fdae
  kind: Event
  lastTimestamp: "2026-04-20T15:02:49Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-133-198.ec2.internal,
    error getting gateway config for node ip-10-0-133-198.ec2.internal: k8s.ovn.org/l3-gateway-config
    annotation not found for node "ip-10-0-133-198.ec2.internal", failed to update
    chassis to local for local node ip-10-0-133-198.ec2.internal, error: failed to
    parse node chassis-id for node - ip-10-0-133-198.ec2.internal, error: k8s.ovn.org/node-chassis-id
    annotation not found for node ip-10-0-133-198.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-20T15:02:49Z"
    name: ip-10-0-133-198.ec2.internal.18a818d81b21c83b
    namespace: default
    resourceVersion: "6234"
    uid: 58198b04-f316-4a29-8a59-5a42f2cf5d68
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:55Z"
  involvedObject:
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: ip-10-0-133-198.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:55Z"
  message: 'Node ip-10-0-133-198.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T15:02:55Z"
    name: ip-10-0-133-198.ec2.internal.18a818d98008c1a8
    namespace: default
    resourceVersion: "6480"
    uid: 58d85df6-9b75-4787-b182-53594d202f28
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-133-198.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-133-198.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:05:28Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-133-198.ec2.internal
    uid: 699c6dd6-4b4d-4b92-af9f-99429bf9fdae
  kind: Event
  lastTimestamp: "2026-04-20T15:05:28Z"
  message: 'Node ip-10-0-133-198.ec2.internal event: Registered Node ip-10-0-133-198.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T15:05:28Z"
    name: ip-10-0-133-198.ec2.internal.18a818fd0d636078
    namespace: default
    resourceVersion: "8161"
    uid: a06fe35b-73f5-48d1-8a3a-b311df0269f9
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: ip-10-0-134-230.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:20Z"
  message: 'Node ip-10-0-134-230.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T15:02:20Z"
    name: ip-10-0-134-230.ec2.internal.18a818d151bd6710
    namespace: default
    resourceVersion: "5311"
    uid: facc6d2a-b73f-479b-970a-2ddcf387a29d
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-230.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-230.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: ip-10-0-134-230.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:20Z"
  message: 'Node ip-10-0-134-230.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T15:02:20Z"
    name: ip-10-0-134-230.ec2.internal.18a818d151bdc955
    namespace: default
    resourceVersion: "5312"
    uid: 0a9cb4c3-2a35-4d9d-ac6c-67c9110eec1f
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-230.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-230.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: ip-10-0-134-230.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:20Z"
  message: 'Node ip-10-0-134-230.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T15:02:20Z"
    name: ip-10-0-134-230.ec2.internal.18a818d151be36a4
    namespace: default
    resourceVersion: "5313"
    uid: 24728a67-bcf9-4b52-944c-7aecd87aea78
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-230.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-230.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:20Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: ip-10-0-134-230.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:20Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T15:02:20Z"
    name: ip-10-0-134-230.ec2.internal.18a818d154e433c9
    namespace: default
    resourceVersion: "5255"
    uid: e3324cb0-7417-4590-9520-d90694fd28f7
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-230.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-230.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:20Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    resourceVersion: "5257"
    uid: 7754fd33-b0c9-4bc7-b5a9-a33d681b7706
  kind: Event
  lastTimestamp: "2026-04-20T15:02:20Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T15:02:20Z"
    name: ip-10-0-134-230.ec2.internal.18a818d16cbf1e68
    namespace: default
    resourceVersion: "5342"
    uid: c17bb86b-d1db-4f7c-9fb6-669dc827554e
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:24Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: 7754fd33-b0c9-4bc7-b5a9-a33d681b7706
  kind: Event
  lastTimestamp: "2026-04-20T15:02:24Z"
  message: 'Node ip-10-0-134-230.ec2.internal event: Registered Node ip-10-0-134-230.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T15:02:24Z"
    name: ip-10-0-134-230.ec2.internal.18a818d2416d8051
    namespace: default
    resourceVersion: "5535"
    uid: d5b1dbe6-1fb2-4f3c-9b27-f968402e38be
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:02:51Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: ip-10-0-134-230.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T15:02:51Z"
  message: 'Node ip-10-0-134-230.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T15:02:51Z"
    name: ip-10-0-134-230.ec2.internal.18a818d8a2377c60
    namespace: default
    resourceVersion: "6342"
    uid: 602f31e5-1047-4e90-b12b-3f8fc08cf1e8
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-230.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-230.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T15:05:28Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-230.ec2.internal
    uid: 7754fd33-b0c9-4bc7-b5a9-a33d681b7706
  kind: Event
  lastTimestamp: "2026-04-20T15:05:28Z"
  message: 'Node ip-10-0-134-230.ec2.internal event: Registered Node ip-10-0-134-230.ec2.internal
    in Controller'
  metadata:
    creationTimestamp: "2026-04-20T15:05:28Z"
    name: ip-10-0-134-230.ec2.internal.18a818fd0d629163
    namespace: default
    resourceVersion: "8148"
    uid: 5fd96401-b032-439e-90bc-31ab27a9e341
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-20T15:11:32Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16547"
    uid: 1d40b37d-920d-4f08-bc76-c339b532d4be
  kind: Event
  lastTimestamp: "2026-04-20T15:11:36Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed
    to list bundles: rpc error: code = Unavailable desc = connection error: desc =
    "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-20T15:11:33Z"
    name: kuadrant-system.18a81951eeff7f80
    namespace: default
    resourceVersion: "17009"
    uid: c95de6e1-3b9a-473a-b63b-80e4690c1474
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-20T15:05:06Z"
    name: kube-system.18a818f7f7d1e342
    namespace: default
    resourceVersion: "7692"
    uid: 12be6bb0-da1f-4c53-924f-1227692bf2ed
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-545d87f5cd-djqm5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-20T15:05:21Z"
    name: kube-system.18a818fb76347e4a
    namespace: default
    resourceVersion: "7985"
    uid: 7b3bac71-95ff-4d25-8ea9-556e8622e9b9
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-545d87f5cd-djqm5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-20T15:05:21Z"
    name: kube-system.18a818fb766d22b1
    namespace: default
    resourceVersion: "7986"
    uid: 9547fb9c-6fa9-4c0e-bc0b-09bffd98af27
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-545d87f5cd-djqm5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-20T15:05:21Z"
    name: kube-system.18a818fb76cfe29a
    namespace: default
    resourceVersion: "7987"
    uid: 072046f5-24ae-4cfe-8c94-6b25dbbf3ad7
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-545d87f5cd-djqm5
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-20T15:06:21Z"
    name: kube-system.18a819096ec18f5e
    namespace: default
    resourceVersion: "9604"
    uid: e23d0bf7-c3c2-4ef8-9749-4f1160fb3225
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-545d87f5cd-djqm5
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T14:58:05Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-20T14:58:05Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-20T14:58:05Z"
    name: openshift-kube-apiserver.18a818960e09d8e8
    namespace: default
    resourceVersion: "274"
    uid: b8cc3a9d-5510-4fd2-9b30-7a1a696bfbe5
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-7fbbff79b6-dkd66
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46516"
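# A quick way to surface just the Warning events (the ProvisioningError,
# ResolutionFailed, and ErrorAddingResource entries) from a saved dump like this
# one, assuming yq v4 is installed and the list above was written to a file named
# events.yaml (hypothetical filename):
#
#   yq '.items[] | select(.type == "Warning") | .reason + ": " + .message' events.yaml
#
# Against a live cluster, the API server can apply the same filter directly:
#
#   kubectl get events -n default --field-selector type=Warning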