---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:14:12Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-17T17:14:12Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-17T17:14:12Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: cert-manager-operator
      operation: Update
      time: "2026-04-17T17:14:12Z"
    name: 09526268-f34f-4401-8b0c-1bfbe47c113f
    namespace: default
    resourceVersion: "11981"
    uid: 77a34d8f-aa76-415b-b13e-294ad05c0ff3
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:08:07Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-5sps7
  kind: Event
  lastTimestamp: "2026-04-17T17:08:07Z"
  message: CSR "csr-5sps7" has been approved
  metadata:
    creationTimestamp: "2026-04-17T17:08:07Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-17T17:08:07Z"
    name: csr-5sps7.18a733f0e9efc82c
    namespace: default
    resourceVersion: "6445"
    uid: 4b31ed74-9848-4fca-95ac-cd803d4be062
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:08:17Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-hf8gp
  kind: Event
  lastTimestamp: "2026-04-17T17:08:17Z"
  message: CSR "csr-hf8gp" has been approved
  metadata:
    creationTimestamp: "2026-04-17T17:08:17Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-17T17:08:17Z"
    name: csr-hf8gp.18a733f31518e6f8
    namespace: default
    resourceVersion: "6706"
    uid: a725327d-fba4-4351-aee6-e4ee53032bd6
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:52Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-hlwqk
  kind: Event
  lastTimestamp: "2026-04-17T17:07:52Z"
  message: CSR "csr-hlwqk" has been approved
  metadata:
    creationTimestamp: "2026-04-17T17:07:53Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-17T17:07:53Z"
    name: csr-hlwqk.18a733ed75ee0860
    namespace: default
    resourceVersion: "5987"
    uid: a15a0512-0e36-4882-8419-1e7e27fd8bab
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:08:23Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-hmq9z
  kind: Event
  lastTimestamp: "2026-04-17T17:08:23Z"
  message: CSR "csr-hmq9z" has been approved
  metadata:
    creationTimestamp: "2026-04-17T17:08:23Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-17T17:08:23Z"
    name: csr-hmq9z.18a733f4ab7682a4
    namespace: default
    resourceVersion: "6811"
    uid: cb59b9aa-9bd7-47e6-a698-3d3e1d6ae796
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:58Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-n4bb9
  kind: Event
  lastTimestamp: "2026-04-17T17:07:58Z"
  message: CSR "csr-n4bb9" has been approved
  metadata:
    creationTimestamp: "2026-04-17T17:07:58Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-17T17:07:58Z"
    name: csr-n4bb9.18a733eeab8e3274
    namespace: default
    resourceVersion: "6201"
    uid: 4ae87156-e353-44fe-9371-142662016b64
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:08:13Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-qdcsx
  kind: Event
  lastTimestamp: "2026-04-17T17:08:13Z"
  message: CSR "csr-qdcsx" has been approved
  metadata:
    creationTimestamp: "2026-04-17T17:08:13Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-17T17:08:13Z"
    name: csr-qdcsx.18a733f23345b91e
    namespace: default
    resourceVersion: "6599"
    uid: 9379b6da-7cf5-475d-9496-33cea9ad8f8a
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:15:26Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14227"
    uid: d9e64e75-5e9f-41e6-8cc8-70276f598ff0
  kind: Event
  lastTimestamp: "2026-04-17T17:15:26Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-17T17:15:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-17T17:15:26Z"
    name: default-gateway.18a73456f023e8aa
    namespace: default
    resourceVersion: "14238"
    uid: 16dfe033-9da4-4a12-973f-e86ad77945c0
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-17T17:15:28Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14569"
    uid: d9e64e75-5e9f-41e6-8cc8-70276f598ff0
  kind: Event
  lastTimestamp: "2026-04-17T17:15:35Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-17T17:15:28Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-17T17:15:35Z"
    name: default-gateway.18a7345779319158
    namespace: default
    resourceVersion: "14770"
    uid: a6e2e9ac-ce8d-4e1a-9d10-d4395c95d1cf
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-17T17:15:35Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14857"
    uid: 9070dc6e-a912-4a73-93a8-46f136c99fa7
  kind: Event
  lastTimestamp: "2026-04-17T17:16:03Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig
    metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig
    internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub
    platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve
    platform.opendatahub.io/instance.uid:9070dc6e-a912-4a73-93a8-46f136c99fa7 platform.opendatahub.io/type:Open Data Hub
    platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve
    app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template
    namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true)
    controller:%!s(bool=true) kind:Kserve name:default-kserve uid:9070dc6e-a912-4a73-93a8-46f136c99fa7]]]
    spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n
    grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n
    cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo
    \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in
    /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n
    port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file
    ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q
    \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n
    else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n
    ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n
    hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n
    \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE]
    NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs
    found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA
    (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n
    declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4
    GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q
    \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n
    \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n
    echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n
    \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n
    # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n
    count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n
    best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n
    \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common
    in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer
    RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n
    if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n
    export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer
    RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected
    GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export
    NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported
    GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX
    found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n
    --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n
    --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE
    value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent
    livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10)
    timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health
    port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false)
    capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log
    terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache]
    map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true
    --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io]
    env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]]
    image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health
    port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar
    ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS]
    initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false)
    capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError
    volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[]
    name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName
    .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1,
    Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\":
    failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints
    available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-17T17:15:35Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-17T17:16:03Z"
    name: default-kserve.18a73459383a5c45
    namespace: default
    resourceVersion: "16147"
    uid: 93167088-7122-4679-a7b4-4a3f88fa5b0a
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 30
  eventTime: null
  firstTimestamp: "2026-04-17T17:15:25Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14182"
    uid: b0de110a-9b40-4dcf-925c-c5df1b7b40c1
  kind: Event
  lastTimestamp: "2026-04-17T17:32:54Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-17T17:15:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-17T17:32:54Z"
    name: default-monitoring.18a73456e5b2bf42
    namespace: default
    resourceVersion: "35610"
    uid: 0ceb9be9-a5de-4c10-8906-bd7cc1460979
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:19:47Z"
  involvedObject:
    apiVersion: v1
    kind: Endpoints
    name: e2e-unconfiab60ef4d3a239b5143b412cab04acac3-kserve-workload-svc
  kind: Event
  lastTimestamp: "2026-04-17T17:19:47Z"
  message: 'Failed to create endpoint for service llm/e2e-unconfiab60ef4d3a239b5143b412cab04acac3-kserve-workload-svc: endpoints "e2e-unconfiab60ef4d3a239b5143b412cab04acac3-kserve-workload-svc" already exists'
  metadata:
    creationTimestamp: "2026-04-17T17:19:47Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:19:47Z"
    name: e2e-unconfiab60ef4d3a239b5143b412cab04acac3-kserve-workload-svc.18a73493c22dab65
    namespace: default
    resourceVersion: "23540"
    uid: f5b57896-7c90-4a41-a5a5-ad834329301f
  reason: FailedToCreateEndpoint
  reportingComponent: endpoint-controller
  reportingInstance: ""
  source:
    component: endpoint-controller
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: ip-10-0-132-98.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:42Z"
  message: 'Node ip-10-0-132-98.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-17T17:07:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:42Z"
    name: ip-10-0-132-98.ec2.internal.18a733eaf9ee202a
    namespace: default
    resourceVersion: "5731"
    uid: 758fec3c-fb18-409d-923e-e573a0f86d69
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-98.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-98.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: ip-10-0-132-98.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:42Z"
  message: 'Node ip-10-0-132-98.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-17T17:07:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:42Z"
    name: ip-10-0-132-98.ec2.internal.18a733eaf9ee7eeb
    namespace: default
    resourceVersion: "5733"
    uid: 4f805b64-a80f-4dec-99d6-bac7484dcf90
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-98.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-98.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: ip-10-0-132-98.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:42Z"
  message: 'Node ip-10-0-132-98.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-17T17:07:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:42Z"
    name: ip-10-0-132-98.ec2.internal.18a733eaf9eeb833
    namespace: default
    resourceVersion: "5735"
    uid: e1a17b80-18b8-4873-8fda-442d505feb4e
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-98.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-98.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: ip-10-0-132-98.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:42Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-17T17:07:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:42Z"
    name: ip-10-0-132-98.ec2.internal.18a733eafd21f377
    namespace: default
    resourceVersion: "5637"
    uid: 661859fb-8bca-4a0d-a3bb-4eb06da466d7
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-98.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-98.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:42Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: 798e4e36-809c-4fa5-9e34-16dcf2dc050f
  kind: Event
  lastTimestamp: "2026-04-17T17:07:42Z"
  message: 'Node ip-10-0-132-98.ec2.internal event: Registered Node ip-10-0-132-98.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-17T17:07:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:07:42Z"
    name: ip-10-0-132-98.ec2.internal.18a733eb051c738a
    namespace: default
    resourceVersion: "5660"
    uid: 2facfeaf-3a8e-4c3b-8d42-5ca51705a41d
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:42Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    resourceVersion: "5638"
    uid: 798e4e36-809c-4fa5-9e34-16dcf2dc050f
  kind: Event
  lastTimestamp: "2026-04-17T17:07:42Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-17T17:07:42Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-17T17:07:42Z"
    name: ip-10-0-132-98.ec2.internal.18a733eb13410c51
    namespace: default
    resourceVersion: "5739"
    uid: 7e4533ce-f98d-4fe0-ba93-346f4a910a7f
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:08:14Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: ip-10-0-132-98.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:08:14Z"
  message: 'Node ip-10-0-132-98.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-17T17:08:14Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:08:14Z"
    name: ip-10-0-132-98.ec2.internal.18a733f28909ab05
    namespace: default
    resourceVersion: "6643"
    uid: 875a3302-a1fc-4998-9347-4d96c1e2f0d5
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-98.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-98.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:10:06Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-98.ec2.internal
    uid: 798e4e36-809c-4fa5-9e34-16dcf2dc050f
  kind: Event
  lastTimestamp: "2026-04-17T17:10:06Z"
  message: 'Node ip-10-0-132-98.ec2.internal event: Registered Node ip-10-0-132-98.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-17T17:10:06Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:10:06Z"
    name: ip-10-0-132-98.ec2.internal.18a7340c7f327c54
    namespace: default
    resourceVersion: "8120"
    uid: 2daa9346-5004-486c-aef0-256b67eda7e5
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:26Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:27Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-17T17:07:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:27Z"
    name: ip-10-0-134-244.ec2.internal.18a733e75c39362e
    namespace: default
    resourceVersion: "5399"
    uid: 166c302c-16b2-4f24-a2e2-95143f9c51fa
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:26Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:27Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-17T17:07:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:27Z"
    name: ip-10-0-134-244.ec2.internal.18a733e75c3991bf
    namespace: default
    resourceVersion: "5401"
    uid: c175ecec-328a-455e-86c2-255a6fcbc6ce
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:26Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:27Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-17T17:07:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:27Z"
    name: ip-10-0-134-244.ec2.internal.18a733e75c39b5e8
    namespace: default
    resourceVersion: "5402"
    uid: b4d753d9-31d2-4728-ac6a-4c5da5ae490e
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:26Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:26Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-17T17:07:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:26Z"
    name: ip-10-0-134-244.ec2.internal.18a733e75ec33b6a
    namespace: default
    resourceVersion: "5349"
    uid: 860499a2-6555-4948-b934-2f63048bab41
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:27Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    resourceVersion: "5350"
    uid: d4fcecf1-c4d2-428d-8e59-872e9da38551
  kind: Event
  lastTimestamp: "2026-04-17T17:07:27Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-17T17:07:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-17T17:07:27Z"
    name: ip-10-0-134-244.ec2.internal.18a733e778282fdb
    namespace: default
    resourceVersion: "5432"
    uid: c6b92a1c-ffaf-427a-bfba-2c942d66297b
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:27Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: d4fcecf1-c4d2-428d-8e59-872e9da38551
  kind: Event
  lastTimestamp: "2026-04-17T17:07:27Z"
  message: 'Node ip-10-0-134-244.ec2.internal event: Registered Node ip-10-0-134-244.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-17T17:07:27Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:07:27Z"
    name: ip-10-0-134-244.ec2.internal.18a733e786ea6452
    namespace: default
    resourceVersion: "5466"
    uid: 991d07c4-9461-4422-9f1b-a68945a7794b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: ip-10-0-134-244.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:59Z"
  message: 'Node ip-10-0-134-244.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-17T17:07:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:59Z"
    name: ip-10-0-134-244.ec2.internal.18a733ef09597155
    namespace: default
    resourceVersion: "6221"
    uid: 48ca64c0-c12f-4be6-9b8d-38560396eea1
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-244.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-244.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:10:06Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-244.ec2.internal
    uid: d4fcecf1-c4d2-428d-8e59-872e9da38551
  kind: Event
  lastTimestamp: "2026-04-17T17:10:06Z"
  message: 'Node ip-10-0-134-244.ec2.internal event: Registered Node ip-10-0-134-244.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-17T17:10:06Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:10:06Z"
    name: ip-10-0-134-244.ec2.internal.18a7340c7f33eaf0
    namespace: default
    resourceVersion: "8134"
    uid: 8d3d61f2-4d14-40f7-b56a-49fce5b9154a
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: ip-10-0-138-224.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:53Z"
  message: 'Node ip-10-0-138-224.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-17T17:07:52Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:53Z"
    name: ip-10-0-138-224.ec2.internal.18a733ed6bea250e
    namespace: default
    resourceVersion: "6077"
    uid: cb15fba4-5c09-4d59-9e0b-59f0ccdb107b
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-224.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-224.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: ip-10-0-138-224.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:53Z"
  message: 'Node ip-10-0-138-224.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-17T17:07:52Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:53Z"
    name: ip-10-0-138-224.ec2.internal.18a733ed6bea825b
    namespace: default
    resourceVersion: "6078"
    uid: 159fc897-64b0-4218-9682-7d869a7d61fa
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-224.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-224.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: ip-10-0-138-224.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:53Z"
  message: 'Node ip-10-0-138-224.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-17T17:07:52Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:53Z"
    name: ip-10-0-138-224.ec2.internal.18a733ed6bead955
    namespace: default
    resourceVersion: "6079"
    uid: 1535eeb1-7b78-441e-ac04-fb3feebc7cb8
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-224.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-224.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:52Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: ip-10-0-138-224.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:07:52Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-17T17:07:52Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:07:52Z"
    name: ip-10-0-138-224.ec2.internal.18a733ed6eb6a4dc
    namespace: default
    resourceVersion: "5967"
    uid: a9d3df33-4dd7-4f2c-8ef6-4e7b435bb4d4
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-224.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-224.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:53Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    resourceVersion: "5970"
    uid: 25fa09a6-b3c5-4071-a358-4237cbaf76be
  kind: Event
  lastTimestamp: "2026-04-17T17:07:53Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-17T17:07:53Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-17T17:07:53Z"
    name: ip-10-0-138-224.ec2.internal.18a733ed86dd4757
    namespace: default
    resourceVersion: "6085"
    uid: 875ac153-86a4-44ac-8346-e36f309078e4
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:07:57Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: 25fa09a6-b3c5-4071-a358-4237cbaf76be
  kind: Event
  lastTimestamp: "2026-04-17T17:07:57Z"
  message: 'Node ip-10-0-138-224.ec2.internal event: Registered Node ip-10-0-138-224.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-17T17:07:57Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:07:57Z"
    name: ip-10-0-138-224.ec2.internal.18a733ee835fb317
    namespace: default
    resourceVersion: "6189"
    uid: a7585f03-4cd1-4871-8e30-80352c4e27df
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:08:25Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: ip-10-0-138-224.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-17T17:08:25Z"
  message: 'Node ip-10-0-138-224.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-17T17:08:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-17T17:08:25Z"
    name: ip-10-0-138-224.ec2.internal.18a733f4f4523527
    namespace: default
    resourceVersion: "6826"
    uid: 2bbbb292-b304-489c-9740-c83333b31906
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-224.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-224.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:10:06Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-224.ec2.internal
    uid: 25fa09a6-b3c5-4071-a358-4237cbaf76be
  kind: Event
  lastTimestamp: "2026-04-17T17:10:06Z"
  message: 'Node ip-10-0-138-224.ec2.internal event: Registered Node ip-10-0-138-224.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-17T17:10:06Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-17T17:10:06Z"
    name: ip-10-0-138-224.ec2.internal.18a7340c7f3409c3
    namespace: default
    resourceVersion: "8136"
    uid: d158f301-565c-43be-abaa-0651d4a4f9d2
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-17T17:16:38Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16572"
    uid: eb5ddc69-2372-4662-a3d7-0b787399399a
  kind: Event
  lastTimestamp: "2026-04-17T17:16:39Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-17T17:16:38Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-17T17:16:40Z"
    name: kuadrant-system.18a73467bcdcf631
    namespace: default
    resourceVersion: "16759"
    uid: 54cb937a-1d03-464d-8422-4cc27fa3cbde
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-17T17:09:41Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-17T17:09:41Z"
    name: kube-system.18a73406c7ae6086
    namespace: default
    resourceVersion: "7627"
    uid: 4bf96ede-866e-4334-a5f4-7bd288980269
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-57b8df6b47-kjjlx
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-17T17:09:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-17T17:09:56Z"
    name: kube-system.18a7340a460f37b8
    namespace: default
    resourceVersion: "7880"
    uid: 5c00722c-2db3-4285-8945-e5c86653c71e
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-57b8df6b47-kjjlx
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-17T17:09:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-17T17:09:56Z"
    name: kube-system.18a7340a4643e407
    namespace: default
    resourceVersion: "7881"
    uid: 6d950dab-d87a-4493-b259-b5183354622a
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-57b8df6b47-kjjlx
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-17T17:09:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-17T17:09:56Z"
    name: kube-system.18a7340a469cce62
    namespace: default
    resourceVersion: "7882"
    uid: 5689e289-3ad7-4bd2-9a42-c5951c65ab23
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-57b8df6b47-kjjlx
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-17T17:10:56Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-17T17:10:56Z"
    name: kube-system.18a734183e9ea722
    namespace: default
    resourceVersion: "10235"
    uid: 27c7d7c0-41ed-4a5b-b8a9-d95c810fcb9b
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-57b8df6b47-kjjlx
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-17T17:02:43Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-17T17:02:43Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-17T17:02:43Z"
    name: openshift-kube-apiserver.18a733a54c4fda2c
    namespace: default
    resourceVersion: "274"
    uid: bb940137-ac6f-437b-a69f-132296853d6b
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-598d6b6484-2djfx
  type: Warning
kind: EventList
metadata:
  resourceVersion: "45981"