---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:01Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-cpdrs
  kind: Event
  lastTimestamp: "2026-04-22T15:59:01Z"
  message: CSR "csr-cpdrs" has been approved
  metadata:
    creationTimestamp: "2026-04-22T15:59:01Z"
    name: csr-cpdrs.18a8b9126ea83c3a
    namespace: default
    resourceVersion: "6320"
    uid: 6a118938-2bbd-4cb7-aff4-9e8691a91ee0
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:07Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-qjcsw
  kind: Event
  lastTimestamp: "2026-04-22T15:59:07Z"
  message: CSR "csr-qjcsw" has been approved
  metadata:
    creationTimestamp: "2026-04-22T15:59:07Z"
    name: csr-qjcsw.18a8b913c4a09852
    namespace: default
    resourceVersion: "6534"
    uid: 2ffe4d52-3708-4800-98ff-63a8f44aa00c
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:14Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-rzg8w
  kind: Event
  lastTimestamp: "2026-04-22T15:59:14Z"
  message: CSR "csr-rzg8w" has been approved
  metadata:
    creationTimestamp: "2026-04-22T15:59:14Z"
    name: csr-rzg8w.18a8b91551c6efbf
    namespace: default
    resourceVersion: "6712"
    uid: 183e8d48-0d7b-43fb-92c5-d6345e0c37b9
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:08Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-w9sh5
  kind: Event
  lastTimestamp: "2026-04-22T15:59:08Z"
  message: CSR "csr-w9sh5" has been approved
  metadata:
    creationTimestamp: "2026-04-22T15:59:08Z"
    name: csr-w9sh5.18a8b91417a52988
    namespace: default
    resourceVersion: "6560"
    uid: f6b45c37-14ba-4fa9-b22d-16d0ba3a4f68
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:11Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-zhs9w
  kind: Event
  lastTimestamp: "2026-04-22T15:59:11Z"
  message: CSR "csr-zhs9w" has been approved
  metadata:
    creationTimestamp: "2026-04-22T15:59:11Z"
    name: csr-zhs9w.18a8b914acb3e76b
    namespace: default
    resourceVersion: "6648"
    uid: f2470368-1c4c-4496-88c4-0a3f48c70631
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:04Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-zvvsg
  kind: Event
  lastTimestamp: "2026-04-22T15:59:04Z"
  message: CSR "csr-zvvsg" has been approved
  metadata:
    creationTimestamp: "2026-04-22T15:59:04Z"
    name: csr-zvvsg.18a8b9132950e68b
    namespace: default
    resourceVersion: "6450"
    uid: 547d34e0-9749-417a-8e07-07025270138b
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 8
  eventTime: null
  firstTimestamp: "2026-04-22T16:06:09Z"
  involvedObject:
    apiVersion: datasciencecluster.opendatahub.io/v2
    kind: DataScienceCluster
    name: default-dsc
    resourceVersion: "14703"
    uid: 138d82be-adb0-4465-8a41-3549c0e5d8ab
  kind: Event
  lastTimestamp: "2026-04-22T16:06:10Z"
  message: 'failure deploying resource {map[apiVersion:components.platform.opendatahub.io/v1alpha1 kind:Kserve metadata:map[annotations:map[component.opendatahub.io/management-state:Managed platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-dsc platform.opendatahub.io/instance.uid:138d82be-adb0-4465-8a41-3549c0e5d8ab platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1] creationTimestamp: labels:map[platform.opendatahub.io/part-of:datasciencecluster] name:default-kserve ownerReferences:[map[apiVersion:datasciencecluster.opendatahub.io/v2 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:DataScienceCluster name:default-dsc uid:138d82be-adb0-4465-8a41-3549c0e5d8ab]]] spec:map[modelsAsService:map[managementState:Removed] nim:map[managementState:Managed] rawDeploymentServiceConfig:Headed] status:map[]]}: apply failed components.platform.opendatahub.io/v1alpha1, Kind=Kserve: unable to patch components.platform.opendatahub.io/v1alpha1, Kind=Kserve default-kserve: kserves.components.platform.opendatahub.io "default-kserve" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion datasciencecluster.opendatahub.io/v2 Kind DataScienceCluster: no matches for kind "DataScienceCluster" in version "datasciencecluster.opendatahub.io/v2"'
  metadata:
    creationTimestamp: "2026-04-22T16:06:09Z"
    name: default-dsc.18a8b97612a719cf
    namespace: default
    resourceVersion: "14753"
    uid: 7d7a1e68-b798-4f4e-b0a7-8e1a93c1ebcd
  reason: ProvisioningError
  reportingComponent: datasciencecluster
  reportingInstance: ""
  source:
    component: datasciencecluster
  type: Warning
- apiVersion: v1
  count: 8
  eventTime: null
  firstTimestamp: "2026-04-22T16:06:09Z"
  involvedObject:
    apiVersion: dscinitialization.opendatahub.io/v2
    kind: DSCInitialization
    name: default-dsci
    resourceVersion: "14528"
    uid: 7d5b6b55-66a4-49bc-bad8-4b8af210dfae
  kind: Event
  lastTimestamp: "2026-04-22T16:06:10Z"
  message: 'failed to create operator resources for instance default-dsci: unable to patch networking.k8s.io/v1, Kind=NetworkPolicy opendatahub/opendatahub: networkpolicies.networking.k8s.io "opendatahub" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion dscinitialization.opendatahub.io/v2 Kind DSCInitialization: no matches for kind "DSCInitialization" in version "dscinitialization.opendatahub.io/v2"'
  metadata:
    creationTimestamp: "2026-04-22T16:06:09Z"
    name: default-dsci.18a8b976019a5bac
    namespace: default
    resourceVersion: "14749"
    uid: 55e2726a-dd62-48af-ba6b-a940162188bf
  reason: DSCInitializationReconcileError
  reportingComponent: dscinitialization-controller
  reportingInstance: ""
  source:
    component: dscinitialization-controller
  type: Warning
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-22T16:06:12Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14906"
    uid: da26f43c-3737-4f41-a5bb-0472cb176aea
  kind: Event
  lastTimestamp: "2026-04-22T16:06:19Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-22T16:06:12Z"
    name: default-gateway.18a8b976b24f898f
    namespace: default
    resourceVersion: "15091"
    uid: da4f7e3f-d8df-4df1-b973-f564c35cd896
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-22T16:06:21Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "15194"
    uid: f370a205-5b51-4984-bdcb-8eb913685bc8
  kind: Event
  lastTimestamp: "2026-04-22T16:06:48Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:f370a205-5b51-4984-bdcb-8eb913685bc8 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:f370a205-5b51-4984-bdcb-8eb913685bc8]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-22T16:06:21Z"
    name: default-kserve.18a8b978bc4a2adb
    namespace: default
    resourceVersion: "16695"
    uid: fb7bfde2-6ce9-470b-9ad2-67f022cb5e93
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 25
  eventTime: null
  firstTimestamp: "2026-04-22T16:06:11Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14765"
    uid: f277dc11-4470-4871-b574-743239b29be4
  kind: Event
  lastTimestamp: "2026-04-22T16:06:46Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-22T16:06:11Z"
    name: default-monitoring.18a8b9765f6c4d25
    namespace: default
    resourceVersion: "16579"
    uid: b518f3d4-16de-4217-91bd-a41f24480ce8
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: ip-10-0-132-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e1b2d0596
    namespace: default
    resourceVersion: "5838"
    uid: 1d49bc6f-8db2-40bf-8ec7-50f3ef2cad4b
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: ip-10-0-132-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: 'Node ip-10-0-132-57.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e1cf7a5f1
    namespace: default
    resourceVersion: "5843"
    uid: e61efe62-f1d2-4745-a692-c5429775ccb2
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: ip-10-0-132-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: 'Node ip-10-0-132-57.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e1cf7f7a5
    namespace: default
    resourceVersion: "5845"
    uid: d5ca75a4-ae7c-4c27-84a6-4915b921bebb
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: ip-10-0-132-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: 'Node ip-10-0-132-57.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e1cf81ebd
    namespace: default
    resourceVersion: "5852"
    uid: 6f75c879-f166-4fcb-aa8b-badfca10f3dd
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: ip-10-0-132-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e200bba66
    namespace: default
    resourceVersion: "5842"
    uid: d6854e6b-e58f-4ab9-a673-e15695605251
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    resourceVersion: "5844"
    uid: de9d561a-ae07-4f58-8ac5-269c8319d91f
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e358ac1cb
    namespace: default
    resourceVersion: "5935"
    uid: 36056195-5430-486a-9dc9-a63d07bc7aa8
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: de9d561a-ae07-4f58-8ac5-269c8319d91f
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: 'Node ip-10-0-132-57.ec2.internal event: Registered Node ip-10-0-132-57.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-132-57.ec2.internal.18a8b90e3e213e84
    namespace: default
    resourceVersion: "5940"
    uid: ed2f349c-d10e-4d72-935c-962180392519
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:10Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    resourceVersion: "6609"
    uid: de9d561a-ae07-4f58-8ac5-269c8319d91f
  kind: Event
  lastTimestamp: "2026-04-22T15:59:10Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-57.ec2.internal, error getting gateway config for node ip-10-0-132-57.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-132-57.ec2.internal", failed to update chassis to local for local node ip-10-0-132-57.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-132-57.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-132-57.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T15:59:10Z"
    name: ip-10-0-132-57.ec2.internal.18a8b91473ad02f9
    namespace: default
    resourceVersion: "6614"
    uid: bb3c3681-9101-476f-8461-46c33ccab2d6
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:15Z"
  involvedObject:
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: ip-10-0-132-57.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:59:15Z"
  message: 'Node ip-10-0-132-57.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T15:59:15Z"
    name: ip-10-0-132-57.ec2.internal.18a8b9159c7a60fb
    namespace: default
    resourceVersion: "6734"
    uid: 32d3021a-5d2f-41c0-bf0e-0fc69baab487
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-132-57.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-132-57.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T16:00:25Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-132-57.ec2.internal
    uid: de9d561a-ae07-4f58-8ac5-269c8319d91f
  kind: Event
  lastTimestamp: "2026-04-22T16:00:25Z"
  message: 'Node ip-10-0-132-57.ec2.internal event: Registered Node ip-10-0-132-57.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T16:00:25Z"
    name: ip-10-0-132-57.ec2.internal.18a8b925e4fcd306
    namespace: default
    resourceVersion: "7882"
    uid: ef5ac860-889d-472b-b9c4-c61d7879f2e0
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: ip-10-0-135-152.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:36Z"
  message: 'Node ip-10-0-135-152.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T15:58:36Z"
    name: ip-10-0-135-152.ec2.internal.18a8b90c6cdd4fd7
    namespace: default
    resourceVersion: "5514"
    uid: 53227547-eb55-4049-b404-9ebb3c9e70bd
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-152.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-152.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: ip-10-0-135-152.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:36Z"
  message: 'Node ip-10-0-135-152.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T15:58:36Z"
    name: ip-10-0-135-152.ec2.internal.18a8b90c6cdddb2f
    namespace: default
    resourceVersion: "5515"
    uid: cf94eb06-d835-46fb-8e10-36f74bb37e47
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-152.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-152.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: ip-10-0-135-152.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:36Z"
  message: 'Node ip-10-0-135-152.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T15:58:36Z"
    name: ip-10-0-135-152.ec2.internal.18a8b90c6cde0cdc
    namespace: default
    resourceVersion: "5516"
    uid: ee4c402a-054b-4756-8ddd-9912aeb08a3b
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-152.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-152.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:36Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: ip-10-0-135-152.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:36Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T15:58:36Z"
    name: ip-10-0-135-152.ec2.internal.18a8b90c6f6b8530
    namespace: default
    resourceVersion: "5462"
    uid: 5b510bfc-38f6-4c85-af37-dd865c935bfd
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-152.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-152.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:36Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    resourceVersion: "5463"
    uid: d10eaa7d-1d5f-46c9-a3c2-3c630bb1636f
  kind: Event
  lastTimestamp: "2026-04-22T15:58:36Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T15:58:36Z"
    name: ip-10-0-135-152.ec2.internal.18a8b90c87fd9ec2
    namespace: default
    resourceVersion: "5544"
    uid: 8bfc9399-72d3-437b-ac4f-381a329732d3
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:38Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: d10eaa7d-1d5f-46c9-a3c2-3c630bb1636f
  kind: Event
  lastTimestamp: "2026-04-22T15:58:38Z"
  message: 'Node ip-10-0-135-152.ec2.internal event: Registered Node ip-10-0-135-152.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T15:58:38Z"
    name: ip-10-0-135-152.ec2.internal.18a8b90d140cfd38
    namespace: default
    resourceVersion: "5634"
    uid: 95a1f2e8-e143-410d-a62b-5daa27d9b022
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:08Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: ip-10-0-135-152.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:59:08Z"
  message: 'Node ip-10-0-135-152.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T15:59:08Z"
    name: ip-10-0-135-152.ec2.internal.18a8b9141703ec7f
    namespace: default
    resourceVersion: "6557"
    uid: fdffdcc9-5f4a-4660-98fc-c620cfe24871
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-152.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-152.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T16:00:25Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-152.ec2.internal
    uid: d10eaa7d-1d5f-46c9-a3c2-3c630bb1636f
  kind: Event
  lastTimestamp: "2026-04-22T16:00:25Z"
  message: 'Node ip-10-0-135-152.ec2.internal event: Registered Node ip-10-0-135-152.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T16:00:25Z"
    name: ip-10-0-135-152.ec2.internal.18a8b925e4fdcaf6
    namespace: default
    resourceVersion: "7888"
    uid: 76a247f2-0f9c-4d90-addb-420846f9d07f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: ip-10-0-135-9.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:39Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-22T15:58:39Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90d3c4da818
    namespace: default
    resourceVersion: "5653"
    uid: b548e092-eab2-41c9-9871-e33461721061
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-9.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-9.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: ip-10-0-135-9.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:39Z"
  message: 'Node ip-10-0-135-9.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T15:58:39Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90d3e5b125c
    namespace: default
    resourceVersion: "5744"
    uid: c16b8924-2899-4ea6-9fe6-39beef59eff5
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-9.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-9.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: ip-10-0-135-9.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:39Z"
  message: 'Node ip-10-0-135-9.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T15:58:39Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90d3e5b798b
    namespace: default
    resourceVersion: "5746"
    uid: 5c26c9eb-a48b-49ff-9cca-bc2d471bc539
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-9.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-9.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: ip-10-0-135-9.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:39Z"
  message: 'Node ip-10-0-135-9.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T15:58:39Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90d3e5ba58f
    namespace: default
    resourceVersion: "5748"
    uid: c07de175-6bd3-4f9c-984e-de217f15c922
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-9.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-9.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: ip-10-0-135-9.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:58:39Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T15:58:39Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90d417122be
    namespace: default
    resourceVersion: "5658"
    uid: ab2d1e18-69e3-424c-a6a7-db8874ae095f
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-9.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-9.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:39Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    resourceVersion: "5659"
    uid: 83098061-6e6d-4035-8034-c9af75292fc2
  kind: Event
  lastTimestamp: "2026-04-22T15:58:39Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T15:58:39Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90d5471ac9e
    namespace: default
    resourceVersion: "5751"
    uid: d1b99ce0-c1f7-4125-a3ff-1c619259754c
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:58:43Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: 83098061-6e6d-4035-8034-c9af75292fc2
  kind: Event
  lastTimestamp: "2026-04-22T15:58:43Z"
  message: 'Node ip-10-0-135-9.ec2.internal event: Registered Node ip-10-0-135-9.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T15:58:43Z"
    name: ip-10-0-135-9.ec2.internal.18a8b90e3e20d3ad
    namespace: default
    resourceVersion: "5939"
    uid: 151cf5ed-5ffd-4cf7-91b4-bc41cabe5e2e
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:06Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    resourceVersion: "6493"
    uid: 83098061-6e6d-4035-8034-c9af75292fc2
  kind: Event
  lastTimestamp: "2026-04-22T15:59:06Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-135-9.ec2.internal, error getting gateway config for node ip-10-0-135-9.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-135-9.ec2.internal", failed to update chassis to local for local node ip-10-0-135-9.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-135-9.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-135-9.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T15:59:06Z"
    name: ip-10-0-135-9.ec2.internal.18a8b913856ba85b
    namespace: default
    resourceVersion: "6501"
    uid: 8bf50132-5e62-4a07-b1e6-7c29c7f5329e
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:59:12Z"
  involvedObject:
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: ip-10-0-135-9.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T15:59:12Z"
  message: 'Node ip-10-0-135-9.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T15:59:12Z"
    name: ip-10-0-135-9.ec2.internal.18a8b915065a9a3e
    namespace: default
    resourceVersion: "6675"
    uid: 9da19ebf-cbed-4eae-8f81-541a5874b657
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-135-9.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-135-9.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T16:00:25Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-135-9.ec2.internal
    uid: 83098061-6e6d-4035-8034-c9af75292fc2
  kind: Event
  lastTimestamp: "2026-04-22T16:00:25Z"
  message: 'Node ip-10-0-135-9.ec2.internal event: Registered Node ip-10-0-135-9.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T16:00:25Z"
    name: ip-10-0-135-9.ec2.internal.18a8b925e4fde5c2
    namespace: default
    resourceVersion: "7898"
    uid: 0b65874e-6c66-447a-b2f4-b3d774ca627e
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 9
  eventTime: null
  firstTimestamp: "2026-04-22T16:07:21Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16994"
    uid: 9e46f543-a95c-4193-864c-c5e294ca3b69
  kind: Event
  lastTimestamp: "2026-04-22T16:07:24Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-22T16:07:21Z"
    name: kuadrant-system.18a8b986d960366d
    namespace: default
    resourceVersion: "17239"
    uid: 60795a89-66df-40fd-b29a-455adbab3bc9
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-22T16:00:04Z"
    name: kube-system.18a8b920f2afb027
    namespace: default
    resourceVersion: "7449"
    uid: 44786a5f-86b1-47be-9b06-3ef942c4da17
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-cc7c5f5bd-vxqkh
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-22T16:00:19Z"
    name: kube-system.18a8b92471202aef
    namespace: default
    resourceVersion: "7740"
    uid: 7cf258a1-9f98-46b8-8567-e93e68d25924
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-cc7c5f5bd-vxqkh
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-22T16:00:19Z"
    name: kube-system.18a8b924715178e8
    namespace: default
    resourceVersion: "7741"
    uid: 4d9eef49-cbe4-4e4e-83f7-789c090422ea
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-cc7c5f5bd-vxqkh
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-22T16:00:19Z"
    name: kube-system.18a8b92471854ace
    namespace: default
    resourceVersion: "7742"
    uid: f8385cc7-442e-4318-9628-2674c64cbce7
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-cc7c5f5bd-vxqkh
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-22T16:01:19Z"
    name: kube-system.18a8b93269ac81d6
    namespace: default
    resourceVersion: "10086"
    uid: 4ea55c86-7fbd-4beb-bd72-92392a92dfc5
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-cc7c5f5bd-vxqkh
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T15:53:13Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-22T15:53:13Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-22T15:53:13Z"
    name: openshift-kube-apiserver.18a8b8c140650060
    namespace: default
    resourceVersion: "274"
    uid: 27c29510-1b49-4df6-9234-300c189adf3a
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-6c7d45d88f-ndtpm
  type: Warning
kind: EventList
metadata:
  resourceVersion: "26866"