---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:12:05Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-20T07:12:05Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-20T07:12:05Z"
    name: 57694f54-9a12-461d-9bb2-9c901f3e0cb5
    namespace: default
    resourceVersion: "12567"
    uid: df65263b-10ed-4c2a-af87-754bd6059df9
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:23Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-2jt8r
  kind: Event
  lastTimestamp: "2026-04-20T07:03:23Z"
  message: CSR "csr-2jt8r" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:03:23Z"
    name: csr-2jt8r.18a7feae8e5ccac0
    namespace: default
    resourceVersion: "6842"
    uid: d4c6a695-e98d-4fc9-b036-c2e8035a4b43
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:10Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-8bpgq
  kind: Event
  lastTimestamp: "2026-04-20T07:03:10Z"
  message: CSR "csr-8bpgq" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:03:10Z"
    name: csr-8bpgq.18a7feab86314ca0
    namespace: default
    resourceVersion: "6276"
    uid: 90c5a59f-f75b-484c-a98b-5db42bd89ac1
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:28Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-l7kdr
  kind: Event
  lastTimestamp: "2026-04-20T07:03:28Z"
  message: CSR "csr-l7kdr" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:03:28Z"
    name: csr-l7kdr.18a7feafab87843a
    namespace: default
    resourceVersion: "7018"
    uid: 7908bbef-46d2-4b2f-a9c5-cc29de232c9c
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:11Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-q8p6m
  kind: Event
  lastTimestamp: "2026-04-20T07:03:11Z"
  message: CSR "csr-q8p6m" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:03:11Z"
    name: csr-q8p6m.18a7feabb8f3010f
    namespace: default
    resourceVersion: "6291"
    uid: 3f9cb21b-5da0-40c0-9c59-7dbf303c90c6
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:16Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-sn8t5
  kind: Event
  lastTimestamp: "2026-04-20T07:03:16Z"
  message: CSR "csr-sn8t5" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:03:16Z"
    name: csr-sn8t5.18a7feacfaa4e25b
    namespace: default
    resourceVersion: "6494"
    uid: 73918502-6c37-43df-9d6b-9d09fc5a4996
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:04Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-szdc7
  kind: Event
  lastTimestamp: "2026-04-20T07:03:04Z"
  message: CSR "csr-szdc7" has been approved
  metadata:
    creationTimestamp: "2026-04-20T07:03:04Z"
    name: csr-szdc7.18a7feaa0bdacbee
    namespace: default
    resourceVersion: "6096"
    uid: 9a9fb775-df36-4573-a5e5-408694fc451a
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 2
  eventTime: null
  firstTimestamp: "2026-04-20T07:13:01Z"
  involvedObject:
    apiVersion: datasciencecluster.opendatahub.io/v2
    kind: DataScienceCluster
    name: default-dsc
    resourceVersion: "15033"
    uid: 4513f4f7-52f4-43e4-aa49-bd07476e16ad
  kind: Event
  lastTimestamp: "2026-04-20T07:13:01Z"
  message: 'failure deploying resource {map[apiVersion:components.platform.opendatahub.io/v1alpha1 kind:Kserve metadata:map[annotations:map[component.opendatahub.io/management-state:Managed platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-dsc platform.opendatahub.io/instance.uid:4513f4f7-52f4-43e4-aa49-bd07476e16ad platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1] creationTimestamp: labels:map[platform.opendatahub.io/part-of:datasciencecluster] name:default-kserve ownerReferences:[map[apiVersion:datasciencecluster.opendatahub.io/v2 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:DataScienceCluster name:default-dsc uid:4513f4f7-52f4-43e4-aa49-bd07476e16ad]]] spec:map[modelsAsService:map[managementState:Removed] nim:map[managementState:Managed] rawDeploymentServiceConfig:Headed] status:map[]]}: apply failed components.platform.opendatahub.io/v1alpha1, Kind=Kserve: unable to patch components.platform.opendatahub.io/v1alpha1, Kind=Kserve default-kserve: kserves.components.platform.opendatahub.io "default-kserve" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion datasciencecluster.opendatahub.io/v2 Kind DataScienceCluster: no matches for kind "DataScienceCluster" in version "datasciencecluster.opendatahub.io/v2"'
  metadata:
    creationTimestamp: "2026-04-20T07:13:01Z"
    name: default-dsc.18a7ff3510437885
    namespace: default
    resourceVersion: "15057"
    uid: 810c6d84-0b16-4ef0-927c-9daad0bfceca
  reason: ProvisioningError
  reportingComponent: datasciencecluster
  reportingInstance: ""
  source:
    component: datasciencecluster
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:13:01Z"
  involvedObject:
    apiVersion: dscinitialization.opendatahub.io/v2
    kind: DSCInitialization
    name: default-dsci
    resourceVersion: "14874"
    uid: 477506e1-5854-4d63-baf6-fdf90ff8fc9d
  kind: Event
  lastTimestamp: "2026-04-20T07:13:01Z"
  message: 'failed to create operator resources for instance default-dsci: unable to patch networking.k8s.io/v1, Kind=NetworkPolicy opendatahub/opendatahub: networkpolicies.networking.k8s.io "opendatahub" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion dscinitialization.opendatahub.io/v2 Kind DSCInitialization: no matches for kind "DSCInitialization" in version "dscinitialization.opendatahub.io/v2"'
  metadata:
    creationTimestamp: "2026-04-20T07:13:01Z"
    name: default-dsci.18a7ff34f9672179
    namespace: default
    resourceVersion: "15050"
    uid: d90b13e1-a606-4f92-a8fe-c5d2b0cf3d09
  reason: DSCInitializationReconcileError
  reportingComponent: dscinitialization-controller
  reportingInstance: ""
  source:
    component: dscinitialization-controller
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:13:01Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "15108"
    uid: df0aa0fe-4ef2-41f0-a75f-1c470dc1ce77
  kind: Event
  lastTimestamp: "2026-04-20T07:13:01Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-20T07:13:01Z"
    name: default-gateway.18a7ff352d34106b
    namespace: default
    resourceVersion: "15110"
    uid: 1aced709-160f-40db-bbac-8c67aeddea09
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-20T07:13:04Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "15243"
    uid: df0aa0fe-4ef2-41f0-a75f-1c470dc1ce77
  kind: Event
  lastTimestamp: "2026-04-20T07:13:11Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-20T07:13:04Z"
    name: default-gateway.18a7ff35b52879e4
    namespace: default
    resourceVersion: "15464"
    uid: 04b1974c-2c9c-4e60-a1ef-02ef409845ad
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-20T07:13:12Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "15549"
    uid: 0695bc67-b567-4ae1-be77-45e8911d436b
  kind: Event
  lastTimestamp: "2026-04-20T07:13:41Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:0695bc67-b567-4ae1-be77-45e8911d436b platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:0695bc67-b567-4ae1-be77-45e8911d436b]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-20T07:13:12Z"
    name: default-kserve.18a7ff379a98a8ab
    namespace: default
    resourceVersion: "16912"
    uid: 03724c72-1596-4b67-8e2a-58427563cd30
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 30
  eventTime: null
  firstTimestamp: "2026-04-20T07:13:01Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "15082"
    uid: ea17793d-04b2-406d-b850-fcce4a2ff342
  kind: Event
  lastTimestamp: "2026-04-20T07:30:09Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-20T07:13:01Z"
    name: default-monitoring.18a7ff35224919ac
    namespace: default
    resourceVersion: "34012"
    uid: 149633c7-387d-43b7-a366-83268a24156d
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: ip-10-0-130-105.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:45Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-20T07:02:45Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5b185e848
    namespace: default
    resourceVersion: "5432"
    uid: 5836a5e6-feb5-4d32-847b-b22531389148
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-105.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-105.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: ip-10-0-130-105.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:45Z"
  message: 'Node ip-10-0-130-105.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T07:02:45Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5b34dd6ec
    namespace: default
    resourceVersion: "5519"
    uid: 8aaaf7f4-a436-4400-846f-1fcfffd07175
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-105.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-105.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: ip-10-0-130-105.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:45Z"
  message: 'Node ip-10-0-130-105.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T07:02:45Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5b34e17df
    namespace: default
    resourceVersion: "5524"
    uid: 3a902dbc-ba44-4932-a40f-83c4d317b406
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-105.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-105.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: ip-10-0-130-105.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:45Z"
  message: 'Node ip-10-0-130-105.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T07:02:45Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5b34e3ca7
    namespace: default
    resourceVersion: "5528"
    uid: ab0a7cb9-f519-4b12-8e9c-dd54921eaa60
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-105.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-105.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:45Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: ip-10-0-130-105.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:45Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T07:02:45Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5b60b573f
    namespace: default
    resourceVersion: "5436"
    uid: 2437a657-3896-4da8-b500-0971eeecc59e
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-105.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-105.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:46Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    resourceVersion: "5439"
    uid: f5f3a71a-3fd3-40be-a859-6b42cf338f3c
  kind: Event
  lastTimestamp: "2026-04-20T07:02:46Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T07:02:46Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5cbab1aaa
    namespace: default
    resourceVersion: "5531"
    uid: 02f8d55b-fe36-4630-b28d-14a14248d6a8
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:46Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: f5f3a71a-3fd3-40be-a859-6b42cf338f3c
  kind: Event
  lastTimestamp: "2026-04-20T07:02:46Z"
  message: 'Node ip-10-0-130-105.ec2.internal event: Registered Node ip-10-0-130-105.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:02:46Z"
    name: ip-10-0-130-105.ec2.internal.18a7fea5e06e950f
    namespace: default
    resourceVersion: "5538"
    uid: e71db988-3e38-400f-97fb-6cdef4a21aa9
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:18Z"
  involvedObject:
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: ip-10-0-130-105.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:03:18Z"
  message: 'Node ip-10-0-130-105.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T07:03:18Z"
    name: ip-10-0-130-105.ec2.internal.18a7fead58cd0e44
    namespace: default
    resourceVersion: "6571"
    uid: 773f4eeb-612a-40f4-9f0e-ea632803b0d6
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-130-105.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-130-105.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:49Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-130-105.ec2.internal
    uid: f5f3a71a-3fd3-40be-a859-6b42cf338f3c
  kind: Event
  lastTimestamp: "2026-04-20T07:03:49Z"
  message: 'Node ip-10-0-130-105.ec2.internal event: Registered Node ip-10-0-130-105.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:03:49Z"
    name: ip-10-0-130-105.ec2.internal.18a7feb49933c8f4
    namespace: default
    resourceVersion: "7538"
    uid: 8c8147ea-79e7-4588-aaef-caecd6f8422b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: ip-10-0-138-178.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:57Z"
  message: 'Node ip-10-0-138-178.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T07:02:57Z"
    name: ip-10-0-138-178.ec2.internal.18a7fea87ac10f66
    namespace: default
    resourceVersion: "5846"
    uid: 44a1eb1f-db5a-42f3-97b4-a9d9230ab58a
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-178.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-178.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: ip-10-0-138-178.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:57Z"
  message: 'Node ip-10-0-138-178.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T07:02:57Z"
    name: ip-10-0-138-178.ec2.internal.18a7fea87ac17a21
    namespace: default
    resourceVersion: "5857"
    uid: e67cb08d-4e93-411d-baf8-ec2da0a9a678
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-178.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-178.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: ip-10-0-138-178.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:57Z"
  message: 'Node ip-10-0-138-178.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T07:02:57Z"
    name: ip-10-0-138-178.ec2.internal.18a7fea87ac19f21
    namespace: default
    resourceVersion: "5863"
    uid: 62970695-df26-45ac-8815-e38e999eca6b
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-178.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-178.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:57Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: ip-10-0-138-178.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:57Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T07:02:57Z"
    name: ip-10-0-138-178.ec2.internal.18a7fea87d84940e
    namespace: default
    resourceVersion: "5766"
    uid: dde747bb-9da2-488f-8bba-64b88b3d2731
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-178.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-178.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:58Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    resourceVersion: "5771"
    uid: a553389b-6f5f-41b6-82ee-a256b8bf1c71
  kind: Event
  lastTimestamp: "2026-04-20T07:02:58Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T07:02:58Z"
    name: ip-10-0-138-178.ec2.internal.18a7fea893c1aa42
    namespace: default
    resourceVersion: "5865"
    uid: 89b61dde-9576-4fb2-b473-92c158a7794b
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:01Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: a553389b-6f5f-41b6-82ee-a256b8bf1c71
  kind: Event
  lastTimestamp: "2026-04-20T07:03:01Z"
  message: 'Node ip-10-0-138-178.ec2.internal event: Registered Node ip-10-0-138-178.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:03:01Z"
    name: ip-10-0-138-178.ec2.internal.18a7fea95eafbce7
    namespace: default
    resourceVersion: "6014"
    uid: b3315585-7e98-4c89-8021-e70f4d2d520e
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:49Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: a553389b-6f5f-41b6-82ee-a256b8bf1c71
  kind: Event
  lastTimestamp: "2026-04-20T07:03:49Z"
  message: 'Node ip-10-0-138-178.ec2.internal event: Registered Node ip-10-0-138-178.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:03:49Z"
    name: ip-10-0-138-178.ec2.internal.18a7feb49934bcdf
    namespace: default
    resourceVersion: "7559"
    uid: 9fff7859-794c-42d4-98e5-63bb274484ee
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:50Z"
  involvedObject:
    kind: Node
    name: ip-10-0-138-178.ec2.internal
    uid: ip-10-0-138-178.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:03:50Z"
  message: 'Node ip-10-0-138-178.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T07:03:50Z"
    name: ip-10-0-138-178.ec2.internal.18a7feb4e8e97185
    namespace: default
    resourceVersion: "7764"
    uid: b542c6fb-9de4-4680-9d80-436fb263372b
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-138-178.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-138-178.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:37Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: ip-10-0-142-100.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:38Z"
  message: 'Node ip-10-0-142-100.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-20T07:02:38Z"
    name: ip-10-0-142-100.ec2.internal.18a7fea3e41d8c6f
    namespace: default
    resourceVersion: "5275"
    uid: a18bae93-8ebc-4bbf-90b4-e9b7ed544c9c
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-100.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-100.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:37Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: ip-10-0-142-100.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:38Z"
  message: 'Node ip-10-0-142-100.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-20T07:02:38Z"
    name: ip-10-0-142-100.ec2.internal.18a7fea3e41dcfba
    namespace: default
    resourceVersion: "5281"
    uid: 72f315a3-6a0f-4898-8db1-cda0a7445f1c
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-100.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-100.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:37Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: ip-10-0-142-100.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:38Z"
  message: 'Node ip-10-0-142-100.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-20T07:02:38Z"
    name: ip-10-0-142-100.ec2.internal.18a7fea3e41df241
    namespace: default
    resourceVersion: "5289"
    uid: ee7a6489-4ce7-42be-9060-bfed18a58204
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-100.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-100.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:37Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: ip-10-0-142-100.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:02:37Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-20T07:02:37Z"
    name: ip-10-0-142-100.ec2.internal.18a7fea3e742b3b5
    namespace: default
    resourceVersion: "5230"
    uid: 436b5bd8-8652-4cb6-861f-28e3276f64c9
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-100.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-100.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:38Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    resourceVersion: "5231"
    uid: 59d11bb3-3809-4437-b261-15ca4adeff4c
  kind: Event
  lastTimestamp: "2026-04-20T07:02:38Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-20T07:02:38Z"
    name: ip-10-0-142-100.ec2.internal.18a7fea402cb45ea
    namespace: default
    resourceVersion: "5297"
    uid: 55a6e7d1-3f72-439e-ae7d-ebc4236e10cc
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:02:41Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: 59d11bb3-3809-4437-b261-15ca4adeff4c
  kind: Event
  lastTimestamp: "2026-04-20T07:02:41Z"
  message: 'Node ip-10-0-142-100.ec2.internal event: Registered Node ip-10-0-142-100.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:02:41Z"
    name: ip-10-0-142-100.ec2.internal.18a7fea4b65b4bcd
    namespace: default
    resourceVersion: "5395"
    uid: 9d523b07-879b-4748-94c6-ff00dcbcf972
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:12Z"
  involvedObject:
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: ip-10-0-142-100.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-20T07:03:12Z"
  message: 'Node ip-10-0-142-100.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-20T07:03:12Z"
    name: ip-10-0-142-100.ec2.internal.18a7feabe06e1863
    namespace: default
    resourceVersion: "6311"
    uid: 12241e1b-68c1-40a2-8d4f-8c2391338e4a
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-142-100.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-142-100.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T07:03:49Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-142-100.ec2.internal
    uid: 59d11bb3-3809-4437-b261-15ca4adeff4c
  kind: Event
  lastTimestamp: "2026-04-20T07:03:49Z"
  message: 'Node ip-10-0-142-100.ec2.internal event: Registered Node ip-10-0-142-100.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-20T07:03:49Z"
    name: ip-10-0-142-100.ec2.internal.18a7feb4a3ddac51
    namespace: default
    resourceVersion: "7575"
    uid: ad195b30-84b0-4242-b693-6a2be48fcb72
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-20T07:14:13Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "17175"
    uid: 3fb759f5-8726-4f1a-aaa0-892a6027bb0c
  kind: Event
  lastTimestamp: "2026-04-20T07:14:16Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-20T07:14:13Z"
    name: kuadrant-system.18a7ff45dc071750
    namespace: default
    resourceVersion: "17411"
    uid: 5a67b3b5-4ae0-40b0-92cf-cb9a34a7a59d
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-20T07:03:26Z"
    name: kube-system.18a7feaf42e894fb
    namespace: default
    resourceVersion: "6982"
    uid: d27640d7-4952-4b96-b48c-adba33605af7
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6745c9f984-whf87
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-20T07:03:41Z"
    name: kube-system.18a7feb2c1524c21
    namespace: default
    resourceVersion: "7341"
    uid: 42d68496-fa26-4da5-9150-1d422ecc03c0
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6745c9f984-whf87
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-20T07:03:41Z"
    name: kube-system.18a7feb2c19c7a92
    namespace: default
    resourceVersion: "7342"
    uid: 1d8b94c1-973e-4327-9000-0a2a435b3e3b
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6745c9f984-whf87
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-20T07:03:41Z"
    name: kube-system.18a7feb2c1d9b63b
    namespace: default
    resourceVersion: "7343"
    uid: 0523187c-fcb1-4ee1-9014-7047c43809f4
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6745c9f984-whf87
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-20T07:04:41Z"
    name: kube-system.18a7fec0b9f335b8
    namespace: default
    resourceVersion: "9661"
    uid: 42981314-86d0-4d5b-9493-5a961666b0f8
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-6745c9f984-whf87
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-20T06:58:33Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-20T06:58:33Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-20T06:58:33Z"
    name: openshift-kube-apiserver.18a7fe6b080fd8d1
    namespace: default
    resourceVersion: "274"
    uid: 9c392634-3875-4427-a380-e07553a9fa70
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-6585c95c9d-ncpgl
  type: Warning
kind: EventList
metadata:
  resourceVersion: "43131"