---
apiVersion: v1
items:
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-22T21:16:43Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Auth
    name: auth
    resourceVersion: "14779"
    uid: 515c32b1-f73c-4036-991e-2e0f65595d87
  kind: Event
  lastTimestamp: "2026-04-22T21:16:49Z"
  message: 'failure deploying resource {map[apiVersion:rbac.authorization.k8s.io/v1 kind:Role metadata:map[annotations:map[platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:auth platform.opendatahub.io/instance.uid:515c32b1-f73c-4036-991e-2e0f65595d87 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1] labels:map[platform.opendatahub.io/part-of:auth] name:data-science-admingroup-role namespace:opendatahub ownerReferences:[map[apiVersion:services.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Auth name:auth uid:515c32b1-f73c-4036-991e-2e0f65595d87]]] rules:[map[apiGroups:[services.platform.opendatahub.io] resources:[auths] verbs:[get list watch patch update]] map[apiGroups:[services.opendatahub.io] resources:[auths/status] verbs:[get]] map[apiGroups:[infrastructure.opendatahub.io] resources:[hardwareprofiles] verbs:[create get list watch patch update delete]] map[apiGroups:[route.openshift.io] resources:[routes] verbs:[get list watch]] map[apiGroups:[batch] resources:[cronjobs] verbs:[get update watch]] map[apiGroups:[image.openshift.io] resources:[imagestreams] verbs:[create get list patch update delete watch]] map[apiGroups:[build.openshift.io] resources:[builds buildconfigs] verbs:[list get watch]] map[apiGroups:[apps] resources:[deployments] verbs:[patch update]] map[apiGroups:[opendatahub.io] resources:[odhdashboardconfigs] verbs:[get list watch create update patch]] map[apiGroups:[dashboard.opendatahub.io] resources:[odhapplications odhdocuments] verbs:[get list watch]] map[apiGroups:[console.openshift.io] resources:[odhquickstarts] verbs:[get list watch]] map[apiGroups:[template.openshift.io] resources:[templates] verbs:[get list watch create patch update delete]] map[apiGroups:[serving.kserve.io] resources:[servingruntimes] verbs:[create]] map[apiGroups:[nim.opendatahub.io] resources:[accounts] verbs:[watch update get list create patch delete]] map[apiGroups:[] resourceNames:[tier-to-group-mapping] resources:[configmaps] verbs:[get list watch patch update]]]]}: apply failed rbac.authorization.k8s.io/v1, Kind=Role: unable to patch rbac.authorization.k8s.io/v1, Kind=Role opendatahub/data-science-admingroup-role: roles.rbac.authorization.k8s.io "data-science-admingroup-role" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion services.platform.opendatahub.io/v1alpha1 Kind Auth: no matches for kind "Auth" in version "services.platform.opendatahub.io/v1alpha1"'
  metadata:
    creationTimestamp: "2026-04-22T21:16:43Z"
    name: auth.18a8ca68a0244533
    namespace: default
    resourceVersion: "14968"
    uid: 53008826-de6e-4d07-9090-3b4135b8cc40
  reason: ProvisioningError
  reportingComponent: auth
  reportingInstance: ""
  source:
    component: auth
  type: Warning
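# The Auth ProvisioningError above repeats (count: 11) because the API server
# cannot find a RESTMapping for services.platform.opendatahub.io/v1alpha1
# Kind=Auth, so it rejects the ownerReference with blockOwnerDeletion on the
# Role. A quick check, assuming cluster access and the usual plural.group CRD
# naming (not confirmed by the events themselves):
#   kubectl api-resources --api-group=services.platform.opendatahub.io
#   kubectl get crd auths.services.platform.opendatahub.io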
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:45Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-bbqd7
  kind: Event
  lastTimestamp: "2026-04-22T21:09:45Z"
  message: CSR "csr-bbqd7" has been approved
  metadata:
    creationTimestamp: "2026-04-22T21:09:45Z"
    name: csr-bbqd7.18a8ca072fa0f6da
    namespace: default
    resourceVersion: "6620"
    uid: c4c3681b-5cbd-4d74-a3a8-de0d7cfba782
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:38Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-fjsl6
  kind: Event
  lastTimestamp: "2026-04-22T21:09:38Z"
  message: CSR "csr-fjsl6" has been approved
  metadata:
    creationTimestamp: "2026-04-22T21:09:38Z"
    name: csr-fjsl6.18a8ca05a41489ad
    namespace: default
    resourceVersion: "6412"
    uid: 77873145-2a99-441f-b5ce-902e65b2b3f6
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:27Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-g54j6
  kind: Event
  lastTimestamp: "2026-04-22T21:09:27Z"
  message: CSR "csr-g54j6" has been approved
  metadata:
    creationTimestamp: "2026-04-22T21:09:27Z"
    name: csr-g54j6.18a8ca0317f09ace
    namespace: default
    resourceVersion: "6101"
    uid: 214b421c-cc5e-48a6-afe6-bcc94db265a0
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:51Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-j4rtn
  kind: Event
  lastTimestamp: "2026-04-22T21:09:51Z"
  message: CSR "csr-j4rtn" has been approved
  metadata:
    creationTimestamp: "2026-04-22T21:09:51Z"
    name: csr-j4rtn.18a8ca08aba19541
    namespace: default
    resourceVersion: "6737"
    uid: e6bc0513-e18e-4b9f-8696-b109199138ed
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:34Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-jsshl
  kind: Event
  lastTimestamp: "2026-04-22T21:09:34Z"
  message: CSR "csr-jsshl" has been approved
  metadata:
    creationTimestamp: "2026-04-22T21:09:34Z"
    name: csr-jsshl.18a8ca0495c062eb
    namespace: default
    resourceVersion: "6251"
    uid: 971b92ad-5342-49f3-99ef-70ecc362f834
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:44Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-kqptm
  kind: Event
  lastTimestamp: "2026-04-22T21:09:44Z"
  message: CSR "csr-kqptm" has been approved
  metadata:
    creationTimestamp: "2026-04-22T21:09:44Z"
    name: csr-kqptm.18a8ca07220ec732
    namespace: default
    resourceVersion: "6603"
    uid: a68430ca-c964-4372-94ea-8fe8c11589a7
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-22T21:16:43Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14710"
    uid: 33a0c71a-99ae-4750-b9c1-c500c113c203
  kind: Event
  lastTimestamp: "2026-04-22T21:16:50Z"
  message: 'failed to create auth proxy secret: secrets "kube-auth-proxy-creds" is forbidden: cannot set blockOwnerDeletion in this case because cannot find RESTMapping for APIVersion services.platform.opendatahub.io/v1alpha1 Kind GatewayConfig: no matches for kind "GatewayConfig" in version "services.platform.opendatahub.io/v1alpha1"'
  metadata:
    creationTimestamp: "2026-04-22T21:16:43Z"
    name: default-gateway.18a8ca6892d83fa1
    namespace: default
    resourceVersion: "14978"
    uid: da5d7f71-e372-4c61-bd05-70d52f92f1ea
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 4
  eventTime: null
  firstTimestamp: "2026-04-22T21:16:55Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "15270"
    uid: 33a0c71a-99ae-4750-b9c1-c500c113c203
  kind: Event
  lastTimestamp: "2026-04-22T21:17:05Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-22T21:16:55Z"
    name: default-gateway.18a8ca6b5a37f331
    namespace: default
    resourceVersion: "15524"
    uid: 2a88bb0f-0e0d-4e79-97b2-585adcd6604b
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
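# Both default-gateway warnings above point at missing API registrations: the
# GatewayConfig kind has no RESTMapping (same symptom as the Auth event), and
# the data-science-tls-rule lookup fails because no networking.istio.io/v1
# DestinationRule is served. Possible checks, assuming the usual plural.group
# CRD names (an inference, not stated in the events):
#   kubectl get crd gatewayconfigs.services.platform.opendatahub.io
#   kubectl get crd destinationrules.networking.istio.io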
- apiVersion: v1
  count: 27
  eventTime: null
  firstTimestamp: "2026-04-22T21:16:53Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "15122"
    uid: c5b97c2e-9948-41e9-a264-e6ec7ba8e1ea
  kind: Event
  lastTimestamp: "2026-04-22T21:31:01Z"
  message: |-
    failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:c5b97c2e-9948-41e9-a264-e6ec7ba8e1ea platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:c5b97c2e-9948-41e9-a264-e6ec7ba8e1ea]]] spec:map[template:map[containers:[map[args:[if [ "$KSERVE_INFER_ROCE" = "true" ]; then
      echo "Trying to infer RoCE configs ... "
      grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null
      grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null

      cat /proc/driver/nvidia/params

      KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-"RoCE v2"}

      echo "[Infer RoCE] Discovering active HCAs ..."
      active_hcas=()
      # Loop through all mlx5 devices found in sysfs
      for hca_dir in /sys/class/infiniband/mlx5_*; do
        # Ensure it's a directory before proceeding
        if [ -d "$hca_dir" ]; then
          hca_name=$(basename "$hca_dir")
          port_state_file="$hca_dir/ports/1/state" # Assume port 1
          type_file="$hca_dir/ports/1/gid_attrs/types/*"

          echo "[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'"
          if [ -f "$port_state_file" ] && grep -q "ACTIVE" "$port_state_file" && grep -q "${KSERVE_INFER_IB_GID_INDEX_GREP}" ${type_file} 2>/dev/null; then
            echo "[Infer RoCE] Found active HCA: $hca_name"
            active_hcas+=("$hca_name")
          else
            echo "[Infer RoCE] Skipping inactive or down HCA: $hca_name"
          fi
        fi
      done

      ucx_hcas=()
      for hca in "${active_hcas[@]}"; do
        ucx_hcas+=("${hca}:1")
      done

      # Check if we found any active HCAs
      if [ ${#active_hcas[@]} -gt 0 ]; then
        # Join the array elements with a comma
        hcas=$(IFS=,; echo "${active_hcas[*]}")
        echo "[Infer RoCE] Setting active HCAs: ${hcas}"
        export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}
        export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}
        export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}

        echo "[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}"
        echo "[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}"
      else
        echo "[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set."
      fi

      if [ ${#active_hcas[@]} -gt 0 ]; then
        echo "[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)..."

        # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs
        declare -A gid_index_count
        declare -A hca_gid_index

        for hca_name in "${active_hcas[@]}"; do
          echo "[Infer RoCE] Processing HCA: ${hca_name}"

          # Find all RoCE v2 IPv4 GIDs for this HCA and count by index
          for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do
            if grep -q "${KSERVE_INFER_IB_GID_INDEX_GREP}" "$tpath" 2>/dev/null; then
              idx=$(basename "$tpath")
              gid_file="/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}"
              # Check for IPv4 GID (contains ffff:)
              if [ -f "$gid_file" ] && grep -q "ffff:" "$gid_file"; then
                gid_value=$(cat "$gid_file" 2>/dev/null || echo "")
                echo "[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}"
                hca_gid_index["${hca_name}"]="${idx}"
                gid_index_count["${idx}"]=$((${gid_index_count["${idx}"]} + 1))
                break # Use first found IPv4 GID per HCA
              fi
            fi
          done
        done

        # Find the most common GID index (most likely to be consistent across nodes)
        best_gid_index=""
        max_count=0
        for idx in "${!gid_index_count[@]}"; do
          count=${gid_index_count["${idx}"]}
          echo "[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs"
          if [ $count -gt $max_count ]; then
            max_count=$count
            best_gid_index="$idx"
          fi
        done

        # Use deterministic fallback if counts are equal - prefer lower index number
        if [ ${#gid_index_count[@]} -gt 1 ]; then
          echo "[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}"
          # If there's a tie, prefer index 3 as it's most common in SR-IOV setups
          if [ -n "${gid_index_count['3']}" ] && [ "${gid_index_count['3']}" -eq "$max_count" ]; then
            best_gid_index="3"
            echo "[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)"
          fi
        fi

        # Check if GID_INDEX is already set via environment variables
        if [ -n "${NCCL_IB_GID_INDEX}" ]; then
          echo "[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment"
          export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}
          export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}
          echo "[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX"
        elif [ -n "$best_gid_index" ]; then
          echo "[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)"

          export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}
          export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}
          export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}

          echo "[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX"
        else
          echo "[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA."
        fi
      else
        echo "[Infer RoCE] No active HCAs found, skipping GID_INDEX inference."
      fi
    fi

    eval "vllm serve /mnt/models \
      --served-model-name "{{ .Spec.Model.Name }}" \
      --port 8001 \
      ${VLLM_ADDITIONAL_ARGS} \
      --enable-ssl-refresh \
      --ssl-certfile /var/run/kserve/tls/tls.crt \
      --ssl-keyfile /var/run/kserve/tls/tls.key"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook "llminferenceserviceconfig.kserve-webhook-server.validator": failed to call webhook: Post "https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s": no endpoints available for service "kserve-webhook-server-service"
  metadata:
    creationTimestamp: "2026-04-22T21:16:53Z"
    name: default-kserve.18a8ca6ad4c3af39
    namespace: default
    resourceVersion: "33347"
    uid: b1754052-b109-41fb-a6d4-a4f4ea1f8a3d
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
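# The apply above fails at the validating webhook: the
# kserve-webhook-server-service has no ready endpoints, which usually means
# the kserve webhook server pod had not come up yet. A check (standard
# kubectl; the service name and namespace are taken from the error itself):
#   kubectl get endpoints -n opendatahub kserve-webhook-server-service
#   kubectl get pods -n opendatahub | grep kserve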
- apiVersion: v1
  count: 33
  eventTime: null
  firstTimestamp: "2026-04-22T21:16:43Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14673"
    uid: 96c55e0d-0f7c-4dbe-a747-d8bba3961789
  kind: Event
  lastTimestamp: "2026-04-22T21:34:13Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-22T21:16:43Z"
    name: default-monitoring.18a8ca688ec4b665
    namespace: default
    resourceVersion: "36088"
    uid: ae1822b4-7db7-47c6-9b1f-220a4dbecb13
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
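# "unknown namespace for the cache" is the error a controller-runtime
# namespace-scoped cache returns when it is asked about a namespace it was
# never configured to watch; here the monitoring controller's cache
# apparently does not cover opendatahub-monitoring. (Interpretation; the
# event itself does not say which namespaces the cache watches.)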
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: ip-10-0-134-137.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:03Z"
  message: 'Node ip-10-0-134-137.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T21:09:03Z"
    name: ip-10-0-134-137.ec2.internal.18a8c9fd5c9d16ff
    namespace: default
    resourceVersion: "5380"
    uid: 5611bf9d-1577-47f0-bb18-b574c54dd1e2
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-137.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-137.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: ip-10-0-134-137.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:03Z"
  message: 'Node ip-10-0-134-137.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T21:09:03Z"
    name: ip-10-0-134-137.ec2.internal.18a8c9fd5c9d5896
    namespace: default
    resourceVersion: "5382"
    uid: 77a17075-1467-4659-8e1e-6f0aee137bc1
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-137.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-137.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: ip-10-0-134-137.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:03Z"
  message: 'Node ip-10-0-134-137.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T21:09:03Z"
    name: ip-10-0-134-137.ec2.internal.18a8c9fd5c9d7b19
    namespace: default
    resourceVersion: "5387"
    uid: 5b7629a4-4024-4095-aa5f-16b5ada87f81
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-137.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-137.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:03Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: ip-10-0-134-137.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:03Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T21:09:03Z"
    name: ip-10-0-134-137.ec2.internal.18a8c9fd5f0e32e7
    namespace: default
    resourceVersion: "5328"
    uid: a2851e78-4698-4e12-82c9-c366b3d72ed1
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-137.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-137.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:03Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    resourceVersion: "5331"
    uid: 3eadc583-dc3b-4cbb-8b5e-62aa826b128d
  kind: Event
  lastTimestamp: "2026-04-22T21:09:03Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T21:09:03Z"
    name: ip-10-0-134-137.ec2.internal.18a8c9fd75274484
    namespace: default
    resourceVersion: "5420"
    uid: 781f1887-2546-4266-827c-fe453d8511e7
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:06Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: 3eadc583-dc3b-4cbb-8b5e-62aa826b128d
  kind: Event
  lastTimestamp: "2026-04-22T21:09:06Z"
  message: 'Node ip-10-0-134-137.ec2.internal event: Registered Node ip-10-0-134-137.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T21:09:06Z"
    name: ip-10-0-134-137.ec2.internal.18a8c9fe34116431
    namespace: default
    resourceVersion: "5512"
    uid: 5e27e81b-9edb-4cac-a8be-833a0a21336f
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:29Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    resourceVersion: "6122"
    uid: 3eadc583-dc3b-4cbb-8b5e-62aa826b128d
  kind: Event
  lastTimestamp: "2026-04-22T21:09:29Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-134-137.ec2.internal, error getting gateway config for node ip-10-0-134-137.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-134-137.ec2.internal", failed to update chassis to local for local node ip-10-0-134-137.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-134-137.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-134-137.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T21:09:29Z"
    name: ip-10-0-134-137.ec2.internal.18a8ca0375254468
    namespace: default
    resourceVersion: "6130"
    uid: 9e388609-43aa-4b00-af5f-311d8ab0c5f1
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:35Z"
  involvedObject:
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: ip-10-0-134-137.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:35Z"
  message: 'Node ip-10-0-134-137.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T21:09:35Z"
    name: ip-10-0-134-137.ec2.internal.18a8ca04ed66c55e
    namespace: default
    resourceVersion: "6303"
    uid: 8ea9fe56-c2e5-44f7-b6cb-1ad166472c25
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-134-137.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-134-137.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:11:35Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-134-137.ec2.internal
    uid: 3eadc583-dc3b-4cbb-8b5e-62aa826b128d
  kind: Event
  lastTimestamp: "2026-04-22T21:11:35Z"
  message: 'Node ip-10-0-134-137.ec2.internal event: Registered Node ip-10-0-134-137.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T21:11:35Z"
    name: ip-10-0-134-137.ec2.internal.18a8ca20e362fdf1
    namespace: default
    resourceVersion: "8021"
    uid: a456dd95-6b20-40c9-881d-61ec7efa8dac
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
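# The ErrorAddingResource warning for ip-10-0-134-137 above (and the matching
# ones for ip-10-0-136-20 and ip-10-0-143-252 below) fires while ovnkube-node
# has not yet written the k8s.ovn.org/node-chassis-id annotation; each node
# reports NodeReady a few seconds later, so these look like transient
# bootstrap ordering rather than a persistent failure.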
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:19Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: ip-10-0-136-20.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:19Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-22T21:09:19Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca013f7b33cb
    namespace: default
    resourceVersion: "5756"
    uid: 4155dc9d-067b-4a88-96b0-7f02cd622506
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-20.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-20.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:19Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: ip-10-0-136-20.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:19Z"
  message: 'Node ip-10-0-136-20.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T21:09:19Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca01410e6aac
    namespace: default
    resourceVersion: "5860"
    uid: d36479c7-c12f-4e4c-acf6-fe0bd0993bee
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-20.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-20.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:19Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: ip-10-0-136-20.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:19Z"
  message: 'Node ip-10-0-136-20.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T21:09:19Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca01410eb811
    namespace: default
    resourceVersion: "5863"
    uid: f6c2038e-23e5-4c31-9559-3b38f3c37423
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-20.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-20.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:19Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: ip-10-0-136-20.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:19Z"
  message: 'Node ip-10-0-136-20.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T21:09:19Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca01410ef71e
    namespace: default
    resourceVersion: "5864"
    uid: 77d99167-ad4e-4b0f-8c8c-8831e64c4b68
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-20.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-20.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:19Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: ip-10-0-136-20.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:19Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T21:09:19Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca014404c085
    namespace: default
    resourceVersion: "5762"
    uid: 4c18a91d-b206-4783-9184-99904b94c87e
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-20.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-20.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:20Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    resourceVersion: "5766"
    uid: 55cdbba5-9d87-417c-8ac2-fc6399cc6542
  kind: Event
  lastTimestamp: "2026-04-22T21:09:20Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T21:09:20Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca0159b25734
    namespace: default
    resourceVersion: "5871"
    uid: 54407338-7b14-4cf2-ac11-68425f63e5b6
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:21Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: 55cdbba5-9d87-417c-8ac2-fc6399cc6542
  kind: Event
  lastTimestamp: "2026-04-22T21:09:21Z"
  message: 'Node ip-10-0-136-20.ec2.internal event: Registered Node ip-10-0-136-20.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T21:09:21Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca01b2465c58
    namespace: default
    resourceVersion: "5893"
    uid: 309d8aab-ce02-46c5-aaf7-8b7a271aab2a
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:46Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    resourceVersion: "6665"
    uid: 55cdbba5-9d87-417c-8ac2-fc6399cc6542
  kind: Event
  lastTimestamp: "2026-04-22T21:09:46Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-136-20.ec2.internal, error getting gateway config for node ip-10-0-136-20.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-136-20.ec2.internal", failed to update chassis to local for local node ip-10-0-136-20.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-136-20.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-136-20.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T21:09:46Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca078bb49f53
    namespace: default
    resourceVersion: "6669"
    uid: d15ab342-0288-4b60-a7d8-af17edd5c4bc
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:53Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: ip-10-0-136-20.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:53Z"
  message: 'Node ip-10-0-136-20.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T21:09:53Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca0906dfa2fe
    namespace: default
    resourceVersion: "6758"
    uid: da8c5f49-9119-4c89-9189-9cb74e44f814
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-20.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-20.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:11:35Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-20.ec2.internal
    uid: 55cdbba5-9d87-417c-8ac2-fc6399cc6542
  kind: Event
  lastTimestamp: "2026-04-22T21:11:35Z"
  message: 'Node ip-10-0-136-20.ec2.internal event: Registered Node ip-10-0-136-20.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T21:11:35Z"
    name: ip-10-0-136-20.ec2.internal.18a8ca20e3641a03
    namespace: default
    resourceVersion: "8028"
    uid: 902bd9aa-a295-4de3-9787-efc23381fc2a
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: ip-10-0-143-252.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:13Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-22T21:09:13Z"
    name: ip-10-0-143-252.ec2.internal.18a8c9ffd8da78ad
    namespace: default
    resourceVersion: "5550"
    uid: de0c3e58-e3b8-46ae-9720-71430ccf956b
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-143-252.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-143-252.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: ip-10-0-143-252.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:13Z"
  message: 'Node ip-10-0-143-252.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-22T21:09:13Z"
    name: ip-10-0-143-252.ec2.internal.18a8c9ffdaabdebb
    namespace: default
    resourceVersion: "5647"
    uid: dd9d84d5-7461-4c68-84d8-03f71842b6e7
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-143-252.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-143-252.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: ip-10-0-143-252.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:13Z"
  message: 'Node ip-10-0-143-252.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-22T21:09:13Z"
    name: ip-10-0-143-252.ec2.internal.18a8c9ffdaac7462
    namespace: default
    resourceVersion: "5648"
    uid: dbfe7775-57ad-4e18-8bf7-4ef63594d12a
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-143-252.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-143-252.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: ip-10-0-143-252.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:13Z"
  message: 'Node ip-10-0-143-252.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-22T21:09:13Z"
    name: ip-10-0-143-252.ec2.internal.18a8c9ffdaac9bef
    namespace: default
    resourceVersion: "5649"
    uid: aaa86c17-11bc-4b00-889c-fbb19b67863a
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-143-252.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-143-252.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:13Z"
  involvedObject:
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: ip-10-0-143-252.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:13Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-22T21:09:13Z"
    name: ip-10-0-143-252.ec2.internal.18a8c9ffdd6e4a21
    namespace: default
    resourceVersion: "5555"
    uid: 00eaae23-15e4-4eca-8cd3-afe6d5a10f60
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-143-252.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-143-252.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:14Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    resourceVersion: "5557"
    uid: 07a2d55f-1725-4234-a5ed-bea7b187a9a6
  kind: Event
  lastTimestamp: "2026-04-22T21:09:14Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-22T21:09:14Z"
    name: ip-10-0-143-252.ec2.internal.18a8c9fff2653bc1
    namespace: default
    resourceVersion: "5656"
    uid: d457cad1-c9be-4d0d-836e-94e423eb8fd6
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:16Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: 07a2d55f-1725-4234-a5ed-bea7b187a9a6
  kind: Event
  lastTimestamp: "2026-04-22T21:09:16Z"
  message: 'Node ip-10-0-143-252.ec2.internal event: Registered Node ip-10-0-143-252.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T21:09:16Z"
    name: ip-10-0-143-252.ec2.internal.18a8ca0088396f4f
    namespace: default
    resourceVersion: "5717"
    uid: 808fe4d2-e2da-4be9-8240-53ec1f435667
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:40Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    resourceVersion: "6447"
    uid: 07a2d55f-1725-4234-a5ed-bea7b187a9a6
  kind: Event
  lastTimestamp: "2026-04-22T21:09:40Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-143-252.ec2.internal, error getting gateway config for node ip-10-0-143-252.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-143-252.ec2.internal", failed to update chassis to local for local node ip-10-0-143-252.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-143-252.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-143-252.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-22T21:09:40Z"
    name: ip-10-0-143-252.ec2.internal.18a8ca0600333f82
    namespace: default
    resourceVersion: "6473"
    uid: 8e891b8a-430d-4016-8dd4-cd67541cd225
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:09:46Z"
  involvedObject:
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: ip-10-0-143-252.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-22T21:09:46Z"
  message: 'Node ip-10-0-143-252.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-22T21:09:46Z"
    name: ip-10-0-143-252.ec2.internal.18a8ca077e69427b
    namespace: default
    resourceVersion: "6643"
    uid: faad5870-8cd1-458f-964b-d138af676618
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-143-252.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-143-252.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:11:35Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-143-252.ec2.internal
    uid: 07a2d55f-1725-4234-a5ed-bea7b187a9a6
  kind: Event
  lastTimestamp: "2026-04-22T21:11:35Z"
  message: 'Node ip-10-0-143-252.ec2.internal event: Registered Node ip-10-0-143-252.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-22T21:11:35Z"
    name: ip-10-0-143-252.ec2.internal.18a8ca20e3643993
    namespace: default
    resourceVersion: "8033"
    uid: 7f37a022-1d77-47ff-89d0-deb9b2092d8a
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-22T21:17:55Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "17104"
    uid: 77db6f16-853c-4914-bcfa-9374278f3de5
  kind: Event
  lastTimestamp: "2026-04-22T21:17:59Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-22T21:17:55Z"
    name: kuadrant-system.18a8ca796db7cb73
    namespace: default
    resourceVersion: "17319"
    uid: b2739702-e9d0-4f15-9a8a-40b741262d78
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
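# "error reading server preface: http2: frame too large" from OLM's catalog
# gRPC client is often a protocol mismatch, for example a plaintext client
# reaching a TLS-serving endpoint; worth verifying how the
# kuadrant-operator-catalog pod exposes its gRPC port. (Interpretation, not
# stated in the event.)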
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-22T21:11:16Z"
    name: kube-system.18a8ca1c769f8fec
    namespace: default
    resourceVersion: "7598"
    uid: fe7befcb-0ffd-4b1f-be9b-58540c983876
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-599f646696-kt62g
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-22T21:11:31Z"
    name: kube-system.18a8ca1ff503d711
    namespace: default
    resourceVersion: "7911"
    uid: 310c1a58-c46b-4244-ac6d-f29e46611c27
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-599f646696-kt62g
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-22T21:11:31Z"
    name: kube-system.18a8ca1ff531b308
    namespace: default
    resourceVersion: "7914"
    uid: aa2de945-abd1-4204-baf8-4c4a560ac8ac
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-599f646696-kt62g
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-22T21:11:31Z"
    name: kube-system.18a8ca1ff57f43e5
    namespace: default
    resourceVersion: "7915"
    uid: b59956ae-448e-476f-bdce-dd8253bc90c8
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-599f646696-kt62g
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-22T21:12:31Z"
    name: kube-system.18a8ca2ded86819c
    namespace: default
    resourceVersion: "10014"
    uid: 1aba0d39-d8df-4515-b89a-45bbbc07f54c
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-599f646696-kt62g
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-22T21:04:26Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-22T21:04:26Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-22T21:04:26Z"
    name: openshift-kube-apiserver.18a8c9bcdfa31212
    namespace: default
    resourceVersion: "274"
    uid: a48620a8-473c-4199-9832-9f47d1d80567
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-948f8bb97-gmfxt
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46253"
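# After addressing the warnings above, re-listing only Warning events keeps
# the output manageable:
#   kubectl get events -A --field-selector type=Warning --sort-by=.lastTimestamp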