---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:25Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-5cmqz
  kind: Event
  lastTimestamp: "2026-04-16T23:26:25Z"
  message: CSR "csr-5cmqz" has been approved
  metadata:
    creationTimestamp: "2026-04-16T23:26:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T23:26:25Z"
    name: csr-5cmqz.18a6fa00f2236538
    namespace: default
    resourceVersion: "6192"
    uid: 08aff821-fd74-435b-8a2e-66c195e92595
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:27:03Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-5jlzz
  kind: Event
  lastTimestamp: "2026-04-16T23:27:03Z"
  message: CSR "csr-5jlzz" has been approved
  metadata:
    creationTimestamp: "2026-04-16T23:27:03Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T23:27:03Z"
    name: csr-5jlzz.18a6fa09ff56b9fb
    namespace: default
    resourceVersion: "6847"
    uid: 18097c0a-6990-45e9-b1a6-52c024aa626e
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:04Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-69vj4
  kind: Event
  lastTimestamp: "2026-04-16T23:26:04Z"
  message: CSR "csr-69vj4" has been approved
  metadata:
    creationTimestamp: "2026-04-16T23:26:04Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T23:26:04Z"
    name: csr-69vj4.18a6f9fc43ae7517
    namespace: default
    resourceVersion: "5803"
    uid: 661194b2-2676-4dc6-a254-ff034b085131
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:11Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-ldqcp
  kind: Event
  lastTimestamp: "2026-04-16T23:26:11Z"
  message: CSR "csr-ldqcp" has been approved
  metadata:
    creationTimestamp: "2026-04-16T23:26:11Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ovnkube-identity
      operation: Update
      time: "2026-04-16T23:26:11Z"
    name: csr-ldqcp.18a6f9fdba08b9d8
    namespace: default
    resourceVersion: "5891"
    uid: f0bcdafc-f557-4560-9dad-06a12e35ec83
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:30Z"
"2026-04-16T23:26:30Z" involvedObject: kind: CertificateSigningRequest name: csr-p44pb kind: Event lastTimestamp: "2026-04-16T23:26:30Z" message: CSR "csr-p44pb" has been approved metadata: creationTimestamp: "2026-04-16T23:26:30Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:26:30Z" name: csr-p44pb.18a6fa0244dbda44 namespace: default resourceVersion: "6288" uid: b9389d5d-2fe7-4c7a-808a-b884d0686c6e reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:26:58Z" involvedObject: kind: CertificateSigningRequest name: csr-s8c56 kind: Event lastTimestamp: "2026-04-16T23:26:58Z" message: CSR "csr-s8c56" has been approved metadata: creationTimestamp: "2026-04-16T23:26:58Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: ovnkube-identity operation: Update time: "2026-04-16T23:26:58Z" name: csr-s8c56.18a6fa08cb6fc65c namespace: default resourceVersion: "6791" uid: f479c0d0-48a1-4bed-a26e-76cf3e4b5583 reason: CSRApproved reportingComponent: ovnkube-csr-approver-controller reportingInstance: "" source: component: ovnkube-csr-approver-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:31:30Z" involvedObject: apiVersion: operator.openshift.io/v1alpha1 kind: IstioCSR kind: Event lastTimestamp: "2026-04-16T23:31:30Z" message: controller is starting metadata: creationTimestamp: "2026-04-16T23:31:30Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: cert-manager-operator operation: Update time: "2026-04-16T23:31:30Z" name: d3afc894-2a5a-4d4c-8175-5d9e806e642f namespace: default resourceVersion: "11725" uid: 627ed8bc-40c0-4fc5-ac53-782d08ac8341 reason: ControllerStarted reportingComponent: cert-manager-istio-csr-controller reportingInstance: "" source: component: cert-manager-istio-csr-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:32:57Z" involvedObject: apiVersion: v1 kind: Endpoints name: data-science-gateway-data-science-gateway-class kind: Event lastTimestamp: "2026-04-16T23:32:57Z" message: 'Failed to create endpoint for service openshift-ingress/data-science-gateway-data-science-gateway-class: endpoints "data-science-gateway-data-science-gateway-class" already exists' metadata: creationTimestamp: "2026-04-16T23:32:57Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:32:57Z" name: data-science-gateway-data-science-gateway-class.18a6fa5c4299e731 namespace: default resourceVersion: "15355" uid: b10b4bf0-9564-49dc-af3f-285b2f76db79 reason: FailedToCreateEndpoint reportingComponent: 
  reportingInstance: ""
  source:
    component: endpoint-controller
  type: Warning
- apiVersion: v1
  count: 11
  eventTime: null
  firstTimestamp: "2026-04-16T23:32:26Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14054"
    uid: 4933d7f5-56b1-4a9e-849d-bde4a0e6be37
  kind: Event
  lastTimestamp: "2026-04-16T23:32:33Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-16T23:32:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-16T23:32:33Z"
    name: default-gateway.18a6fa551f5f5238
    namespace: default
    resourceVersion: "14281"
    uid: ce93c63d-4c9c-491a-ba0f-2c0a2b3c411e
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-16T23:32:36Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "14423"
    uid: 9660457f-5298-480a-9876-5815ccadea84
  kind: Event
  lastTimestamp: "2026-04-16T23:33:04Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:9660457f-5298-480a-9876-5815ccadea84 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:9660457f-5298-480a-9876-5815ccadea84]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-16T23:32:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-16T23:33:04Z"
    name: default-kserve.18a6fa5767527498
    namespace: default
    resourceVersion: "15889"
    uid: c3fc91af-a5c7-4040-aba3-36430f008d9f
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 29
  eventTime: null
  firstTimestamp: "2026-04-16T23:32:25Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "13915"
    uid: e920efd9-2a40-46bb-a2f8-63b4e3dd2e33
  kind: Event
  lastTimestamp: "2026-04-16T23:49:33Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-16T23:32:25Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: manager
      operation: Update
      time: "2026-04-16T23:49:33Z"
    name: default-monitoring.18a6fa54e81a70ec
    namespace: default
    resourceVersion: "34962"
    uid: f633f00e-0741-41a9-af6e-2547d58ee5fe
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    uid: ip-10-0-131-43.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:25:59Z"
  message: Starting kubelet.
metadata: creationTimestamp: "2026-04-16T23:25:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:25:59Z" name: ip-10-0-131-43.ec2.internal.18a6f9fb16d06220 namespace: default resourceVersion: "5570" uid: 6abe67e4-e1a8-4dc5-9140-55708b94f1a7 reason: Starting reportingComponent: kubelet reportingInstance: ip-10-0-131-43.ec2.internal source: component: kubelet host: ip-10-0-131-43.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:25:59Z" involvedObject: kind: Node name: ip-10-0-131-43.ec2.internal uid: ip-10-0-131-43.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:26:00Z" message: 'Node ip-10-0-131-43.ec2.internal status is now: NodeHasSufficientMemory' metadata: creationTimestamp: "2026-04-16T23:25:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:26:00Z" name: ip-10-0-131-43.ec2.internal.18a6f9fb18a7aa5d namespace: default resourceVersion: "5681" uid: fe56bd87-3992-49a6-80a6-ea662e4d7d99 reason: NodeHasSufficientMemory reportingComponent: kubelet reportingInstance: ip-10-0-131-43.ec2.internal source: component: kubelet host: ip-10-0-131-43.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:25:59Z" involvedObject: kind: Node name: ip-10-0-131-43.ec2.internal uid: ip-10-0-131-43.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:26:00Z" message: 'Node ip-10-0-131-43.ec2.internal status is now: NodeHasNoDiskPressure' metadata: creationTimestamp: "2026-04-16T23:25:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:26:00Z" name: ip-10-0-131-43.ec2.internal.18a6f9fb18a7f534 namespace: default resourceVersion: "5682" uid: 870be721-dbed-4bb7-b5c3-43501a6d5794 reason: NodeHasNoDiskPressure reportingComponent: kubelet reportingInstance: ip-10-0-131-43.ec2.internal source: component: kubelet host: ip-10-0-131-43.ec2.internal type: Normal - apiVersion: v1 count: 6 eventTime: null firstTimestamp: "2026-04-16T23:25:59Z" involvedObject: kind: Node name: ip-10-0-131-43.ec2.internal uid: ip-10-0-131-43.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:26:00Z" message: 'Node ip-10-0-131-43.ec2.internal status is now: NodeHasSufficientPID' metadata: creationTimestamp: "2026-04-16T23:25:59Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:26:00Z" name: ip-10-0-131-43.ec2.internal.18a6f9fb18a81e09 namespace: default resourceVersion: "5683" uid: 7eb30893-9b2e-4563-80e5-fc2339dd84ec reason: NodeHasSufficientPID reportingComponent: kubelet 
  reportingInstance: ip-10-0-131-43.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-131-43.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:59Z"
  involvedObject:
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    uid: ip-10-0-131-43.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:25:59Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T23:25:59Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:25:59Z"
    name: ip-10-0-131-43.ec2.internal.18a6f9fb1bb7f7cd
    namespace: default
    resourceVersion: "5576"
    uid: e1e9dffb-270a-4613-bfc9-7813b8f341cc
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-131-43.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-131-43.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:00Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    resourceVersion: "5585"
    uid: d10e400f-a7c0-467e-b41e-b880d41356cb
  kind: Event
  lastTimestamp: "2026-04-16T23:26:00Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T23:26:00Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T23:26:00Z"
    name: ip-10-0-131-43.ec2.internal.18a6f9fb2f1d27d6
    namespace: default
    resourceVersion: "5686"
    uid: 8102e005-b282-4e26-a141-aeb175187a09
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:01Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    uid: d10e400f-a7c0-467e-b41e-b880d41356cb
  kind: Event
  lastTimestamp: "2026-04-16T23:26:01Z"
  message: 'Node ip-10-0-131-43.ec2.internal event: Registered Node ip-10-0-131-43.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T23:26:01Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T23:26:01Z"
    name: ip-10-0-131-43.ec2.internal.18a6f9fb632b6960
    namespace: default
    resourceVersion: "5742"
    uid: 5c9e7956-909a-47be-96dc-e92ac54ebd17
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:26Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    resourceVersion: "6212"
    uid: d10e400f-a7c0-467e-b41e-b880d41356cb
  kind: Event
  lastTimestamp: "2026-04-16T23:26:26Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-131-43.ec2.internal, error getting gateway config for node ip-10-0-131-43.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-131-43.ec2.internal", failed to update chassis to local for local node ip-10-0-131-43.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-131-43.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-131-43.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T23:26:26Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: ip-10-0-131-43
      operation: Update
      time: "2026-04-16T23:26:26Z"
    name: ip-10-0-131-43.ec2.internal.18a6fa014d2c4541
    namespace: default
    resourceVersion: "6213"
    uid: c0fc0390-8e22-483f-917a-41ff131d6267
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:32Z"
  involvedObject:
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    uid: ip-10-0-131-43.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:26:32Z"
  message: 'Node ip-10-0-131-43.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T23:26:32Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:26:32Z"
    name: ip-10-0-131-43.ec2.internal.18a6fa029efaae4e
    namespace: default
    resourceVersion: "6302"
    uid: 6511ae8d-7977-4ad1-954a-a3836789e6e0
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-131-43.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-131-43.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:28:33Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-131-43.ec2.internal
    uid: d10e400f-a7c0-467e-b41e-b880d41356cb
  kind: Event
  lastTimestamp: "2026-04-16T23:28:33Z"
  message: 'Node ip-10-0-131-43.ec2.internal event: Registered Node ip-10-0-131-43.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T23:28:33Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T23:28:33Z"
    name: ip-10-0-131-43.ec2.internal.18a6fa1ef453c04e
    namespace: default
    resourceVersion: "8112"
    uid: 434138d5-2905-444a-8823-83e176f8d23b
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-147.ec2.internal
    uid: ip-10-0-136-147.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:25:40Z"
  message: 'Node ip-10-0-136-147.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T23:25:40Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:25:40Z"
    name: ip-10-0-136-147.ec2.internal.18a6f9f6722dfc78
    namespace: default
    resourceVersion: "5257"
    uid: b146eb05-46e8-4799-94ad-c1805df9cb1e
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-147.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-147.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-147.ec2.internal
    uid: ip-10-0-136-147.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:25:40Z"
  message: 'Node ip-10-0-136-147.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T23:25:39Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:25:40Z"
    name: ip-10-0-136-147.ec2.internal.18a6f9f6722e3eb6
    namespace: default
    resourceVersion: "5258"
    uid: dc72a199-891f-49d8-a92b-f647fca3b891
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-147.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-147.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-147.ec2.internal
    uid: ip-10-0-136-147.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:25:40Z"
  message: 'Node ip-10-0-136-147.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T23:25:39Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:25:40Z"
    name: ip-10-0-136-147.ec2.internal.18a6f9f6722e620b
    namespace: default
    resourceVersion: "5260"
    uid: 13aae6f5-4ac1-4140-9543-c7dd2d35f6d0
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-147.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-147.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:39Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-147.ec2.internal
    uid: ip-10-0-136-147.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:25:39Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T23:25:39Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:25:39Z"
    name: ip-10-0-136-147.ec2.internal.18a6f9f67522601c
    namespace: default
    resourceVersion: "5225"
    uid: 7afc3f5a-bff3-472c-bae3-4cbb9539cd8a
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-147.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-147.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:25:40Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-147.ec2.internal
    resourceVersion: "5226"
    uid: 4fe2dc3e-d34f-4335-8fe0-22a198e48990
  kind: Event
  lastTimestamp: "2026-04-16T23:25:40Z"
"2026-04-16T23:25:40Z" message: Node synced successfully metadata: creationTimestamp: "2026-04-16T23:25:40Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: aws-cloud-controller-manager operation: Update time: "2026-04-16T23:25:40Z" name: ip-10-0-136-147.ec2.internal.18a6f9f692a6cfea namespace: default resourceVersion: "5319" uid: ee61e488-2701-47a6-a9de-af23f9ac8ec2 reason: Synced reportingComponent: cloud-node-controller reportingInstance: "" source: component: cloud-node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:25:41Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-136-147.ec2.internal uid: 4fe2dc3e-d34f-4335-8fe0-22a198e48990 kind: Event lastTimestamp: "2026-04-16T23:25:41Z" message: 'Node ip-10-0-136-147.ec2.internal event: Registered Node ip-10-0-136-147.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:25:41Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:25:41Z" name: ip-10-0-136-147.ec2.internal.18a6f9f6baedb404 namespace: default resourceVersion: "5343" uid: 36fa76a9-3f60-4a9b-869e-698aa08dbac1 reason: RegisteredNode reportingComponent: node-controller reportingInstance: "" source: component: node-controller type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:26:12Z" involvedObject: kind: Node name: ip-10-0-136-147.ec2.internal uid: ip-10-0-136-147.ec2.internal kind: Event lastTimestamp: "2026-04-16T23:26:12Z" message: 'Node ip-10-0-136-147.ec2.internal status is now: NodeReady' metadata: creationTimestamp: "2026-04-16T23:26:12Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:reportingInstance: {} f:source: f:component: {} f:host: {} f:type: {} manager: kubelet operation: Update time: "2026-04-16T23:26:12Z" name: ip-10-0-136-147.ec2.internal.18a6f9fe172ddb81 namespace: default resourceVersion: "5901" uid: 727b1bcd-abd7-4079-a6fb-561559785ef6 reason: NodeReady reportingComponent: kubelet reportingInstance: ip-10-0-136-147.ec2.internal source: component: kubelet host: ip-10-0-136-147.ec2.internal type: Normal - apiVersion: v1 count: 1 eventTime: null firstTimestamp: "2026-04-16T23:28:33Z" involvedObject: apiVersion: v1 kind: Node name: ip-10-0-136-147.ec2.internal uid: 4fe2dc3e-d34f-4335-8fe0-22a198e48990 kind: Event lastTimestamp: "2026-04-16T23:28:33Z" message: 'Node ip-10-0-136-147.ec2.internal event: Registered Node ip-10-0-136-147.ec2.internal in Controller' metadata: creationTimestamp: "2026-04-16T23:28:33Z" managedFields: - apiVersion: v1 fieldsType: FieldsV1 fieldsV1: f:count: {} f:firstTimestamp: {} f:involvedObject: {} f:lastTimestamp: {} f:message: {} f:reason: {} f:reportingComponent: {} f:source: f:component: {} f:type: {} manager: kube-controller-manager operation: Update time: "2026-04-16T23:28:33Z" name: ip-10-0-136-147.ec2.internal.18a6fa1ef452a8a4 namespace: default resourceVersion: "8101" uid: bb7a2497-8b86-4d54-a70f-169b1fd2977a reason: RegisteredNode reportingComponent: 
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:32Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: ip-10-0-136-153.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:26:32Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-16T23:26:32Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:26:32Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa02c7e98667
    namespace: default
    resourceVersion: "6329"
    uid: edc14307-9dc2-4bf9-a07b-3f2d1b6d7895
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-153.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-153.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:32Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: ip-10-0-136-153.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:26:33Z"
  message: 'Node ip-10-0-136-153.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T23:26:32Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:26:33Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa02c99971aa
    namespace: default
    resourceVersion: "6381"
    uid: 1cccb363-70f3-48f8-94c1-7b28b4e37ecc
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-153.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-153.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:32Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: ip-10-0-136-153.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:26:33Z"
  message: 'Node ip-10-0-136-153.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T23:26:32Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:26:33Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa02c999aabd
    namespace: default
    resourceVersion: "6382"
    uid: 03eee477-aa4c-4f0c-83a3-995634cc8e56
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-153.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-153.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:32Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: ip-10-0-136-153.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:26:33Z"
  message: 'Node ip-10-0-136-153.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T23:26:32Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:26:33Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa02c999ce92
    namespace: default
    resourceVersion: "6383"
    uid: 6068273f-872a-4edc-94c1-ffb18db2b1c9
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-153.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-153.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:32Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: ip-10-0-136-153.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:26:32Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T23:26:32Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:26:32Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa02cc65420e
    namespace: default
    resourceVersion: "6336"
    uid: f316b4d1-d8b4-4551-89ac-3b97edfd2e25
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-153.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-153.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:33Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    resourceVersion: "6338"
    uid: 00bdef91-e4ba-4523-af57-63774f97658e
  kind: Event
  lastTimestamp: "2026-04-16T23:26:33Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T23:26:33Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: aws-cloud-controller-manager
      operation: Update
      time: "2026-04-16T23:26:33Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa02df2bcf01
    namespace: default
    resourceVersion: "6392"
    uid: 81dca583-fe11-449c-bac5-34e12cbd409b
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:26:36Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: 00bdef91-e4ba-4523-af57-63774f97658e
  kind: Event
  lastTimestamp: "2026-04-16T23:26:36Z"
  message: 'Node ip-10-0-136-153.ec2.internal event: Registered Node ip-10-0-136-153.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T23:26:36Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T23:26:36Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa0389aa95ce
    namespace: default
    resourceVersion: "6516"
    uid: 239a52cd-7776-42ba-a030-a75e160da7de
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:27:05Z"
  involvedObject:
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: ip-10-0-136-153.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T23:27:05Z"
  message: 'Node ip-10-0-136-153.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T23:27:05Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:reportingInstance: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: kubelet
      operation: Update
      time: "2026-04-16T23:27:05Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa0a4f7761fa
    namespace: default
    resourceVersion: "6861"
    uid: fc2281dd-91dd-4d9d-82f0-c4b72faa7800
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-136-153.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-136-153.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:28:33Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-136-153.ec2.internal
    uid: 00bdef91-e4ba-4523-af57-63774f97658e
  kind: Event
  lastTimestamp: "2026-04-16T23:28:33Z"
  message: 'Node ip-10-0-136-153.ec2.internal event: Registered Node ip-10-0-136-153.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T23:28:33Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: kube-controller-manager
      operation: Update
      time: "2026-04-16T23:28:33Z"
    name: ip-10-0-136-153.ec2.internal.18a6fa1ef453a8f2
    namespace: default
    resourceVersion: "8106"
    uid: 833a1958-5d70-42f5-bfb6-6183ddba5e63
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-16T23:33:37Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "16139"
    uid: 3263013d-0c50-4c7b-9c00-d6a2ca13cde8
  kind: Event
  lastTimestamp: "2026-04-16T23:33:41Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-16T23:33:37Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:count: {}
        f:firstTimestamp: {}
        f:involvedObject: {}
        f:lastTimestamp: {}
        f:message: {}
        f:reason: {}
        f:reportingComponent: {}
        f:source:
          f:component: {}
        f:type: {}
      manager: catalog
      operation: Update
      time: "2026-04-16T23:33:43Z"
    name: kuadrant-system.18a6fa65c18ecb1d
    namespace: default
    resourceVersion: "16382"
    uid: c0aa8c8b-b9b9-4b85-ac6b-bd891dfe04ae
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-16T23:28:15Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T23:28:15Z"
    name: kube-system.18a6fa1ac6dce2fb
    namespace: default
    resourceVersion: "7632"
    uid: ee687324-6f96-44d9-9578-107dff3d2327
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5f46c475db-q59n4
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-16T23:28:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T23:28:30Z"
    name: kube-system.18a6fa1e453f1f7b
    namespace: default
    resourceVersion: "7949"
    uid: 34595267-9b23-4bb7-8e76-a6c0cbdd577c
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5f46c475db-q59n4
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-16T23:28:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T23:28:30Z"
    name: kube-system.18a6fa1e4571d864
    namespace: default
    resourceVersion: "7950"
    uid: 40a96a6c-a036-48f0-80ef-ea0662eeab0a
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5f46c475db-q59n4
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-16T23:28:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T23:28:30Z"
    name: kube-system.18a6fa1e45aa8946
    namespace: default
    resourceVersion: "7952"
    uid: 699cb12f-3c63-458a-b8f2-8d30e70be50e
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5f46c475db-q59n4
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-16T23:29:30Z"
    managedFields:
    - apiVersion: v1
      fieldsType: FieldsV1
      fieldsV1:
        f:involvedObject: {}
        f:message: {}
        f:reason: {}
        f:source:
          f:component: {}
          f:host: {}
        f:type: {}
      manager: openshift-apiserver
      operation: Update
      time: "2026-04-16T23:29:30Z"
    name: kube-system.18a6fa2c3dbebea3
    namespace: default
    resourceVersion: "10153"
    uid: 71774668-3364-49fb-84d3-bd017822735a
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5f46c475db-q59n4
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T23:21:23Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-16T23:21:23Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-16T23:21:23Z"
    name: openshift-kube-apiserver.18a6f9baba63a2a7
    namespace: default
    resourceVersion: "274"
    uid: d0b44b9c-8d30-4d1b-abaa-47bbd7116108
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-b9fb9fd98-tbjrm
  type: Warning
kind: EventList
metadata:
  resourceVersion: "46154"