---
apiVersion: v1
items:
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:37:35Z"
  involvedObject:
    apiVersion: operator.openshift.io/v1alpha1
    kind: IstioCSR
  kind: Event
  lastTimestamp: "2026-04-16T14:37:35Z"
  message: controller is starting
  metadata:
    creationTimestamp: "2026-04-16T14:37:35Z"
    name: 2105048a-56b8-4f7d-8aff-beed669dabf3
    namespace: default
    resourceVersion: "12364"
    uid: 0f9be518-8b1c-490d-a388-a97ff3b1f69c
  reason: ControllerStarted
  reportingComponent: cert-manager-istio-csr-controller
  reportingInstance: ""
  source:
    component: cert-manager-istio-csr-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:19Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-5vbj2
  kind: Event
  lastTimestamp: "2026-04-16T14:30:19Z"
  message: CSR "csr-5vbj2" has been approved
  metadata:
    creationTimestamp: "2026-04-16T14:30:19Z"
    name: csr-5vbj2.18a6dcbfcdc97783
    namespace: default
    resourceVersion: "6593"
    uid: 79f4f592-d285-414e-b2ff-5cf85ddcf93c
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:12Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-cdtg4
  kind: Event
  lastTimestamp: "2026-04-16T14:30:12Z"
  message: CSR "csr-cdtg4" has been approved
  metadata:
    creationTimestamp: "2026-04-16T14:30:12Z"
    name: csr-cdtg4.18a6dcbe3bc0adf2
    namespace: default
    resourceVersion: "6330"
    uid: 8873776c-0c37-4665-b67c-184cba2c4639
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:09Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-fzlb4
  kind: Event
  lastTimestamp: "2026-04-16T14:30:09Z"
  message: CSR "csr-fzlb4" has been approved
  metadata:
    creationTimestamp: "2026-04-16T14:30:09Z"
    name: csr-fzlb4.18a6dcbd97184832
    namespace: default
    resourceVersion: "6275"
    uid: f27bcf39-f2c0-4514-b234-17a363236ce2
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:15Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-p5529
  kind: Event
  lastTimestamp: "2026-04-16T14:30:15Z"
  message: CSR "csr-p5529" has been approved
  metadata:
    creationTimestamp: "2026-04-16T14:30:15Z"
    name: csr-p5529.18a6dcbefcbcc793
    namespace: default
    resourceVersion: "6489"
    uid: 173e0349-67f5-4968-9161-a00520c42e7d
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:25Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-r58qf
  kind: Event
  lastTimestamp: "2026-04-16T14:30:25Z"
  message: CSR "csr-r58qf" has been approved
  metadata:
    creationTimestamp: "2026-04-16T14:30:25Z"
    name: csr-r58qf.18a6dcc11e2d9fbb
    namespace: default
    resourceVersion: "6692"
    uid: 06469a11-02dc-465d-b8f3-94266fe1ab0c
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:07Z"
  involvedObject:
    kind: CertificateSigningRequest
    name: csr-t9xnd
  kind: Event
  lastTimestamp: "2026-04-16T14:30:07Z"
  message: CSR "csr-t9xnd" has been approved
  metadata:
    creationTimestamp: "2026-04-16T14:30:07Z"
    name: csr-t9xnd.18a6dcbd1e6ddbc4
    namespace: default
    resourceVersion: "6223"
    uid: bfbe4236-17d2-4339-bd39-5a4f992bda08
  reason: CSRApproved
  reportingComponent: ovnkube-csr-approver-controller
  reportingInstance: ""
  source:
    component: ovnkube-csr-approver-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:38:27Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14636"
    uid: a0e3bd84-df15-480e-bf29-73637bd8b4b7
  kind: Event
  lastTimestamp: "2026-04-16T14:38:27Z"
  message: 'failed to create OAuth client: failed to get auth proxy secret openshift-ingress/kube-auth-proxy-creds: Secret "kube-auth-proxy-creds" not found'
  metadata:
    creationTimestamp: "2026-04-16T14:38:27Z"
    name: default-gateway.18a6dd3182ca41de
    namespace: default
    resourceVersion: "14640"
    uid: 6e69338e-99d6-4590-9362-b22559189aa6
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 12
  eventTime: null
  firstTimestamp: "2026-04-16T14:38:29Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: GatewayConfig
    name: default-gateway
    resourceVersion: "14929"
    uid: a0e3bd84-df15-480e-bf29-73637bd8b4b7
  kind: Event
  lastTimestamp: "2026-04-16T14:38:43Z"
  message: 'failed to lookup object openshift-ingress/data-science-tls-rule: no matches for kind "DestinationRule" in version "networking.istio.io/v1"'
  metadata:
    creationTimestamp: "2026-04-16T14:38:29Z"
    name: default-gateway.18a6dd31f1e367b6
    namespace: default
    resourceVersion: "15441"
    uid: 37d22d8b-af49-4862-9698-0d917aebf3f9
  reason: ProvisioningError
  reportingComponent: gatewayconfig
  reportingInstance: ""
  source:
    component: gatewayconfig
  type: Warning
- apiVersion: v1
  count: 13
  eventTime: null
  firstTimestamp: "2026-04-16T14:38:38Z"
  involvedObject:
    apiVersion: components.platform.opendatahub.io/v1alpha1
    kind: Kserve
    name: default-kserve
    resourceVersion: "15352"
    uid: 1d16a466-1b1e-4245-8e9f-98a0a63dc311
  kind: Event
  lastTimestamp: "2026-04-16T14:39:03Z"
  message: "failure deploying resource {map[apiVersion:serving.kserve.io/v1alpha1 kind:LLMInferenceServiceConfig metadata:map[annotations:map[internal.config.kubernetes.io/previousKinds:LLMInferenceServiceConfig internal.config.kubernetes.io/previousNames:kserve-config-llm-decode-template internal.config.kubernetes.io/previousNamespaces:opendatahub platform.opendatahub.io/instance.generation:1 platform.opendatahub.io/instance.name:default-kserve platform.opendatahub.io/instance.uid:1d16a466-1b1e-4245-8e9f-98a0a63dc311 platform.opendatahub.io/type:Open Data Hub platform.opendatahub.io/version:3.4.0-ea.1 serving.kserve.io/well-known-config:true] labels:map[app.kubernetes.io/part-of:kserve app.opendatahub.io/kserve:true platform.opendatahub.io/part-of:kserve] name:v3-4-0-ea-1-kserve-config-llm-decode-template namespace:opendatahub ownerReferences:[map[apiVersion:components.platform.opendatahub.io/v1alpha1 blockOwnerDeletion:%!s(bool=true) controller:%!s(bool=true) kind:Kserve name:default-kserve uid:1d16a466-1b1e-4245-8e9f-98a0a63dc311]]] spec:map[template:map[containers:[map[args:[if [ \"$KSERVE_INFER_ROCE\" = \"true\" ]; then\n echo \"Trying to infer RoCE configs ... \"\n grep -H . /sys/class/infiniband/*/ports/*/gids/* 2>/dev/null\n grep -H . /sys/class/infiniband/*/ports/*/gid_attrs/types/* 2>/dev/null\n\n cat /proc/driver/nvidia/params\n\n \ KSERVE_INFER_IB_GID_INDEX_GREP=${KSERVE_INFER_IB_GID_INDEX_GREP:-\"RoCE v2\"}\n\n \ echo \"[Infer RoCE] Discovering active HCAs ...\"\n active_hcas=()\n # Loop through all mlx5 devices found in sysfs\n for hca_dir in /sys/class/infiniband/mlx5_*; do\n # Ensure it's a directory before proceeding\n if [ -d \"$hca_dir\" ]; then\n hca_name=$(basename \"$hca_dir\")\n port_state_file=\"$hca_dir/ports/1/state\" # Assume port 1\n type_file=\"$hca_dir/ports/1/gid_attrs/types/*\"\n\n \ echo \"[Infer RoCE] Check if the port state file ${port_state_file} exists and contains 'ACTIVE'\"\n if [ -f \"$port_state_file\" ] && grep -q \"ACTIVE\" \"$port_state_file\" && grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" ${type_file} 2>/dev/null; then\n echo \"[Infer RoCE] Found active HCA: $hca_name\"\n active_hcas+=(\"$hca_name\")\n else\n \ echo \"[Infer RoCE] Skipping inactive or down HCA: $hca_name\"\n \ fi\n fi\n done\n\n ucx_hcas=()\n for hca in \"${active_hcas[@]}\"; do\n ucx_hcas+=(\"${hca}:1\")\n done\n\n # Check if we found any active HCAs\n \ if [ ${#active_hcas[@]} -gt 0 ]; then\n # Join the array elements with a comma\n hcas=$(IFS=,; echo \"${active_hcas[*]}\")\n echo \"[Infer RoCE] Setting active HCAs: ${hcas}\"\n export NCCL_IB_HCA=${NCCL_IB_HCA:-${hcas}}\n \ export NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST:-${ucx_hcas}}\n export UCX_NET_DEVICES=${UCX_NET_DEVICES:-${ucx_hcas}}\n\n \ echo \"[Infer RoCE] NCCL_IB_HCA=${NCCL_IB_HCA}\"\n echo \"[Infer RoCE] NVSHMEM_HCA_LIST=${NVSHMEM_HCA_LIST}\"\n else\n echo \"[Infer RoCE] WARNING: No active RoCE HCAs found. NCCL_IB_HCA will not be set.\"\n fi\n\n if [ ${#active_hcas[@]} -gt 0 ]; then\n echo \"[Infer RoCE] Finding GID_INDEX for each active HCA (SR-IOV compatible)...\"\n\n # For SR-IOV environments, find the most common IPv4 RoCE v2 GID index across all HCAs\n declare -A gid_index_count\n declare -A hca_gid_index\n\n for hca_name in \"${active_hcas[@]}\"; do\n echo \"[Infer RoCE] Processing HCA: ${hca_name}\"\n\n # Find all RoCE v2 IPv4 GIDs for this HCA and count by index\n for tpath in /sys/class/infiniband/${hca_name}/ports/1/gid_attrs/types/*; do\n if grep -q \"${KSERVE_INFER_IB_GID_INDEX_GREP}\" \"$tpath\" 2>/dev/null; then\n idx=$(basename \"$tpath\")\n gid_file=\"/sys/class/infiniband/${hca_name}/ports/1/gids/${idx}\"\n \ # Check for IPv4 GID (contains ffff:)\n if [ -f \"$gid_file\" ] && grep -q \"ffff:\" \"$gid_file\"; then\n gid_value=$(cat \"$gid_file\" 2>/dev/null || echo \"\")\n echo \"[Infer RoCE] Found IPv4 RoCE v2 GID for ${hca_name}: index=${idx}, gid=${gid_value}\"\n hca_gid_index[\"${hca_name}\"]=\"${idx}\"\n \ gid_index_count[\"${idx}\"]=$((${gid_index_count[\"${idx}\"]} + 1))\n break # Use first found IPv4 GID per HCA\n fi\n \ fi\n done\n done\n\n # Find the most common GID index (most likely to be consistent across nodes)\n best_gid_index=\"\"\n \ max_count=0\n for idx in \"${!gid_index_count[@]}\"; do\n count=${gid_index_count[\"${idx}\"]}\n \ echo \"[Infer RoCE] GID_INDEX ${idx} found on ${count} HCAs\"\n if [ $count -gt $max_count ]; then\n max_count=$count\n best_gid_index=\"$idx\"\n \ fi\n done\n\n # Use deterministic fallback if counts are equal - prefer lower index number \n if [ ${#gid_index_count[@]} -gt 1 ]; then\n \ echo \"[Infer RoCE] Multiple GID indices found, selecting most common: ${best_gid_index}\"\n # If there's a tie, prefer index 3 as it's most common in SR-IOV setups\n if [ -n \"${gid_index_count['3']}\" ] && [ \"${gid_index_count['3']}\" -eq \"$max_count\" ]; then\n best_gid_index=\"3\"\n \ echo \"[Infer RoCE] Using deterministic fallback: GID_INDEX=3 (SR-IOV standard)\"\n fi\n fi\n\n # Check if GID_INDEX is already set via environment variables\n if [ -n \"${NCCL_IB_GID_INDEX}\" ]; then\n echo \"[Infer RoCE] Using pre-configured NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX} from environment\"\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n \ export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$NCCL_IB_GID_INDEX}\n echo \"[Infer RoCE] Using hardcoded GID_INDEX=${NCCL_IB_GID_INDEX} for NCCL, NVSHMEM, and UCX\"\n elif [ -n \"$best_gid_index\" ]; then\n echo \"[Infer RoCE] Selected GID_INDEX: ${best_gid_index} (found on ${max_count} HCAs)\"\n\n \ export NCCL_IB_GID_INDEX=${NCCL_IB_GID_INDEX:-$best_gid_index}\n export NVSHMEM_IB_GID_INDEX=${NVSHMEM_IB_GID_INDEX:-$best_gid_index}\n export UCX_IB_GID_INDEX=${UCX_IB_GID_INDEX:-$best_gid_index}\n\n echo \"[Infer RoCE] Exported GID_INDEX=${best_gid_index} for NCCL, NVSHMEM, and UCX\"\n else\n \ echo \"[Infer RoCE] ERROR: No valid IPv4 ${KSERVE_INFER_IB_GID_INDEX_GREP} GID_INDEX found on any HCA.\"\n fi\n else\n echo \"[Infer RoCE] No active HCAs found, skipping GID_INDEX inference.\"\n fi\nfi\n\neval \"vllm serve /mnt/models \\\n --served-model-name \"{{ .Spec.Model.Name }}\" \\\n --port 8001 \\\n ${VLLM_ADDITIONAL_ARGS} \\\n --enable-ssl-refresh \\\n --ssl-certfile /var/run/kserve/tls/tls.crt \\\n --ssl-keyfile /var/run/kserve/tls/tls.key\"] command:[/bin/bash -c] env:[map[name:HOME value:/home] map[name:VLLM_LOGGING_LEVEL value:INFO] map[name:HF_HUB_CACHE value:/models]] image:registry.redhat.io/rhaiis/vllm-cuda-rhel9@sha256:fc68d623d1bfc36c8cb2fe4a71f19c8578cfb420ce8ce07b20a02c1ee0be0cf3 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=120) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:main ports:[map[containerPort:%!s(int64=8001) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=60) httpGet:map[path:/health port:%!s(int64=8001) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true) seccompProfile:map[type:RuntimeDefault]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/home name:home] map[mountPath:/dev/shm name:dshm] map[mountPath:/models name:model-cache] map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] initContainers:[map[args:[--port=8000 --vllm-port=8001 --connector=nixlv2 --secure-proxy=true --cert-path=/var/run/kserve/tls --decoder-use-tls=true --prefiller-use-tls=true --enable-ssrf-protection=true --pool-group=inference.networking.x-k8s.io] env:[map[name:INFERENCE_POOL_NAMESPACE valueFrom:map[fieldRef:map[fieldPath:metadata.namespace]]] map[name:SSL_CERT_DIR value:/var/run/kserve/tls:/var/run/secrets/kubernetes.io/serviceaccount:/etc/pki/tls/certs]] image:quay.io/opendatahub/llm-d-routing-sidecar:release-v0.4 imagePullPolicy:IfNotPresent livenessProbe:map[failureThreshold:%!s(int64=3) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=10)] name:llm-d-routing-sidecar ports:[map[containerPort:%!s(int64=8000) protocol:TCP]] readinessProbe:map[failureThreshold:%!s(int64=10) httpGet:map[path:/health port:%!s(int64=8000) scheme:HTTPS] initialDelaySeconds:%!s(int64=10) periodSeconds:%!s(int64=10) timeoutSeconds:%!s(int64=5)] resources:map[] restartPolicy:Always securityContext:map[allowPrivilegeEscalation:%!s(bool=false) capabilities:map[drop:[ALL]] readOnlyRootFilesystem:%!s(bool=false) runAsNonRoot:%!s(bool=true)] terminationMessagePath:/dev/termination-log terminationMessagePolicy:FallbackToLogsOnError volumeMounts:[map[mountPath:/var/run/kserve/tls name:tls-certs readOnly:%!s(bool=true)]]]] terminationGracePeriodSeconds:%!s(int64=30) volumes:[map[emptyDir:map[] name:home] map[emptyDir:map[medium:Memory sizeLimit:1Gi] name:dshm] map[emptyDir:map[] name:model-cache] map[name:tls-certs secret:map[secretName:{{ ChildName .ObjectMeta.Name `-kserve-self-signed-certs` }}]]]]]]}: apply failed serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig: unable to patch serving.kserve.io/v1alpha1, Kind=LLMInferenceServiceConfig opendatahub/v3-4-0-ea-1-kserve-config-llm-decode-template: Internal error occurred: failed calling webhook \"llminferenceserviceconfig.kserve-webhook-server.validator\": failed to call webhook: Post \"https://kserve-webhook-server-service.opendatahub.svc:443/validate-serving-kserve-io-v1alpha1-llminferenceserviceconfig?timeout=10s\": no endpoints available for service \"kserve-webhook-server-service\""
  metadata:
    creationTimestamp: "2026-04-16T14:38:38Z"
    name: default-kserve.18a6dd34073cda8c
    namespace: default
    resourceVersion: "16830"
    uid: 994b3156-95f4-4cf0-8d35-5ea4f5521d91
  reason: ProvisioningError
  reportingComponent: kserve
  reportingInstance: ""
  source:
    component: kserve
  type: Warning
- apiVersion: v1
  count: 25
  eventTime: null
  firstTimestamp: "2026-04-16T14:38:27Z"
  involvedObject:
    apiVersion: services.platform.opendatahub.io/v1alpha1
    kind: Monitoring
    name: default-monitoring
    resourceVersion: "14591"
    uid: 21c4620d-7156-4ca0-9560-30219265806e
  kind: Event
  lastTimestamp: "2026-04-16T14:39:00Z"
  message: 'error fetching list of deployments: unable to list: opendatahub-monitoring because of unknown namespace for the cache'
  metadata:
    creationTimestamp: "2026-04-16T14:38:27Z"
    name: default-monitoring.18a6dd317cc36331
    namespace: default
    resourceVersion: "16305"
    uid: 44d3f9d2-1a54-4e68-a483-4f674a25405e
  reason: ProvisioningError
  reportingComponent: monitoring
  reportingInstance: ""
  source:
    component: monitoring
  type: Warning
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:44Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: ip-10-0-128-173.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:44Z"
  message: 'Node ip-10-0-128-173.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T14:29:44Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcb7b68cdf1e
    namespace: default
    resourceVersion: "5575"
    uid: 2fa993e4-e2f3-4720-9829-df3225d2993d
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-173.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-173.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:44Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: ip-10-0-128-173.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:44Z"
  message: 'Node ip-10-0-128-173.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T14:29:44Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcb7b68d2974
    namespace: default
    resourceVersion: "5576"
    uid: 08c0de55-1752-4943-90b6-2833c2cd5b9e
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-173.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-173.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:44Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: ip-10-0-128-173.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:44Z"
  message: 'Node ip-10-0-128-173.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T14:29:44Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcb7b68d4fb2
    namespace: default
    resourceVersion: "5578"
    uid: d7f8c420-1be7-4ead-8ef1-a28a2fde30f8
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-173.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-173.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:44Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: ip-10-0-128-173.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:44Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T14:29:44Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcb7b92094ef
    namespace: default
    resourceVersion: "5481"
    uid: a58df631-b5b0-4f4c-b57e-06e357af4852
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-173.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-173.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:45Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    resourceVersion: "5482"
    uid: 06087004-107c-4ed7-a7f2-01e53500aa43
  kind: Event
  lastTimestamp: "2026-04-16T14:29:45Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T14:29:45Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcb7ce56b4c6
    namespace: default
    resourceVersion: "5583"
    uid: 56739e86-a50f-4b3f-ac1d-2b4996968fa7
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:49Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: 06087004-107c-4ed7-a7f2-01e53500aa43
  kind: Event
  lastTimestamp: "2026-04-16T14:29:49Z"
  message: 'Node ip-10-0-128-173.ec2.internal event: Registered Node ip-10-0-128-173.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T14:29:49Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcb8c484d887
    namespace: default
    resourceVersion: "5669"
    uid: 1dcb379e-d1bd-44dc-b66d-74bbd631b930
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:17Z"
  involvedObject:
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: ip-10-0-128-173.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:30:17Z"
  message: 'Node ip-10-0-128-173.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T14:30:17Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcbf59dee494
    namespace: default
    resourceVersion: "6534"
    uid: 4a064b1c-9106-4e57-8ece-cbe2eeb86fe4
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-128-173.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-128-173.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:32:15Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-128-173.ec2.internal
    uid: 06087004-107c-4ed7-a7f2-01e53500aa43
  kind: Event
  lastTimestamp: "2026-04-16T14:32:15Z"
  message: 'Node ip-10-0-128-173.ec2.internal event: Registered Node ip-10-0-128-173.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T14:32:15Z"
    name: ip-10-0-128-173.ec2.internal.18a6dcdad3031f22
    namespace: default
    resourceVersion: "7964"
    uid: b9c5de53-5a4e-4ecf-8cd2-ccc80a1d6305
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:53Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: ip-10-0-140-144.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:53Z"
  message: Starting kubelet.
  metadata:
    creationTimestamp: "2026-04-16T14:29:53Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9d9e97099
    namespace: default
    resourceVersion: "5746"
    uid: c475c87a-989a-4cbe-8f88-65307b00c4d6
  reason: Starting
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-144.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-144.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:53Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: ip-10-0-140-144.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:54Z"
  message: 'Node ip-10-0-140-144.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T14:29:53Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9dbd88824
    namespace: default
    resourceVersion: "5844"
    uid: bb22e0a1-5c13-47d5-ae2d-d08387ba05c5
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-144.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-144.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:53Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: ip-10-0-140-144.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:54Z"
  message: 'Node ip-10-0-140-144.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T14:29:53Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9dbd8e416
    namespace: default
    resourceVersion: "5848"
    uid: 7a43081e-d168-4a0c-8292-0bd8f2ee4400
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-144.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-144.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:53Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: ip-10-0-140-144.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:54Z"
  message: 'Node ip-10-0-140-144.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T14:29:53Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9dbd90f56
    namespace: default
    resourceVersion: "5851"
    uid: c2488a02-2a63-4b8e-9941-e41f4c1a9eaa
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-144.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-144.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:53Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: ip-10-0-140-144.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:53Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T14:29:53Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9de90c08c
    namespace: default
    resourceVersion: "5751"
    uid: 248301b5-821f-4c94-b87e-7750faccc1a6
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-144.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-144.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:54Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: dbc9ed74-ceb3-433d-8bfb-c1237addf952
  kind: Event
  lastTimestamp: "2026-04-16T14:29:54Z"
  message: 'Node ip-10-0-140-144.ec2.internal event: Registered Node ip-10-0-140-144.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T14:29:54Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9ee91f535
    namespace: default
    resourceVersion: "5810"
    uid: 4c67148c-f4ab-46f4-a3fe-d4eb1337db80
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:54Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    resourceVersion: "5752"
    uid: dbc9ed74-ceb3-433d-8bfb-c1237addf952
  kind: Event
  lastTimestamp: "2026-04-16T14:29:54Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T14:29:54Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcb9f3b42aca
    namespace: default
    resourceVersion: "5849"
    uid: 3d3595db-6474-4236-925b-8d2130180b0c
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:20Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    resourceVersion: "6621"
    uid: dbc9ed74-ceb3-433d-8bfb-c1237addf952
  kind: Event
  lastTimestamp: "2026-04-16T14:30:20Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-140-144.ec2.internal, error getting gateway config for node ip-10-0-140-144.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-140-144.ec2.internal", failed to update chassis to local for local node ip-10-0-140-144.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-140-144.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-140-144.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T14:30:20Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcc029c5efa2
    namespace: default
    resourceVersion: "6624"
    uid: c5e4ea1e-3393-4c43-9d95-c7e64931c753
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:26Z"
  involvedObject:
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: ip-10-0-140-144.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:30:26Z"
  message: 'Node ip-10-0-140-144.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T14:30:26Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcc1609c6076
    namespace: default
    resourceVersion: "6727"
    uid: 5d0519d2-c3f0-4c20-ac54-49f26b3d7365
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-140-144.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-140-144.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:32:15Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-140-144.ec2.internal
    uid: dbc9ed74-ceb3-433d-8bfb-c1237addf952
  kind: Event
  lastTimestamp: "2026-04-16T14:32:15Z"
  message: 'Node ip-10-0-140-144.ec2.internal event: Registered Node ip-10-0-140-144.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T14:32:15Z"
    name: ip-10-0-140-144.ec2.internal.18a6dcdad30335a2
    namespace: default
    resourceVersion: "7977"
    uid: c15e0993-000c-4bc2-ab65-18002fb37bdd
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: ip-10-0-141-239.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:42Z"
  message: 'Node ip-10-0-141-239.ec2.internal status is now: NodeHasSufficientMemory'
  metadata:
    creationTimestamp: "2026-04-16T14:29:42Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcb7268c7506
    namespace: default
    resourceVersion: "5366"
    uid: 865d3a84-425c-40eb-8540-8bbd51a2145f
  reason: NodeHasSufficientMemory
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-239.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-239.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: ip-10-0-141-239.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:42Z"
  message: 'Node ip-10-0-141-239.ec2.internal status is now: NodeHasNoDiskPressure'
  metadata:
    creationTimestamp: "2026-04-16T14:29:42Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcb7268cb6b6
    namespace: default
    resourceVersion: "5367"
    uid: e991bd1a-50fd-498f-b3f0-af91157f4bbc
  reason: NodeHasNoDiskPressure
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-239.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-239.ec2.internal
  type: Normal
- apiVersion: v1
  count: 6
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: ip-10-0-141-239.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:42Z"
  message: 'Node ip-10-0-141-239.ec2.internal status is now: NodeHasSufficientPID'
  metadata:
    creationTimestamp: "2026-04-16T14:29:42Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcb7268cde42
    namespace: default
    resourceVersion: "5369"
    uid: 0ffcc2ea-a43d-448e-a027-9ab1be5f64d0
  reason: NodeHasSufficientPID
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-239.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-239.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:42Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: ip-10-0-141-239.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:29:42Z"
  message: Updated Node Allocatable limit across pods
  metadata:
    creationTimestamp: "2026-04-16T14:29:42Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcb7298a7871
    namespace: default
    resourceVersion: "5319"
    uid: e7ebb6be-b64b-4630-8e1f-a0d47c259877
  reason: NodeAllocatableEnforced
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-239.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-239.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:42Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    resourceVersion: "5321"
    uid: 8dab1a35-308c-49df-b687-fdc08ebe6058
  kind: Event
  lastTimestamp: "2026-04-16T14:29:42Z"
  message: Node synced successfully
  metadata:
    creationTimestamp: "2026-04-16T14:29:42Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcb73ff7a2f3
    namespace: default
    resourceVersion: "5389"
    uid: bcc21690-a661-46f9-95fa-d9f975ef9371
  reason: Synced
  reportingComponent: cloud-node-controller
  reportingInstance: ""
  source:
    component: cloud-node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:29:44Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: 8dab1a35-308c-49df-b687-fdc08ebe6058
  kind: Event
  lastTimestamp: "2026-04-16T14:29:44Z"
  message: 'Node ip-10-0-141-239.ec2.internal event: Registered Node ip-10-0-141-239.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T14:29:44Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcb79a6c1ea6
    namespace: default
    resourceVersion: "5470"
    uid: 66b522ff-e7ff-48c1-ae5d-f4d8ec61bfbf
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:09Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    resourceVersion: "6249"
    uid: 8dab1a35-308c-49df-b687-fdc08ebe6058
  kind: Event
  lastTimestamp: "2026-04-16T14:30:09Z"
  message: '[k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-141-239.ec2.internal, error getting gateway config for node ip-10-0-141-239.ec2.internal: k8s.ovn.org/l3-gateway-config annotation not found for node "ip-10-0-141-239.ec2.internal", failed to update chassis to local for local node ip-10-0-141-239.ec2.internal, error: failed to parse node chassis-id for node - ip-10-0-141-239.ec2.internal, error: k8s.ovn.org/node-chassis-id annotation not found for node ip-10-0-141-239.ec2.internal]'
  metadata:
    creationTimestamp: "2026-04-16T14:30:09Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcbd79d34028
    namespace: default
    resourceVersion: "6255"
    uid: 86f0aef9-50bd-48b9-88aa-c604de176e02
  reason: ErrorAddingResource
  reportingComponent: ovnk-controlplane
  reportingInstance: ""
  source:
    component: ovnk-controlplane
  type: Warning
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:30:14Z"
  involvedObject:
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: ip-10-0-141-239.ec2.internal
  kind: Event
  lastTimestamp: "2026-04-16T14:30:14Z"
  message: 'Node ip-10-0-141-239.ec2.internal status is now: NodeReady'
  metadata:
    creationTimestamp: "2026-04-16T14:30:14Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcbe8fe448cd
    namespace: default
    resourceVersion: "6383"
    uid: 19cf1935-f4f5-41b0-bc04-cdeecf3354da
  reason: NodeReady
  reportingComponent: kubelet
  reportingInstance: ip-10-0-141-239.ec2.internal
  source:
    component: kubelet
    host: ip-10-0-141-239.ec2.internal
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:32:15Z"
  involvedObject:
    apiVersion: v1
    kind: Node
    name: ip-10-0-141-239.ec2.internal
    uid: 8dab1a35-308c-49df-b687-fdc08ebe6058
  kind: Event
  lastTimestamp: "2026-04-16T14:32:15Z"
  message: 'Node ip-10-0-141-239.ec2.internal event: Registered Node ip-10-0-141-239.ec2.internal in Controller'
  metadata:
    creationTimestamp: "2026-04-16T14:32:15Z"
    name: ip-10-0-141-239.ec2.internal.18a6dcdad302749e
    namespace: default
    resourceVersion: "7961"
    uid: e277dd06-fe39-4ab3-8eda-1db2994ed6c4
  reason: RegisteredNode
  reportingComponent: node-controller
  reportingInstance: ""
  source:
    component: node-controller
  type: Normal
- apiVersion: v1
  count: 10
  eventTime: null
  firstTimestamp: "2026-04-16T14:39:40Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kuadrant-system
    resourceVersion: "17242"
    uid: ba4237d4-527f-427d-80d8-451de4275d44
  kind: Event
  lastTimestamp: "2026-04-16T14:39:43Z"
  message: 'error using catalogsource kuadrant-system/kuadrant-operator-catalog: failed to list bundles: rpc error: code = Unavailable desc = connection error: desc = "error reading server preface: http2: frame too large"'
  metadata:
    creationTimestamp: "2026-04-16T14:39:40Z"
    name: kuadrant-system.18a6dd425a78c151
    namespace: default
    resourceVersion: "17460"
    uid: ef2ec6a5-739b-4ee3-b1a8-a360d08a9ddf
  reason: ResolutionFailed
  reportingComponent: operator-lifecycle-manager
  reportingInstance: ""
  source:
    component: operator-lifecycle-manager
  type: Warning
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Received signal to terminate, becoming unready, but keeping serving
  metadata:
    creationTimestamp: "2026-04-16T14:31:54Z"
    name: kube-system.18a6dcd5dea796fb
    namespace: default
    resourceVersion: "7576"
    uid: 49c2e91e-585e-466f-bf87-3a3702b94302
  reason: TerminationStart
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5d8cbffb68-p8w52
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: The minimal shutdown duration of 15s finished
  metadata:
    creationTimestamp: "2026-04-16T14:32:09Z"
    name: kube-system.18a6dcd95d20885c
    namespace: default
    resourceVersion: "7844"
    uid: 1543c20b-9507-4865-be90-410d4558785e
  reason: TerminationMinimalShutdownDurationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5d8cbffb68-p8w52
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: Server has stopped listening
  metadata:
    creationTimestamp: "2026-04-16T14:32:09Z"
    name: kube-system.18a6dcd95d65125a
    namespace: default
    resourceVersion: "7845"
    uid: 56b78328-8bd4-477b-aaad-80c1e2a59ebe
  reason: TerminationStoppedServing
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5d8cbffb68-p8w52
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pre-shutdown hooks have been finished
  metadata:
    creationTimestamp: "2026-04-16T14:32:09Z"
    name: kube-system.18a6dcd95da882e5
    namespace: default
    resourceVersion: "7846"
    uid: 49c83a9d-40ee-4993-8208-268777dba02d
  reason: TerminationPreShutdownHooksFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5d8cbffb68-p8w52
  type: Normal
- apiVersion: v1
  eventTime: null
  firstTimestamp: null
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: kube-system
    namespace: default
  kind: Event
  lastTimestamp: null
  message: All pending requests processed
  metadata:
    creationTimestamp: "2026-04-16T14:33:09Z"
    name: kube-system.18a6dce755c131fe
    namespace: default
    resourceVersion: "10168"
    uid: 3e0d3ae7-8278-4eb8-92e0-c2176a64e2e5
  reason: TerminationGracefulTerminationFinished
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: openshift-apiserver-5d8cbffb68-p8w52
  type: Normal
- apiVersion: v1
  count: 1
  eventTime: null
  firstTimestamp: "2026-04-16T14:24:57Z"
  involvedObject:
    apiVersion: v1
    kind: Namespace
    name: openshift-kube-apiserver
    namespace: default
  kind: Event
  lastTimestamp: "2026-04-16T14:24:57Z"
  message: readyz=true
  metadata:
    creationTimestamp: "2026-04-16T14:24:57Z"
    name: openshift-kube-apiserver.18a6dc74dbe3a009
    namespace: default
    resourceVersion: "277"
    uid: 85a4de0d-a432-4d60-a1b0-257cbf69c18b
  reason: KubeAPIReadyz
  reportingComponent: ""
  reportingInstance: ""
  source:
    component: apiserver
    host: kube-apiserver-5987f78478-rdr7v
  type: Warning
kind: EventList
metadata:
  resourceVersion: "29286"