level=info ts=2026-02-16T13:38:04.760136717Z caller=main.go:205 msg="Starting Prometheus Operator" version="(version=0.75.2, branch=HEAD, revision=8555b1dd)"
level=info ts=2026-02-16T13:38:04.760171514Z caller=main.go:206 build_context="(go=go1.22.12 (Red Hat 1.22.12-3.el9_5) X:loopvar,strictfipsruntime, platform=linux/amd64, user=root, date=20260128-10:16:26, tags=strictfipsruntime)"
level=info ts=2026-02-16T13:38:04.760178419Z caller=main.go:207 feature_gates="PrometheusAgentDaemonSet=false"
level=info ts=2026-02-16T13:38:04.760545538Z caller=cpu.go:28 msg="Leaving GOMAXPROCS=16: CPU quota undefined"
level=info ts=2026-02-16T13:38:04.760575969Z caller=main.go:220 msg="namespaces filtering configuration " config="{allow_list=\"\",deny_list=\"\",prometheus_allow_list=\"openshift-monitoring\",alertmanager_allow_list=\"openshift-monitoring\",alertmanagerconfig_allow_list=\"\",thanosruler_allow_list=\"openshift-monitoring\"}"
level=info ts=2026-02-16T13:38:04.777081364Z caller=main.go:254 msg="connection established" cluster-version=v1.30.14
level=warn ts=2026-02-16T13:38:04.791911701Z caller=main.go:79 msg="resource \"scrapeconfigs\" (group: \"monitoring.coreos.com/v1alpha1\") not installed in the cluster"
level=info ts=2026-02-16T13:38:04.825484343Z caller=operator.go:361 component=prometheus-controller msg="Kubernetes API capabilities" endpointslices=true
level=warn ts=2026-02-16T13:38:04.827684019Z caller=main.go:79 msg="resource \"prometheusagents\" (group: \"monitoring.coreos.com/v1alpha1\") not installed in the cluster"
level=info ts=2026-02-16T13:38:04.888996699Z caller=server.go:298 msg="starting insecure server" address=127.0.0.1:8080
level=info ts=2026-02-16T13:38:04.989423979Z caller=operator.go:283 component=thanos-controller msg="successfully synced all caches"
level=info ts=2026-02-16T13:38:05.090041872Z caller=operator.go:313 component=alertmanager-controller msg="successfully synced all caches"
level=info ts=2026-02-16T13:38:05.090074511Z caller=operator.go:418 component=prometheus-controller msg="successfully synced all caches"
level=info ts=2026-02-16T13:38:08.023933377Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.100222984Z caller=operator.go:691 component=alertmanager-controller msg="StatefulSet not found" key=openshift-monitoring/alertmanager-main
level=info ts=2026-02-16T13:38:08.117380906Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.117515246Z caller=operator.go:691 component=alertmanager-controller msg="StatefulSet not found" key=openshift-monitoring/alertmanager-main
level=info ts=2026-02-16T13:38:08.219397543Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.293604978Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.465268966Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.76691363Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.826989628Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.89328356Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:08.959296725Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:10.046550451Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:10.123270927Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:11.69769927Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:11.757035547Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:11.939713479Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:12.485443008Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:12.910945501Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:12.977747697Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:13.001455404Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:13.137141932Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:13.192871967Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:13.204664175Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:13.390232012Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:13.532308685Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:14.167532153Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:14.485793189Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:17.354272982Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:17.51123947Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:17.552435565Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:20.068431291Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:20.188663481Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:20.248305963Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:20.469575034Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:27.538230171Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:27.771145022Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:28.037260203Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:28.227563196Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:48.056927715Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:48.223576127Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:48.260681943Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:53.548433494Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:38:53.666756409Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:38:53.709232248Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:39:27.53387479Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:39:27.615388442Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:39:27.712226657Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:39:31.736434726Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:39:31.900129301Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:39:32.081527838Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=warn ts=2026-02-16T13:41:22.118594051Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.Alertmanager ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.11862407Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.StatefulSet ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118607651Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.ThanosRuler ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118610648Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.Prometheus ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118654309Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:409: watch of *v1.Namespace ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118662047Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PrometheusRule ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118674118Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.ServiceMonitor ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118650414Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PartialObjectMetadata ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118673636Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.Probe ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118698354Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PartialObjectMetadata ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118690284Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1alpha1.AlertmanagerConfig ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118711341Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PartialObjectMetadata ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.11871162Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.StatefulSet ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118701801Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:411: watch of *v1.Namespace ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118688373Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PrometheusRule ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118690316Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PartialObjectMetadata ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118728783Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/thanos/operator.go:328: watch of *v1.Namespace ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118687959Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:512: watch of *v1.Namespace ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118731728Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.StatefulSet ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118710624Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/informers/informers.go:118: watch of *v1.PodMonitor ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.118737558Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/thanos/operator.go:326: watch of *v1.Namespace ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=warn ts=2026-02-16T13:41:22.11874444Z caller=klog.go:118 component=k8s_client_runtime func=Warningf msg="github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:514: watch of *v1.Namespace ended with: an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
level=error ts=2026-02-16T13:41:22.118768104Z caller=klog.go:126 component=k8s_client_runtime func=ErrorDepth msg="status \"openshift-monitoring/k8s\" failed: failed to get prometheus status: failed to retrieve statefulset state: Get \"https://172.30.0.1:443/api/v1/namespaces/openshift-monitoring/pods?labelSelector=app.kubernetes.io%2Finstance%3Dk8s%2Capp.kubernetes.io%2Fmanaged-by%3Dprometheus-operator%2Capp.kubernetes.io%2Fname%3Dprometheus%2Coperator.prometheus.io%2Fname%3Dk8s%2Coperator.prometheus.io%2Fshard%3D0%2Cprometheus%3Dk8s\": http2: client connection lost"
level=error ts=2026-02-16T13:41:22.118773129Z caller=controller.go:189 component=kubelet_endpoints kubelet_object=kube-system/kubelet msg="Failed to synchronize nodes" err="listing nodes failed: Get \"https://172.30.0.1:443/api/v1/nodes\": http2: client connection lost"
level=info ts=2026-02-16T13:49:37.130956039Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:49:37.646733445Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:49:38.264831302Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:49:39.087929666Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:40.68479595Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:40.836128134Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:51:41.041269318Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:42.809719438Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:43.109786312Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:43.432989151Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:49.63457719Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:49.775065414Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:51:49.850307365Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:50.146061697Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:59.085722707Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:51:59.257964997Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:51:59.310662966Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:54:14.58076453Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:54:14.727106965Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:54:14.794116866Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:06.361719247Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:06.491697616Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:55:06.556820621Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:06.688052201Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:55:06.740815308Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:06.873361061Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:55:06.923385687Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:07.058097774Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:55:07.108258419Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:24.343192797Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:55:24.534708484Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:55:24.659849346Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:56:16.933073024Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:56:17.064263293Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:56:17.116777057Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:08.120206392Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:08.383485684Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:33.486099032Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:33.632369694Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:57:33.716383589Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:33.741042379Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:57:55.640975249Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:55.794969505Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T13:57:55.850763279Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T13:57:55.862789643Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:29:12.778638149Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T14:29:12.778645626Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:29:13.674944499Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:29:13.674957992Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T14:29:14.940009684Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T14:29:14.940009397Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:29:15.3016371Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:29:15.694928636Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T14:38:21.706229834Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"
level=info ts=2026-02-16T14:38:21.706230099Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:38:21.806542899Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:38:21.903087189Z caller=operator.go:572 component=alertmanager-controller key=openshift-monitoring/main msg="sync alertmanager"
level=info ts=2026-02-16T14:38:21.944289829Z caller=operator.go:808 component=prometheus-controller key=openshift-monitoring/k8s msg="sync prometheus"