ts=2026-04-23T21:00:50.717692367Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:218 msg="Starting Prometheus Operator" version="(version=0.85.0, branch=HEAD, revision=2740c4be8)" build_context="(go=go1.24.6 (Red Hat 1.24.6-1.el9_6) X:strictfipsruntime, platform=linux/amd64, user=root, date=20251208-13:40:21, tags=strictfipsruntime)" feature_gates="PrometheusAgentDaemonSet=false,PrometheusShardRetentionPolicy=false,PrometheusTopologySharding=false,StatusForConfigurationResources=false"
ts=2026-04-23T21:00:50.717729427Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:219 msg="Operator's configuration" watch_referenced_objects_in_all_namespaces=true controller_id=openshift-monitoring/prometheus-operator enable_config_reloader_probes=false
ts=2026-04-23T21:00:50.717914361Z level=info caller=/go/src/github.com/coreos/prometheus-operator/internal/goruntime/cpu.go:27 msg="Leaving GOMAXPROCS=16: CPU quota undefined"
ts=2026-04-23T21:00:50.717963033Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:233 msg="Namespaces filtering configuration " config="{allow_list=\"\",deny_list=\"\",prometheus_allow_list=\"openshift-monitoring\",alertmanager_allow_list=\"openshift-monitoring\",alertmanagerconfig_allow_list=\"\",thanosruler_allow_list=\"openshift-monitoring\"}"
ts=2026-04-23T21:00:50.733349893Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:274 msg="connection established" kubernetes_version=1.33.6
ts=2026-04-23T21:00:50.746980029Z level=warn caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:81 msg="resource \"scrapeconfigs\" (group: \"monitoring.coreos.com/v1alpha1\") not installed in the cluster"
ts=2026-04-23T21:00:50.747015318Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:359 msg="Kubernetes API capabilities" endpointslices=true
ts=2026-04-23T21:00:50.77381666Z level=warn caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:81 msg="resource \"prometheusagents\" (group: \"monitoring.coreos.com/v1alpha1\") not installed in the cluster"
ts=2026-04-23T21:00:50.824061426Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/kubelet/controller.go:214 msg="Starting controller" component=kubelet_endpoints kubelet_object=kube-system/kubelet
ts=2026-04-23T21:00:50.824138619Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/server/server.go:293 msg="starting insecure server" address=127.0.0.1:8080
ts=2026-04-23T21:00:50.824132618Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-04-23T21:00:50.825426551Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.826352441Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.862719143Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-04-23T21:00:50.869816145Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-04-23T21:00:50.926369144Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926410504Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926418385Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926427441Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926432855Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926440719Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926446239Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926473424Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926481927Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926489808Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926494895Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926502614Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926508351Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926516586Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926522017Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926530317Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926535618Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926543882Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-04-23T21:00:50.926548901Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-04-23T21:00:50.926555431Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:439 msg="successfully synced all caches" component=prometheus-controller
ts=2026-04-23T21:00:50.927287343Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-04-23T21:00:50.927323826Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-04-23T21:00:50.927331288Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-04-23T21:00:50.927340133Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-04-23T21:00:50.927345447Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-04-23T21:00:50.927353522Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-04-23T21:00:50.927358805Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-04-23T21:00:50.927366591Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-04-23T21:00:50.927374177Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-04-23T21:00:50.927382812Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-04-23T21:00:50.927388129Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-04-23T21:00:50.927394228Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/thanos/operator.go:318 msg="successfully synced all caches" component=thanos-controller
ts=2026-04-23T21:00:50.930723844Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.93075727Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.930764914Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.930774103Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.930779771Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.930787219Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.930792536Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.930799841Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.930806258Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.930818707Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.930827229Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.930835809Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-04-23T21:00:50.930841095Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-04-23T21:00:50.930847369Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:367 msg="successfully synced all caches" component=alertmanager-controller
ts=2026-04-23T21:00:53.874733814Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:53.946264514Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:722 msg="StatefulSet not found" component=alertmanager-controller key=openshift-monitoring/alertmanager-main
ts=2026-04-23T21:00:53.960958712Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.255268421Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.426268199Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.604092134Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.678733285Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.753889656Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.822024762Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:54.905221925Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:55.977650244Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:56.246698613Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:56.375081261Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:57.271041498Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:57.585761534Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:57.700809604Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:57.781497674Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:58.899380464Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:58.962754315Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:00:58.980199573Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:59.088433598Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:59.153828473Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:59.193882556Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:00:59.220243147Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:59.326016985Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:00:59.416535422Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:00:59.610810661Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:07.985887358Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:08.11324336Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:08.155691044Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:08.261554607Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:08.328509073Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:08.386210769Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:22.315594145Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:22.462719497Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:22.517528769Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:33.903480056Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:34.028824244Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:34.109459387Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:34.158088691Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:34.171899856Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:53.851742612Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:01:53.975053723Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:54.066962248Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:01:54.102766859Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:12.324536072Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:02:12.324555397Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:12.394331033Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:02:12.484495264Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:12.712513417Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:02:12.712636993Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:17.387693355Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:17.387700563Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:02:21.483499753Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:21.594590229Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:02:21.655777287Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:02:21.701058317Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-04-23T21:02:21.707847825Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-04-23T21:03:50.846118148Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-04-23T21:03:50.850304356Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-04-23T21:05:08.970005426Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PodMonitor err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970030509Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.ThanosRuler err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970050198Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.Probe err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970054109Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/thanos/operator.go:368 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970055981Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PrometheusRule err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970079589Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970079994Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1alpha1.AlertmanagerConfig err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970094959Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.ServiceMonitor err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970093583Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.Alertmanager err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970111568Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.StatefulSet err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970005548Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:486 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970114738Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PrometheusRule err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970123403Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970031461Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970134977Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970144311Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:571 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970005668Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.Prometheus err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970101126Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:573 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970135969Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.StatefulSet err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970140321Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.StatefulSet err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970157542Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:484 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970447574Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/thanos/operator.go:370 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-04-23T21:05:08.970579037Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"