ts=2026-03-19T14:00:45.198574978Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:218 msg="Starting Prometheus Operator" version="(version=0.85.0, branch=HEAD, revision=2740c4be8)" build_context="(go=go1.24.6 (Red Hat 1.24.6-1.el9_6) X:strictfipsruntime, platform=linux/amd64, user=root, date=20251208-13:40:21, tags=strictfipsruntime)" feature_gates="PrometheusAgentDaemonSet=false,PrometheusShardRetentionPolicy=false,PrometheusTopologySharding=false,StatusForConfigurationResources=false"
ts=2026-03-19T14:00:45.1989052Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:219 msg="Operator's configuration" watch_referenced_objects_in_all_namespaces=true controller_id=openshift-monitoring/prometheus-operator enable_config_reloader_probes=false
ts=2026-03-19T14:00:45.199476081Z level=info caller=/go/src/github.com/coreos/prometheus-operator/internal/goruntime/cpu.go:27 msg="Leaving GOMAXPROCS=8: CPU quota undefined"
ts=2026-03-19T14:00:45.199514612Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:233 msg="Namespaces filtering configuration " config="{allow_list=\"\",deny_list=\"\",prometheus_allow_list=\"openshift-monitoring\",alertmanager_allow_list=\"openshift-monitoring\",alertmanagerconfig_allow_list=\"\",thanosruler_allow_list=\"openshift-monitoring\"}"
ts=2026-03-19T14:00:45.216003485Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:274 msg="connection established" kubernetes_version=1.33.6
ts=2026-03-19T14:00:45.228109425Z level=warn caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:81 msg="resource \"scrapeconfigs\" (group: \"monitoring.coreos.com/v1alpha1\") not installed in the cluster"
ts=2026-03-19T14:00:45.228132398Z level=info caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:359 msg="Kubernetes API capabilities" endpointslices=true
ts=2026-03-19T14:00:45.254002705Z level=warn caller=/go/src/github.com/coreos/prometheus-operator/cmd/operator/main.go:81 msg="resource \"prometheusagents\" (group: \"monitoring.coreos.com/v1alpha1\") not installed in the cluster"
ts=2026-03-19T14:00:45.303537429Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/kubelet/controller.go:214 msg="Starting controller" component=kubelet_endpoints kubelet_object=kube-system/kubelet
ts=2026-03-19T14:00:45.303582648Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-03-19T14:00:45.303588656Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.304571405Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/server/server.go:293 msg="starting insecure server" address=127.0.0.1:8080
ts=2026-03-19T14:00:45.306066494Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.351016722Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-03-19T14:00:45.357729831Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
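Note: the two `level=warn` records above come from the operator probing for optional CRDs (`scrapeconfigs` and `prometheusagents` in `monitoring.coreos.com/v1alpha1`) before deciding which controllers to run. A minimal sketch of how such a probe can be done with client-go's discovery client (assuming in-cluster credentials; this is illustrative, not the operator's exact code):

```go
package main

import (
	"fmt"
	"log"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/client-go/discovery"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	dc, err := discovery.NewDiscoveryClientForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Ask the API server which resources it serves for this group/version.
	// A 404 here means the whole group/version is absent from the cluster.
	resources, err := dc.ServerResourcesForGroupVersion("monitoring.coreos.com/v1alpha1")
	if apierrors.IsNotFound(err) {
		fmt.Println("monitoring.coreos.com/v1alpha1 not installed in the cluster")
		return
	}
	if err != nil {
		log.Fatal(err)
	}

	installed := false
	for _, r := range resources.APIResources {
		if r.Name == "scrapeconfigs" {
			installed = true
		}
	}
	fmt.Println("scrapeconfigs installed:", installed)
}
```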
ts=2026-03-19T14:00:45.403863054Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-03-19T14:00:45.403907371Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-03-19T14:00:45.403915822Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-03-19T14:00:45.403937539Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-03-19T14:00:45.403946416Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-03-19T14:00:45.403954996Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-03-19T14:00:45.403960812Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-03-19T14:00:45.403968338Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-03-19T14:00:45.403973668Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-03-19T14:00:45.403981471Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=thanos
ts=2026-03-19T14:00:45.403986667Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=thanos
ts=2026-03-19T14:00:45.403993872Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/thanos/operator.go:318 msg="successfully synced all caches" component=thanos-controller
ts=2026-03-19T14:00:45.40603469Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406080744Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406089847Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406099508Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406105521Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
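Note: each "Waiting for caches to sync" / "Caches are synced" pair is one shared informer finishing its initial LIST; a controller registers many informers, hence the repetition. A minimal sketch of the underlying wait-then-work pattern (assuming in-cluster credentials; a single StatefulSet informer stands in for the operator's many informers):

```go
package main

import (
	"log"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	factory := informers.NewSharedInformerFactory(client, 5*time.Minute)
	stsInformer := factory.Apps().V1().StatefulSets().Informer()

	stopCh := make(chan struct{})
	defer close(stopCh)
	factory.Start(stopCh) // each informer runs its own LIST+WATCH loop

	// Block until every registered informer has completed its initial LIST;
	// only then is it safe to start reconciling from the local caches.
	if !cache.WaitForCacheSync(stopCh, stsInformer.HasSynced) {
		log.Fatal("caches never synced")
	}
	log.Println("successfully synced all caches")
}
```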
ts=2026-03-19T14:00:45.406114798Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406120328Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406127737Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406133282Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406140839Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406145993Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406153126Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406158264Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.40616674Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406172465Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406179617Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406184974Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406192465Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=prometheus
ts=2026-03-19T14:00:45.406197594Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=prometheus
ts=2026-03-19T14:00:45.406204954Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:439 msg="successfully synced all caches" component=prometheus-controller
ts=2026-03-19T14:00:45.406195953Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.40623469Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.507042185Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.507086129Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.50709839Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.507131724Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.507142235Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.507150937Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.507156469Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.507164668Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.507170125Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.507180607Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:350 msg="Waiting for caches to sync" controller=alertmanager
ts=2026-03-19T14:00:45.507190267Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/shared_informer.go:357 msg="Caches are synced" controller=alertmanager
ts=2026-03-19T14:00:45.507200836Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:367 msg="successfully synced all caches" component=alertmanager-controller
ts=2026-03-19T14:00:47.968985554Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.036344577Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:722 msg="StatefulSet not found" component=alertmanager-controller key=openshift-monitoring/alertmanager-main
ts=2026-03-19T14:00:48.050907841Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.053159247Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:722 msg="StatefulSet not found" component=alertmanager-controller key=openshift-monitoring/alertmanager-main
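Note: "StatefulSet not found" during the first sync is benign; the controller treats NotFound as "create it" rather than as an error. A minimal sketch of that check, assuming in-cluster access (the namespace and name are taken from the log key; this is illustrative, not the operator's exact code):

```go
package reconcile

import (
	"context"

	appsv1 "k8s.io/api/apps/v1"
	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// ensureStatefulSet creates the generated StatefulSet on first reconciliation
// and updates it on subsequent ones.
func ensureStatefulSet(ctx context.Context, client kubernetes.Interface, desired *appsv1.StatefulSet) error {
	ssClient := client.AppsV1().StatefulSets("openshift-monitoring")

	_, err := ssClient.Get(ctx, "alertmanager-main", metav1.GetOptions{})
	if apierrors.IsNotFound(err) {
		// Nothing to update yet: this is the "StatefulSet not found" path.
		_, err = ssClient.Create(ctx, desired, metav1.CreateOptions{})
		return err
	}
	if err != nil {
		return err
	}
	_, err = ssClient.Update(ctx, desired, metav1.UpdateOptions{})
	return err
}
```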
ts=2026-03-19T14:00:48.255666467Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.335802684Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.41117686Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.590708033Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.74991417Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.801515083Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:48.856846424Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:49.010021805Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:50.163221468Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:50.215082448Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:50.461631796Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:51.484131005Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:51.689673531Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:51.784431185Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:52.16838656Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:52.473163903Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:52.994166225Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:53.051287396Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:00:53.055679199Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:53.163279406Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:53.223162988Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:53.267993786Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:00:53.285571662Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:53.39814989Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:00:53.493078666Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:00:53.66446462Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:05.366178852Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:05.479901804Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:05.598947037Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:05.65021033Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:06.0844662Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:06.217580006Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:06.253662168Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
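Note: the repeated `msg="sync alertmanager"` / `msg="sync prometheus"` records with the same key are the controllers' workqueues at work: every event on a watched object re-enqueues the owning resource's `namespace/name` key, and a worker reconciles one key at a time, so the same key repeats while the cluster converges. A minimal sketch of this queue-driven loop, using client-go's classic workqueue API (the loop body is a placeholder, not the operator's sync logic):

```go
package main

import (
	"log"

	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/util/workqueue"
)

func main() {
	queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())

	// In a real controller, informer event handlers call queue.Add(...)
	// for every add/update/delete touching the resource.
	queue.Add("openshift-monitoring/k8s")

	for {
		item, shutdown := queue.Get()
		if shutdown {
			return
		}
		key := item.(string)
		if ns, name, err := cache.SplitMetaNamespaceKey(key); err == nil {
			log.Printf("sync prometheus key=%s/%s", ns, name)
			// ... build and apply the desired StatefulSet, config secrets, etc.
		}
		queue.Done(item)
		queue.Forget(item) // drop rate-limit history after a successful sync
	}
}
```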
ts=2026-03-19T14:01:06.364995553Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:06.442264532Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:06.499760749Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:08.574682674Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:08.680875978Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:08.762865901Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:08.798820274Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:25.527928897Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:25.683712063Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:25.727077457Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:37.996266831Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:38.108830941Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:38.172203321Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:01:38.214079014Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s
ts=2026-03-19T14:01:38.227513495Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main
ts=2026-03-19T14:03:45.323388706Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-03-19T14:03:45.327878344Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
ts=2026-03-19T14:04:31.355799428Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.StatefulSet err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-03-19T14:04:31.355918284Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.ServiceMonitor err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-03-19T14:04:31.355818177Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:573 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-03-19T14:04:31.355818997Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.StatefulSet err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-03-19T14:04:31.355798853Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.Alertmanager err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-03-19T14:04:31.355824462Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PrometheusRule err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
ts=2026-03-19T14:04:31.355819835Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
succeeding" ts=2026-03-19T14:04:31.355795277Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.Probe err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355853951Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355798973Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.StatefulSet err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355866077Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355869411Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.ThanosRuler err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355866554Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PodMonitor err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355880702Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1alpha1.AlertmanagerConfig err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.35588885Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:571 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection 
lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355891135Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/thanos/operator.go:370 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355901378Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/thanos/operator.go:368 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355904592Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355908981Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.Prometheus err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355916464Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:484 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355800751Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PrometheusRule err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355938694Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:486 type=*v1.Namespace err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:04:31.355943802Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/tools/cache/reflector.go:556 msg="Warning: watch ended with error" reflector=github.com/coreos/prometheus-operator/pkg/informers/informers.go:137 type=*v1.PartialObjectMetadata err="an error on the server (\"unable to decode an event from the watch stream: http2: 
client connection lost\") has prevented the request from succeeding" ts=2026-03-19T14:06:45.409919306Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" ts=2026-03-19T14:06:45.420953623Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" ts=2026-03-19T14:07:02.902791818Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:07:28.854013285Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:07:28.993811029Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main ts=2026-03-19T14:07:29.043964167Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:07:29.194669826Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main ts=2026-03-19T14:07:29.241681985Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:07:29.389483604Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main ts=2026-03-19T14:07:29.43147437Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:08:40.467659996Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:08:40.620213182Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/alertmanager/operator.go:603 msg="sync alertmanager" component=alertmanager-controller key=openshift-monitoring/main ts=2026-03-19T14:08:40.673090557Z level=info caller=/go/src/github.com/coreos/prometheus-operator/pkg/prometheus/server/operator.go:832 msg="sync prometheus" component=prometheus-controller key=openshift-monitoring/k8s ts=2026-03-19T14:09:45.331578409Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" ts=2026-03-19T14:09:45.337967502Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" ts=2026-03-19T14:12:45.326007445Z level=info 
ts=2026-03-19T14:12:45.332367113Z level=info caller=/go/src/github.com/coreos/prometheus-operator/vendor/k8s.io/client-go/rest/warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice"
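Note: the deprecation warning recurs roughly every three minutes because a code path (likely the kubelet endpoints controller, which maintains the kube-system/kubelet object seen at startup) still reads or writes core v1 Endpoints. A sketch of the replacement the warning points at, reading discovery.k8s.io/v1 EndpointSlices for a Service instead, assuming in-cluster credentials and a kubelet Service in kube-system:

```go
package main

import (
	"context"
	"fmt"
	"log"

	discoveryv1 "k8s.io/api/discovery/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// EndpointSlices are linked to their Service by the well-known
	// kubernetes.io/service-name label rather than by object name.
	slices, err := client.DiscoveryV1().EndpointSlices("kube-system").List(context.TODO(),
		metav1.ListOptions{LabelSelector: discoveryv1.LabelServiceName + "=kubelet"})
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range slices.Items {
		for _, ep := range s.Endpoints {
			fmt.Println(ep.Addresses)
		}
	}
}
```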