time=2026-05-01T20:06:00.925Z level=INFO source=main.go:275 msg="Experimental delayed compaction is enabled."
time=2026-05-01T20:06:00.925Z level=INFO source=main.go:303 msg="Experimental Uncached IO is enabled."
time=2026-05-01T20:06:00.987Z level=INFO source=main.go:1556 msg="updated GOGC" old=100 new=75
time=2026-05-01T20:06:00.988Z level=INFO source=main.go:684 msg="Leaving GOMAXPROCS=16: CPU quota undefined" component=automaxprocs
time=2026-05-01T20:06:00.988Z level=INFO source=memlimit.go:198 msg="GOMEMLIMIT is updated" component=automemlimit package=github.com/KimMachineGun/automemlimit/memlimit GOMEMLIMIT=59290100121 previous=9223372036854775807
time=2026-05-01T20:06:00.988Z level=INFO source=main.go:780 msg="Starting Prometheus Server" mode=server version="(version=3.7.3, branch=HEAD, revision=10a5d8a6ea41d870da11c2b5836248d92ecdfb57)"
time=2026-05-01T20:06:00.988Z level=INFO source=main.go:785 msg="operational information" build_context="(go=go1.24.13 (Red Hat 1.24.13-3.el9_6) X:strictfipsruntime, platform=linux/amd64, user=reproducible@reproducible, date=20260317-12:36:42, tags=netgo,builtinassets,strictfipsruntime)" host_details="(Linux 5.14.0-570.104.1.el9_6.x86_64 #1 SMP PREEMPT_DYNAMIC Wed Mar 25 21:36:26 EDT 2026 x86_64 prometheus-k8s-0 (none))" fd_limits="(soft=524287, hard=524288)" vm_limits="(soft=unlimited, hard=unlimited)"
time=2026-05-01T20:06:00.990Z level=INFO source=web.go:660 msg="Start listening for connections" component=web address=127.0.0.1:9090
time=2026-05-01T20:06:00.990Z level=INFO source=main.go:1300 msg="Starting TSDB ..."
time=2026-05-01T20:06:00.993Z level=INFO source=tls_config.go:346 msg="Listening on" component=web address=127.0.0.1:9090
time=2026-05-01T20:06:00.993Z level=INFO source=tls_config.go:385 msg="TLS is disabled." component=web http2=false address=127.0.0.1:9090
time=2026-05-01T20:06:00.996Z level=INFO source=head.go:669 msg="Replaying on-disk memory mappable chunks if any" component=tsdb
time=2026-05-01T20:06:00.996Z level=INFO source=head.go:755 msg="On-disk memory mappable chunks replay completed" component=tsdb duration=1.264µs
time=2026-05-01T20:06:00.996Z level=INFO source=head.go:763 msg="Replaying WAL, this may take a while" component=tsdb
time=2026-05-01T20:06:00.997Z level=INFO source=head.go:836 msg="WAL segment loaded" component=tsdb segment=0 maxSegment=0 duration=749.559µs
time=2026-05-01T20:06:00.997Z level=INFO source=head.go:873 msg="WAL replay completed" component=tsdb checkpoint_replay_duration=22.15µs wal_replay_duration=770.93µs wbl_replay_duration=268ns chunk_snapshot_load_duration=0s mmap_chunk_replay_duration=1.264µs total_replay_duration=811.967µs
time=2026-05-01T20:06:00.998Z level=INFO source=main.go:1321 msg="filesystem information" fs_type=XFS_SUPER_MAGIC
time=2026-05-01T20:06:00.998Z level=INFO source=main.go:1324 msg="TSDB started"
time=2026-05-01T20:06:00.998Z level=INFO source=main.go:1509 msg="Loading configuration file" filename=/etc/prometheus/config_out/prometheus.env.yaml
time=2026-05-01T20:06:01.013Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-apiserver-operator/openshift-apiserver-operator/0
time=2026-05-01T20:06:01.014Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-authentication-operator/authentication-operator/0
time=2026-05-01T20:06:01.014Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-cluster-storage-operator/cluster-storage-operator/0
time=2026-05-01T20:06:01.014Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-dns-operator/dns-operator/0
time=2026-05-01T20:06:01.015Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-monitoring/prometheus-operator/0
time=2026-05-01T20:06:01.015Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-monitoring/thanos-sidecar/0
time=2026-05-01T20:06:01.015Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-network-diagnostics/network-check-source/0
time=2026-05-01T20:06:01.015Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-image-registry/image-registry/0
time=2026-05-01T20:06:01.016Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-kube-controller-manager-operator/kube-controller-manager-operator/0
time=2026-05-01T20:06:01.016Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-monitoring/kubelet/3
time=2026-05-01T20:06:01.016Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-multus/monitor-network/0
time=2026-05-01T20:06:01.017Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-dns/dns-default/0
time=2026-05-01T20:06:01.017Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-config-operator/config-operator/0
time=2026-05-01T20:06:01.017Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-insights/insights-operator/0
time=2026-05-01T20:06:01.018Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-ovn-kubernetes/monitor-ovn-node/0
time=2026-05-01T20:06:01.019Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-network-operator/network-operator/0
time=2026-05-01T20:06:01.020Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-cluster-node-tuning-operator/node-tuning-operator/0
time=2026-05-01T20:06:01.020Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-console-operator/console-operator/0
time=2026-05-01T20:06:01.021Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-ingress-operator/ingress-operator/0
time=2026-05-01T20:06:01.021Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-ingress/router-default/0
time=2026-05-01T20:06:01.021Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-kube-apiserver/openshift-kube-apiserver/0
time=2026-05-01T20:06:01.022Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-cloud-credential-operator/cloud-credential-operator/0
time=2026-05-01T20:06:01.022Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-cluster-machine-approver/cluster-machine-approver/0
time=2026-05-01T20:06:01.022Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-cluster-samples-operator/cluster-samples-operator/0
time=2026-05-01T20:06:01.023Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-console/console/0
time=2026-05-01T20:06:01.023Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-service-ca-operator/service-ca-operator/0
time=2026-05-01T20:06:01.026Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager notify" discovery=kubernetes config=config-0
time=2026-05-01T20:06:01.102Z level=INFO source=warnings.go:110 msg="Warning: v1 Endpoints is deprecated in v1.33+; use discovery.k8s.io/v1 EndpointSlice" component=k8s_client_runtime
time=2026-05-01T20:06:01.115Z level=INFO source=main.go:1549 msg="Completed loading of configuration file" db_storage=1.14µs remote_storage=1.145µs web_handler=1.035µs query_engine=921ns scrape=252.873µs scrape_sd=12.491725ms notify=464.996µs notify_sd=657.056µs rules=88.527572ms tracing=4.952µs filename=/etc/prometheus/config_out/prometheus.env.yaml totalDuration=116.708646ms
time=2026-05-01T20:06:01.115Z level=INFO source=main.go:1285 msg="Server is ready to receive web requests."
time=2026-05-01T20:06:01.115Z level=INFO source=main.go:1509 msg="Loading configuration file" filename=/etc/prometheus/config_out/prometheus.env.yaml
time=2026-05-01T20:06:01.115Z level=INFO source=manager.go:190 msg="Starting rule manager..." component="rule manager"
time=2026-05-01T20:06:01.211Z level=INFO source=main.go:1549 msg="Completed loading of configuration file" db_storage=1.654µs remote_storage=2.394µs web_handler=539ns query_engine=776ns scrape=91.705µs scrape_sd=842.519µs notify=524.946µs notify_sd=11.67µs rules=70.062653ms tracing=6.465µs filename=/etc/prometheus/config_out/prometheus.env.yaml totalDuration=96.111681ms
time=2026-05-01T20:11:58.125Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:517 type=*v1.Pod err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:11:58.125Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:515 type=*v1.Endpoints err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:11:58.125Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:449 type=*v1.EndpointSlice err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:11:58.125Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:516 type=*v1.Service err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:11:58.125Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:450 type=*v1.Service err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:11:58.126Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:451 type=*v1.Pod err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:11:58.126Z level=INFO source=reflector.go:568 msg="Warning: watch ended with error" component=k8s_client_runtime reflector=github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:432 type=*v1.Node err="an error on the server (\"unable to decode an event from the watch stream: http2: client connection lost\") has prevented the request from succeeding"
time=2026-05-01T20:18:30.857Z level=INFO source=main.go:1509 msg="Loading configuration file" filename=/etc/prometheus/config_out/prometheus.env.yaml
time=2026-05-01T20:18:30.875Z level=INFO source=kubernetes.go:313 msg="Using pod service account via in-cluster config" component="discovery manager scrape" discovery=kubernetes config=serviceMonitor/openshift-pipelines/openshift-chains-monitor/0
time=2026-05-01T20:18:30.952Z level=INFO source=main.go:1549 msg="Completed loading of configuration file" db_storage=2.038µs remote_storage=1.812µs web_handler=698ns query_engine=1.227µs scrape=2.1604ms scrape_sd=1.359647ms notify=378.886µs notify_sd=11.787µs rules=75.182744ms tracing=10.018µs filename=/etc/prometheus/config_out/prometheus.env.yaml totalDuration=94.55991ms
time=2026-05-01T20:21:30.857Z level=INFO source=main.go:1509 msg="Loading configuration file" filename=/etc/prometheus/config_out/prometheus.env.yaml
time=2026-05-01T20:21:30.938Z level=INFO source=main.go:1549 msg="Completed loading of configuration file" db_storage=1.602µs remote_storage=1.552µs web_handler=1.156µs query_engine=1.05µs scrape=2.620188ms scrape_sd=781.647µs notify=432.925µs notify_sd=10.194µs rules=61.387935ms tracing=7.062µs filename=/etc/prometheus/config_out/prometheus.env.yaml totalDuration=80.271432ms