diff --git a/app/vmagent/README.md b/app/vmagent/README.md index d167bf9958..2912be4a26 100644 --- a/app/vmagent/README.md +++ b/app/vmagent/README.md @@ -183,7 +183,7 @@ Read more about relabeling in the following articles: `vmagent` exports various metrics in Prometheus exposition format at `http://vmagent-host:8429/metrics` page. It is recommended setting up regular scraping of this page either via `vmagent` itself or via Prometheus, so the exported metrics could be analyzed later. -`vmagent` also exports target statuses at `http://vmagent-host:8429/targets` page in plaintext format. +`vmagent` also exports target statuses at `http://vmagent-host:8429/targets` page in plaintext format. This page also exports information on improperly configured scrape configs. ### Troubleshooting diff --git a/docs/vmagent.md b/docs/vmagent.md index d167bf9958..2912be4a26 100644 --- a/docs/vmagent.md +++ b/docs/vmagent.md @@ -183,7 +183,7 @@ Read more about relabeling in the following articles: `vmagent` exports various metrics in Prometheus exposition format at `http://vmagent-host:8429/metrics` page. It is recommended setting up regular scraping of this page either via `vmagent` itself or via Prometheus, so the exported metrics could be analyzed later. -`vmagent` also exports target statuses at `http://vmagent-host:8429/targets` page in plaintext format. +`vmagent` also exports target statuses at `http://vmagent-host:8429/targets` page in plaintext format. This page also exports information on improperly configured scrape configs. 
### Troubleshooting diff --git a/lib/promscrape/targetstatus.go b/lib/promscrape/targetstatus.go index b362d9a84b..081588a2c4 100644 --- a/lib/promscrape/targetstatus.go +++ b/lib/promscrape/targetstatus.go @@ -86,6 +86,7 @@ func (tsm *targetStatusMap) WriteHumanReadable(w io.Writer) { return jss[i].job < jss[j].job }) + targetsByEndpoint := make(map[string]int) for _, js := range jss { sts := js.statuses sort.Slice(sts, func(i, j int) bool { @@ -115,8 +116,20 @@ func (tsm *targetStatusMap) WriteHumanReadable(w io.Writer) { } fmt.Fprintf(w, "\tstate=%s, endpoint=%s, labels=%s, last_scrape=%.3fs ago, scrape_duration=%.3fs, error=%q\n", state, st.sw.ScrapeURL, labelsStr, lastScrape.Seconds(), float64(st.scrapeDuration)/1000, errMsg) + key := fmt.Sprintf("endpoint=%s, labels=%s", st.sw.ScrapeURL, labelsStr) + targetsByEndpoint[key]++ } } + fmt.Fprint(w, "\n") + + // Check whether there are targets with duplicate endpoints and labels. + for key, n := range targetsByEndpoint { + if n <= 1 { + continue + } + fmt.Fprintf(w, "!!! Scrape config error: %d duplicate targets with identical endpoint and labels found:\n", n) + fmt.Fprintf(w, "\t%s\n", key) + } } type jobStatus struct {