diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 539d0150cf..d8a30e08ba 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -6,6 +6,8 @@ Though [InfluxDB line protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/) denies multiple whitespace chars between these entities, some apps improperly put multiple whitespace chars. This workaround allows accepting data from such apps. * FEATURE: export `vm_promscrape_active_scrapers{type=""}` metric for tracking the number of active scrapers per each service discovery type. +* FEATURE: export `vm_promscrape_scrapers_started_total{type=""}` and `vm_promscrape_scrapers_stopped_total{type=""}` metrics for tracking churn rate for scrapers + per each service discovery type. * BUGFIX: prevent from duplicate `name` tag returned from `/tags/autoComplete/tags` handler. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/942 * BUGFIX: do not enable strict parsing for `-promscrape.config` if `-promscrape.config.dryRun` command-line flag is set. Strict parsing can be enabled with `-promscrape.config.strictParse` command-line flag. 
See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/944 diff --git a/lib/promscrape/scraper.go b/lib/promscrape/scraper.go index db5f3dde3a..13fdeff654 100644 --- a/lib/promscrape/scraper.go +++ b/lib/promscrape/scraper.go @@ -250,8 +250,10 @@ type scraperGroup struct { m map[string]*scraper pushData func(wr *prompbmarshal.WriteRequest) - changesCount *metrics.Counter - activeScrapers *metrics.Counter + changesCount *metrics.Counter + activeScrapers *metrics.Counter + scrapersStarted *metrics.Counter + scrapersStopped *metrics.Counter } func newScraperGroup(name string, pushData func(wr *prompbmarshal.WriteRequest)) *scraperGroup { @@ -260,8 +262,10 @@ func newScraperGroup(name string, pushData func(wr *prompbmarshal.WriteRequest)) m: make(map[string]*scraper), pushData: pushData, - changesCount: metrics.NewCounter(fmt.Sprintf(`vm_promscrape_config_changes_total{type=%q}`, name)), - activeScrapers: metrics.NewCounter(fmt.Sprintf(`vm_promscrape_active_scrapers{type=%q}`, name)), + changesCount: metrics.NewCounter(fmt.Sprintf(`vm_promscrape_config_changes_total{type=%q}`, name)), + activeScrapers: metrics.NewCounter(fmt.Sprintf(`vm_promscrape_active_scrapers{type=%q}`, name)), + scrapersStarted: metrics.NewCounter(fmt.Sprintf(`vm_promscrape_scrapers_started_total{type=%q}`, name)), + scrapersStopped: metrics.NewCounter(fmt.Sprintf(`vm_promscrape_scrapers_stopped_total{type=%q}`, name)), } metrics.NewGauge(fmt.Sprintf(`vm_promscrape_targets{type=%q, status="up"}`, name), func() float64 { return float64(tsmGlobal.StatusByGroup(sg.name, true)) @@ -313,12 +317,14 @@ func (sg *scraperGroup) update(sws []ScrapeWork) { // Start a scraper for the missing key. sc := newScraper(sw, sg.name, sg.pushData) sg.activeScrapers.Inc() + sg.scrapersStarted.Inc() sg.wg.Add(1) go func() { defer sg.wg.Done() sc.sw.run(sc.stopCh) tsmGlobal.Unregister(sw) sg.activeScrapers.Dec() + sg.scrapersStopped.Inc() }() tsmGlobal.Register(sw) sg.m[key] = sc