From 97de72054e54bcff4e9f96217f57cd9602f3a3c5 Mon Sep 17 00:00:00 2001
From: Aliaksandr Valialkin
Date: Thu, 27 May 2021 15:03:30 +0300
Subject: [PATCH] docs: document f0c21b6300b6df4f4d661dc90cd718bf7b6d28b5

---
 app/vmagent/README.md    | 2 +-
 docs/CHANGELOG.md        | 5 +++--
 docs/vmagent.md          | 2 +-
 lib/promscrape/client.go | 4 ++--
 4 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/app/vmagent/README.md b/app/vmagent/README.md
index 6477bfb592..1ea295f635 100644
--- a/app/vmagent/README.md
+++ b/app/vmagent/README.md
@@ -252,7 +252,7 @@ By default `vmagent` reads the full response from scrape target into memory, the
       'match[]': ['{__name__!=""}']
 ```
 
-Note that `sample_limit` option doesn't work if stream parsing is enabled because the parsed data is pushed to remote storage as soon as it is parsed. Therefore the `sample_limit` option doesn't make sense during stream parsing.
+Note that the `sample_limit` option doesn't prevent data from being pushed to remote storage when stream parsing is enabled, since the parsed data is pushed to remote storage as soon as it is parsed.
 
 ## Scraping big number of targets
 
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index e4299a7f1d..f04c803b22 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -6,9 +6,10 @@ sort: 15
 
 ## tip
 
-FEATURE: vmalert: add a command-line flag `-rule.configCheckInterval` for automatic re-reading of `-rule` files without the need to send SIGHUP signal. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/512).
+* FEATURE: vmalert: add a command-line flag `-rule.configCheckInterval` for automatic re-reading of `-rule` files without the need to send SIGHUP signal. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/512).
+* FEATURE: vmagent: respect the `sample_limit` and `-promscrape.maxScrapeSize` values when scraping targets in [stream parsing mode](https://docs.victoriametrics.com/vmagent.html#stream-parsing-mode). See [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/1331).
 
-BUGFIX: reduce CPU usage by up to 2x during querying a database with big number of active daily time series. The issue has been introduced in `v1.59.0`.
+* BUGFIX: reduce CPU usage by up to 2x when querying a database with a big number of active daily time series. The issue has been introduced in `v1.59.0`.
 
 ## [v1.60.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.60.0)
 
diff --git a/docs/vmagent.md b/docs/vmagent.md
index 0fc739f4fc..73cb3030cc 100644
--- a/docs/vmagent.md
+++ b/docs/vmagent.md
@@ -256,7 +256,7 @@ By default `vmagent` reads the full response from scrape target into memory, the
       'match[]': ['{__name__!=""}']
 ```
 
-Note that `sample_limit` option doesn't work if stream parsing is enabled because the parsed data is pushed to remote storage as soon as it is parsed. Therefore the `sample_limit` option doesn't make sense during stream parsing.
+Note that the `sample_limit` option doesn't prevent data from being pushed to remote storage when stream parsing is enabled, since the parsed data is pushed to remote storage as soon as it is parsed.
 
 ## Scraping big number of targets
 
diff --git a/lib/promscrape/client.go b/lib/promscrape/client.go
index fc21f967f7..55e856cea2 100644
--- a/lib/promscrape/client.go
+++ b/lib/promscrape/client.go
@@ -340,8 +340,8 @@ type streamReader struct {
 func (sr *streamReader) Read(p []byte) (int, error) {
 	n, err := sr.r.Read(p)
 	sr.bytesRead += int64(n)
-	if sr.bytesRead > sr.maxBodySize {
-		return 0, fmt.Errorf("the response from %q exceeds -promscrape.maxScrapeSize=%d; "+
+	if err == nil && sr.bytesRead > sr.maxBodySize {
+		err = fmt.Errorf("the response from %q exceeds -promscrape.maxScrapeSize=%d; "+
 			"either reduce the response size for the target or increase -promscrape.maxScrapeSize",
 			sr.scrapeURL, sr.maxBodySize)
 	}
 	return n, err
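
The `client.go` hunk above changes `streamReader.Read` so that crossing the `-promscrape.maxScrapeSize` limit no longer discards the bytes read in the current call and no longer masks a real error from the underlying reader: the limit error is set only when the read itself succeeded, and `n` is returned in both cases. Below is a minimal, self-contained Go sketch of the same pattern. The names `cappedReader`, the demo URL `http://example.com/metrics`, and the tiny 64-byte limit are illustrative placeholders, not part of the VictoriaMetrics codebase.

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// cappedReader mirrors the streamReader pattern from lib/promscrape/client.go:
// an io.Reader wrapper that serves bytes until a total-size limit is crossed,
// then reports an error without dropping the bytes just read.
type cappedReader struct {
	r           io.Reader
	bytesRead   int64
	maxBodySize int64
	scrapeURL   string
}

func (cr *cappedReader) Read(p []byte) (int, error) {
	n, err := cr.r.Read(p)
	cr.bytesRead += int64(n)
	// Set the limit error only if the underlying read succeeded, so a real
	// I/O error is never masked; return n in both cases, so the caller can
	// still process the bytes read before the limit was exceeded.
	if err == nil && cr.bytesRead > cr.maxBodySize {
		err = fmt.Errorf("the response from %q exceeds the %d-byte limit", cr.scrapeURL, cr.maxBodySize)
	}
	return n, err
}

func main() {
	body := strings.NewReader(strings.Repeat("some_metric 1\n", 100))
	cr := &cappedReader{
		r:           body,
		maxBodySize: 64, // deliberately tiny limit for the demo
		scrapeURL:   "http://example.com/metrics",
	}
	buf := make([]byte, 32)
	for {
		n, err := cr.Read(buf)
		fmt.Printf("read %d bytes: %q\n", n, buf[:n])
		if err != nil {
			fmt.Println("stopped:", err)
			break
		}
	}
}
```

Returning `n` alongside a non-nil error is consistent with the `io.Reader` contract, which explicitly permits a read to return both data and an error; in stream parsing mode this is what lets rows parsed before the limit was hit still be accounted for rather than silently dropped.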