docs: document f0c21b6300
commit 97de72054e
parent b801b299f0
@@ -252,7 +252,7 @@ By default `vmagent` reads the full response from scrape target into memory, the
 'match[]': ['{__name__!=""}']
 ```
 
-Note that `sample_limit` option doesn't work if stream parsing is enabled because the parsed data is pushed to remote storage as soon as it is parsed. Therefore the `sample_limit` option doesn't make sense during stream parsing.
+Note that the `sample_limit` option doesn't prevent data from being pushed to remote storage when stream parsing is enabled, because the parsed data is pushed to remote storage as soon as it is parsed.
 
 
 ## Scraping big number of targets
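The reworded note captures why stream parsing changes the semantics of `sample_limit`: each parsed block is pushed to remote storage before the total sample count for the scrape is known. The following minimal Go sketch illustrates that ordering; `block`, `scrapeStream`, and `pushToRemoteStorage` are hypothetical stand-ins for illustration, not vmagent's actual internals.

```go
package main

import "fmt"

// block is a hypothetical batch of samples produced by stream parsing.
type block struct{ samples int }

func pushToRemoteStorage(b block) {
	fmt.Printf("pushed block with %d samples\n", b.samples)
}

// scrapeStream sketches why sample_limit can't prevent pushes in stream
// parsing mode: every block is pushed as soon as it is parsed, so by the
// time the limit is exceeded, earlier blocks are already in remote storage.
func scrapeStream(blocks []block, sampleLimit int) error {
	total := 0
	for _, b := range blocks {
		pushToRemoteStorage(b) // pushed immediately, before the limit check
		total += b.samples
		if sampleLimit > 0 && total > sampleLimit {
			// The scrape can be reported as failed, but the blocks
			// pushed above cannot be taken back.
			return fmt.Errorf("sample_limit=%d exceeded: got %d samples", sampleLimit, total)
		}
	}
	return nil
}

func main() {
	blocks := []block{{400}, {400}, {400}}
	if err := scrapeStream(blocks, 1000); err != nil {
		fmt.Println("scrape error:", err)
	}
}
```

Once the limit check fires, the scrape can be marked as failed, but blocks pushed earlier in the loop are already in remote storage, which is exactly what the reworded note warns about.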
@@ -6,9 +6,10 @@ sort: 15
 
 ## tip
 
 * FEATURE: vmalert: add a command-line flag `-rule.configCheckInterval` for automatic re-reading of `-rule` files without the need to send SIGHUP signal. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/512).
+* FEATURE: vmagent: respect the `sample_limit` and `-promscrape.maxScrapeSize` values when scraping targets in [stream parsing mode](https://docs.victoriametrics.com/vmagent.html#stream-parsing-mode). See [this pull request](https://github.com/VictoriaMetrics/VictoriaMetrics/pull/1331).
 
 * BUGFIX: reduce CPU usage by up to 2x during querying a database with big number of active daily time series. The issue has been introduced in `v1.59.0`.
 
 
 ## [v1.60.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.60.0)
@@ -256,7 +256,7 @@ By default `vmagent` reads the full response from scrape target into memory, the
 'match[]': ['{__name__!=""}']
 ```
 
-Note that `sample_limit` option doesn't work if stream parsing is enabled because the parsed data is pushed to remote storage as soon as it is parsed. Therefore the `sample_limit` option doesn't make sense during stream parsing.
+Note that the `sample_limit` option doesn't prevent data from being pushed to remote storage when stream parsing is enabled, because the parsed data is pushed to remote storage as soon as it is parsed.
 
 
 ## Scraping big number of targets
@@ -340,8 +340,8 @@ type streamReader struct {
 func (sr *streamReader) Read(p []byte) (int, error) {
 	n, err := sr.r.Read(p)
 	sr.bytesRead += int64(n)
-	if sr.bytesRead > sr.maxBodySize {
-		return 0, fmt.Errorf("the response from %q exceeds -promscrape.maxScrapeSize=%d; "+
+	if err == nil && sr.bytesRead > sr.maxBodySize {
+		err = fmt.Errorf("the response from %q exceeds -promscrape.maxScrapeSize=%d; "+
 			"either reduce the response size for the target or increase -promscrape.maxScrapeSize", sr.scrapeURL, sr.maxBodySize)
 	}
 	return n, err
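The fix keeps returning `n` together with the error instead of `return 0, ...`, so callers still see the bytes that were actually consumed, and it only overrides `err` when the underlying read succeeded. Below is a self-contained sketch of the same pattern, assuming a simplified `streamReader`; the surrounding type and the `main` driver are reconstructed for illustration and are not the actual vmagent code.

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// streamReader wraps an io.Reader and fails once more than maxBodySize
// bytes have been read, mirroring the fixed logic from the diff above.
type streamReader struct {
	r           io.Reader
	scrapeURL   string
	bytesRead   int64
	maxBodySize int64
}

func (sr *streamReader) Read(p []byte) (int, error) {
	n, err := sr.r.Read(p)
	sr.bytesRead += int64(n)
	// Set the limit error only if the read itself succeeded, and keep
	// returning n so the caller can account for the bytes already read.
	if err == nil && sr.bytesRead > sr.maxBodySize {
		err = fmt.Errorf("the response from %q exceeds -promscrape.maxScrapeSize=%d; "+
			"either reduce the response size for the target or increase -promscrape.maxScrapeSize",
			sr.scrapeURL, sr.maxBodySize)
	}
	return n, err
}

func main() {
	sr := &streamReader{
		r:           strings.NewReader("metric_a 1\nmetric_b 2\n"),
		scrapeURL:   "http://example.com/metrics",
		maxBodySize: 10,
	}
	buf := make([]byte, 8)
	for {
		n, err := sr.Read(buf)
		fmt.Printf("read %d bytes: %q\n", n, buf[:n])
		if err != nil {
			fmt.Println("stopped:", err)
			return
		}
	}
}
```

In the old code, `return 0, fmt.Errorf(...)` dropped the `n` bytes that had already been added to `sr.bytesRead`, so the caller and the reader disagreed about how much data was consumed; returning `n` alongside the error avoids that mismatch.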