Mirror of https://github.com/VictoriaMetrics/VictoriaMetrics.git
lib/promscrape: add ability to configure scrape_timeout and scrape_interval via relabeling
See https://github.com/prometheus/prometheus/pull/8911
parent 09670479cd
commit 6c97388dde
docs/CHANGELOG.md
@@ -10,6 +10,7 @@ sort: 15
 * FEATURE: vmagent: send stale markers for disappeared metrics like Prometheus does. Previously, stale markers were sent only when the scrape target disappeared or became temporarily unavailable. See [these docs](https://docs.victoriametrics.com/vmagent.html#prometheus-staleness-markers) for details.
 * FEATURE: vmagent: add ability to set `series_limit` option for a particular scrape target via `__series_limit__` label. This allows setting the limit on the number of time series on a per-target basis. See [these docs](https://docs.victoriametrics.com/vmagent.html#cardinality-limiter) for details.
 * FEATURE: vmagent: add ability to set `stream_parse` option for a particular scrape target via `__stream_parse__` label. This allows managing the stream parsing mode on a per-target basis. See [these docs](https://docs.victoriametrics.com/vmagent.html#stream-parsing-mode) for details.
+* FEATURE: vmagent: add ability to set `scrape_interval` and `scrape_timeout` options for a particular target via `__scrape_interval__` and `__scrape_timeout__` labels in the same way as Prometheus does. See [this pull request](https://github.com/prometheus/prometheus/pull/8911).
 * FEATURE: add new relabeling actions: `keep_metrics` and `drop_metrics`. These simplify filtering metrics by metric name. See [these docs](https://docs.victoriametrics.com/vmagent.html#relabeling) for more details.
 * FEATURE: allow splitting a long `regex` in relabeling filters into an array of shorter regexps, which can be put on multiple lines for better readability and maintainability. See [these docs](https://docs.victoriametrics.com/vmagent.html#relabeling) for more details.
 * FEATURE: vmagent: reduce CPU usage when calculating the number of newly added series per scrape (this number is sent to remote storage in the `scrape_series_added` metric).
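The new labels can be set directly in a scrape config, either as plain target labels or via relabeling (see the relabeling sketch at the end of this diff). A minimal YAML sketch with hypothetical job and target names, mirroring the `__scrape_timeout__` target label used in the tests below; values must parse as Go durations:

```yaml
scrape_configs:
- job_name: node
  scrape_interval: 1m          # job-level default
  static_configs:
  - targets: ["host1:9100"]    # scraped every 1m with the default timeout
  - targets: ["slow-host:9100"]
    labels:
      # per-target overrides, read from the target's labels
      __scrape_interval__: "5m"
      __scrape_timeout__: "30s"
```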
lib/promscrape/config.go
@@ -986,7 +986,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 		}
 	}
 
-	labels := mergeLabels(swc.jobName, swc.scheme, target, swc.metricsPath, extraLabels, swc.externalLabels, metaLabels, swc.params)
+	labels := mergeLabels(swc, target, extraLabels, metaLabels)
 	var originalLabels []prompbmarshal.Label
 	if !*dropOriginalLabels {
 		originalLabels = append([]prompbmarshal.Label{}, labels...)
@@ -1049,6 +1049,23 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 		})
 		promrelabel.SortLabels(labels)
 	}
+	// Read __scrape_interval__ and __scrape_timeout__ from labels.
+	scrapeInterval := swc.scrapeInterval
+	if s := promrelabel.GetLabelValueByName(labels, "__scrape_interval__"); len(s) > 0 {
+		d, err := time.ParseDuration(s)
+		if err != nil {
+			return nil, fmt.Errorf("cannot parse __scrape_interval__=%q: %w", s, err)
+		}
+		scrapeInterval = d
+	}
+	scrapeTimeout := swc.scrapeTimeout
+	if s := promrelabel.GetLabelValueByName(labels, "__scrape_timeout__"); len(s) > 0 {
+		d, err := time.ParseDuration(s)
+		if err != nil {
+			return nil, fmt.Errorf("cannot parse __scrape_timeout__=%q: %w", s, err)
+		}
+		scrapeTimeout = d
+	}
 	// Read series_limit option from __series_limit__ label.
 	// See https://docs.victoriametrics.com/vmagent.html#cardinality-limiter
 	seriesLimit := swc.seriesLimit
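Both new blocks above follow the same override pattern: keep the job-level default unless the label is present, and return an error on an unparsable value. A runnable distillation of that pattern (a sketch, not the repo's code: the map-based `labels` and the `overrideDuration` helper stand in for the `[]prompbmarshal.Label` plumbing and `promrelabel.GetLabelValueByName` shown in the diff):

```go
package main

import (
	"fmt"
	"time"
)

// overrideDuration keeps the job-level default unless the given label is set,
// in which case the value must parse as a Go duration, mirroring the logic
// this commit adds to getScrapeWork.
func overrideDuration(def time.Duration, labels map[string]string, name string) (time.Duration, error) {
	s := labels[name]
	if len(s) == 0 {
		return def, nil
	}
	d, err := time.ParseDuration(s)
	if err != nil {
		return 0, fmt.Errorf("cannot parse %s=%q: %w", name, s, err)
	}
	return d, nil
}

func main() {
	labels := map[string]string{"__scrape_timeout__": "5s"}
	interval, _ := overrideDuration(60*time.Second, labels, "__scrape_interval__")
	timeout, _ := overrideDuration(10*time.Second, labels, "__scrape_timeout__")
	fmt.Println(interval, timeout) // 1m0s 5s
}
```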
@@ -1073,8 +1090,8 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 	internLabelStrings(labels)
 	sw := &ScrapeWork{
 		ScrapeURL:       scrapeURL,
-		ScrapeInterval:  swc.scrapeInterval,
-		ScrapeTimeout:   swc.scrapeTimeout,
+		ScrapeInterval:  scrapeInterval,
+		ScrapeTimeout:   scrapeTimeout,
 		HonorLabels:     swc.honorLabels,
 		HonorTimestamps: swc.honorTimestamps,
 		DenyRedirects:   swc.denyRedirects,
@@ -1144,17 +1161,19 @@ func getParamsFromLabels(labels []prompbmarshal.Label, paramsOrig map[string][]s
 	return m
 }
 
-func mergeLabels(job, scheme, target, metricsPath string, extraLabels, externalLabels, metaLabels map[string]string, params map[string][]string) []prompbmarshal.Label {
+func mergeLabels(swc *scrapeWorkConfig, target string, extraLabels, metaLabels map[string]string) []prompbmarshal.Label {
 	// See https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
-	m := make(map[string]string, 4+len(externalLabels)+len(params)+len(extraLabels)+len(metaLabels))
-	for k, v := range externalLabels {
+	m := make(map[string]string, 4+len(swc.externalLabels)+len(swc.params)+len(extraLabels)+len(metaLabels))
+	for k, v := range swc.externalLabels {
 		m[k] = v
 	}
-	m["job"] = job
+	m["job"] = swc.jobName
 	m["__address__"] = target
-	m["__scheme__"] = scheme
-	m["__metrics_path__"] = metricsPath
-	for k, args := range params {
+	m["__scheme__"] = swc.scheme
+	m["__metrics_path__"] = swc.metricsPath
+	m["__scrape_interval__"] = swc.scrapeInterval.String()
+	m["__scrape_timeout__"] = swc.scrapeTimeout.String()
+	for k, args := range swc.params {
 		if len(args) == 0 {
 			continue
 		}
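`mergeLabels` now seeds `__scrape_interval__` and `__scrape_timeout__` from the job-level settings via `time.Duration.String()`, which normalizes values into h/m/s components. That is why the test expectations below contain `1m0s` for a one-minute default interval rather than `60s`. A quick runnable check:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Duration.String() normalizes to h/m/s components,
	// so a 60-second default renders as "1m0s".
	fmt.Println((60 * time.Second).String()) // 1m0s
	fmt.Println((10 * time.Second).String()) // 10s
	fmt.Println((54 * time.Second).String()) // 54s
}
```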
lib/promscrape/config_test.go
@@ -152,6 +152,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "8.8.8.8",
@@ -581,6 +589,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "__vm_filepath",
 				Value: "",
@@ -621,6 +637,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "__vm_filepath",
 				Value: "",
@@ -661,6 +685,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "__vm_filepath",
 				Value: "",
@@ -723,6 +755,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "foo.bar:1234",
@@ -766,6 +806,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "datacenter",
 				Value: "foobar",
@@ -794,7 +842,7 @@ global:
   scrape_timeout: 34s
 scrape_configs:
 - job_name: foo
-  scrape_interval: 543s
+  scrape_interval: 54s
   scrape_timeout: 12s
   metrics_path: /foo/bar
   scheme: https
@@ -809,6 +857,7 @@ scrape_configs:
   - targets: ["foo.bar", "aaa"]
     labels:
       x: y
+      __scrape_timeout__: "5s"
 - job_name: qwer
   tls_config:
     server_name: foobar
@@ -821,8 +870,8 @@ scrape_configs:
 `, []*ScrapeWork{
 		{
 			ScrapeURL:       "https://foo.bar:443/foo/bar?p=x%26y&p=%3D",
-			ScrapeInterval:  543 * time.Second,
-			ScrapeTimeout:   12 * time.Second,
+			ScrapeInterval:  54 * time.Second,
+			ScrapeTimeout:   5 * time.Second,
 			HonorLabels:     true,
 			HonorTimestamps: true,
 			DenyRedirects:   true,
@@ -843,6 +892,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "https",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "54s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "5s",
+			},
 			{
 				Name:  "instance",
 				Value: "foo.bar:443",
@@ -863,8 +920,8 @@ scrape_configs:
 		},
 		{
 			ScrapeURL:       "https://aaa:443/foo/bar?p=x%26y&p=%3D",
-			ScrapeInterval:  543 * time.Second,
-			ScrapeTimeout:   12 * time.Second,
+			ScrapeInterval:  54 * time.Second,
+			ScrapeTimeout:   5 * time.Second,
 			HonorLabels:     true,
 			HonorTimestamps: true,
 			DenyRedirects:   true,
@@ -885,6 +942,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "https",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "54s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "5s",
+			},
 			{
 				Name:  "instance",
 				Value: "aaa:443",
@@ -920,6 +985,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "8s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "8s",
+			},
 			{
 				Name:  "instance",
 				Value: "1.2.3.4:80",
@@ -953,6 +1026,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "8s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "8s",
+			},
 			{
 				Name:  "instance",
 				Value: "foobar:80",
@@ -1024,6 +1105,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "hash",
 				Value: "82",
@@ -1095,6 +1184,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "mailto",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "fake.addr",
@@ -1180,6 +1277,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "foo.bar:1234",
@@ -1221,6 +1326,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "foo.bar:1234",
@@ -1258,6 +1371,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "foo.bar:1234",
@@ -1313,6 +1434,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "foo",
 				Value: "bar",
@@ -1386,6 +1515,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "__series_limit__",
 				Value: "1234",
@@ -1441,6 +1578,14 @@ scrape_configs:
 				Name:  "__scheme__",
 				Value: "http",
 			},
+			{
+				Name:  "__scrape_interval__",
+				Value: "1m0s",
+			},
+			{
+				Name:  "__scrape_timeout__",
+				Value: "10s",
+			},
 			{
 				Name:  "instance",
 				Value: "foo.bar:1234",
lib/promscrape/scrapework.go
@@ -69,6 +69,8 @@ type ScrapeWork struct
 	// * __address__
 	// * __scheme__
 	// * __metrics_path__
+	// * __scrape_interval__
+	// * __scrape_timeout__
 	// * __param_<name>
 	// * __meta_*
 	// * user-defined labels set via `relabel_configs` section in `scrape_config`
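Because `mergeLabels` seeds both labels before relabeling runs and `getScrapeWork` reads them back afterwards, a `relabel_configs` rule can rewrite the scrape schedule per target. A minimal sketch with hypothetical target names, using only standard relabel fields:

```yaml
scrape_configs:
- job_name: node
  static_configs:
  - targets: ["host1:9100", "bulky-host:9100"]
  relabel_configs:
  # raise the timeout only for the target known to respond slowly;
  # targets that don't match the regex keep the seeded default
  - source_labels: [__address__]
    regex: "bulky-host:9100"
    target_label: __scrape_timeout__
    replacement: "30s"
```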