lib/promscrape: allow specifying full target url in __address__ label

Previously the `__address__` label could contain only the `host:port` part of the target url, while the scheme and metrics path were obtained from the `__scheme__` and `__metrics_path__` labels. Now the full url can be set in the `__address__` label. This makes the following scrape config valid (novice users frequently write it this way):

scrape_configs:
- job_name: foo
  static_configs:
  - targets:
    - http://host1/metrics1
    - https://host2/metrics2
This commit is contained in:
parent 8322760647
commit f9df0cae16
@@ -15,6 +15,23 @@ The following tip changes can be tested by building VictoriaMetrics components f

## tip

* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): drop all the labels with `__` prefix from discovered targets in the same way as Prometheus does according to [this article](https://www.robustperception.io/life-of-a-label/). Previously the following labels were available during [metric-level relabeling](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#metric_relabel_configs): `__address__`, `__scheme__`, `__metrics_path__`, `__scrape_interval__`, `__scrape_timeout__`, `__param_*`. Now these labels are available only during [target-level relabeling](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config). This should reduce CPU and memory usage for `vmagent` setups which scrape a big number of targets.
* FEATURE: [vmagent](https://docs.victoriametrics.com/vmagent.html): allow specifying the full url in scrape target addresses (aka the `__address__` label). This makes the following `-promscrape.config` valid:

  ```yml
  scrape_configs:
  - job_name: abc
    metrics_path: /foo/bar
    scheme: https
    static_configs:
    - targets:
      # the following targets are scraped by the provided full urls
      - 'http://host1/metric/path1'
      - 'https://host2/metric/path2'
      - 'http://host3:1234/metric/path3?arg1=value1'
      # the following target is scraped by <scheme>://host4:1234<metrics_path>
      - host4:1234
  ```

## [v1.82.0](https://github.com/VictoriaMetrics/VictoriaMetrics/releases/tag/v1.82.0)
@@ -249,7 +249,7 @@ See also [useful tips for target relabeling](#useful-tips-for-target-relabeling)

Single-node VictoriaMetrics and [vmagent](https://docs.victoriametrics.com/vmagent.html) automatically add `instance` and `job` labels per each discovered target:

* The `job` label is set to the `job_name` value specified in the corresponding [scrape_config](https://docs.victoriametrics.com/sd_configs.html#scrape_configs).
- * The `instance` label is set to the final `__address__` label value after target-level relabeling.
+ * The `instance` label is set to the host:port part of the `__address__` label value after target-level relabeling.

The `__address__` label value is automatically set to the most suitable value depending
on the used [service discovery type](https://docs.victoriametrics.com/sd_configs.html#supported-service-discovery-configs).
The `__address__` label can be overridden during relabeling - see [these docs](#how-to-modify-scrape-urls-in-targets).
@@ -284,8 +284,10 @@ URLs for scrape targets are composed of the following parts:

  just update the `__address__` label during relabeling to the needed value.
  The port part is optional. If it is missing, then it is automatically set either to `80` or `443` depending
  on the used scheme (`http` or `https`).
- The final `__address__` label is automatically converted into the `instance` label per each target unless the `instance`
+ The host:port part from the final `__address__` label is automatically set to the `instance` label unless the `instance`
  label is explicitly set during relabeling.
+ The `__address__` label can contain the full scrape url, e.g. `http://host:port/metrics/path?query_args`.
+ In this case the `__scheme__` and `__metrics_path__` labels are ignored.
* URL path (e.g. `/metrics`). This information is available during target relabeling in a special label - `__metrics_path__`.
  By default the `__metrics_path__` is set to `/metrics`. It can be overridden either by specifying the `metrics_path`
  option at [scrape_config](https://docs.victoriametrics.com/sd_configs.html#scrape_configs)
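For illustration, here is a minimal standalone sketch of this decomposition (a hypothetical helper, not the actual vmagent code); the host:port value it returns is what ends up in the `instance` label:

```go
package main

import (
	"fmt"
	"strings"
)

// splitTargetAddress mirrors the logic described above: an optional scheme
// and metrics path are extracted from __address__, and the default port is
// added when it is missing. IPv6 handling is omitted for brevity.
func splitTargetAddress(address string) (scheme, hostPort, metricsPath string) {
	scheme = "http"
	if strings.HasPrefix(address, "https://") {
		scheme = "https"
		address = address[len("https://"):]
	} else if strings.HasPrefix(address, "http://") {
		address = address[len("http://"):]
	}
	metricsPath = "/metrics"
	if n := strings.IndexByte(address, '/'); n >= 0 {
		metricsPath = address[n:]
		address = address[:n]
	}
	if !strings.Contains(address, ":") {
		if scheme == "https" {
			address += ":443"
		} else {
			address += ":80"
		}
	}
	return scheme, address, metricsPath
}

func main() {
	scheme, hostPort, metricsPath := splitTargetAddress("https://host2/metric/path2")
	fmt.Println(scheme, hostPort, metricsPath) // https host2:443 /metric/path2
}
```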
@@ -987,6 +987,8 @@ scrape_configs:
  #
  # Alternatively the scheme and path can be changed via `relabel_configs` section at `scrape_config` level.
  # See https://docs.victoriametrics.com/vmagent.html#relabeling .
  #
+ # It is also possible to specify full target urls here, e.g. "http://host:port/metrics/path?query_args"
  - targets:
    - "vmsingle1:8428"
    - "vmsingleN:8428"
@@ -51,7 +51,7 @@ func (prc *parsedRelabelConfig) String() string {
 //
 // If isFinalize is set, then FinalizeLabels is called on the labels[labelsOffset:].
 //
-// The returned labels at labels[labelsOffset:] are sorted.
+// The returned labels at labels[labelsOffset:] are sorted by name.
 func (pcs *ParsedConfigs) Apply(labels []prompbmarshal.Label, labelsOffset int, isFinalize bool) []prompbmarshal.Label {
 	var inStr string
 	relabelDebug := false
@@ -121,25 +121,36 @@ func removeEmptyLabels(labels []prompbmarshal.Label, labelsOffset int) []prompbm
 //
 // See https://www.robustperception.io/life-of-a-label for details.
 func RemoveMetaLabels(dst, src []prompbmarshal.Label) []prompbmarshal.Label {
-	for i := range src {
-		label := &src[i]
+	for _, label := range src {
 		if strings.HasPrefix(label.Name, "__meta_") {
 			continue
 		}
-		dst = append(dst, *label)
+		dst = append(dst, label)
 	}
 	return dst
 }

+// RemoveLabelsWithDoubleDashPrefix removes labels with "__" prefix from src, appends the remaining labels to dst and returns the result.
+func RemoveLabelsWithDoubleDashPrefix(dst, src []prompbmarshal.Label) []prompbmarshal.Label {
+	for _, label := range src {
+		name := label.Name
+		// A hack: do not delete the __vm_filepath label, since it is used by internal logic for FileSDConfig.
+		if strings.HasPrefix(name, "__") && name != "__vm_filepath" {
+			continue
+		}
+		dst = append(dst, label)
+	}
+	return dst
+}
+
 // FinalizeLabels removes labels with "__" prefix (except "__name__").
 func FinalizeLabels(dst, src []prompbmarshal.Label) []prompbmarshal.Label {
-	for i := range src {
-		label := &src[i]
+	for _, label := range src {
 		name := label.Name
 		if strings.HasPrefix(name, "__") && name != "__name__" {
 			continue
 		}
-		dst = append(dst, *label)
+		dst = append(dst, label)
 	}
 	return dst
 }
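A brief usage sketch of the new helper (label values invented for illustration; the import paths assume the VictoriaMetrics module layout): note how `__vm_filepath` survives while other `__`-prefixed labels are dropped.

```go
package main

import (
	"fmt"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
)

func main() {
	src := []prompbmarshal.Label{
		{Name: "__address__", Value: "host1:80"},
		{Name: "__vm_filepath", Value: "/path/to/targets.yml"},
		{Name: "instance", Value: "host1:80"},
	}
	// __address__ is dropped; __vm_filepath and instance survive.
	dst := promrelabel.RemoveLabelsWithDoubleDashPrefix(nil, src)
	fmt.Println(dst)
}
```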
@@ -1193,6 +1193,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 		originalLabels = append([]prompbmarshal.Label{}, lctx.labels...)
 	}
 	lctx.labels = swc.relabelConfigs.Apply(lctx.labels, 0, false)
+	// Remove labels starting with "__meta_" prefix according to https://www.robustperception.io/life-of-a-label/
 	lctx.labels = promrelabel.RemoveMetaLabels(lctx.labels[:0], lctx.labels)
 	// Remove references to already deleted labels, so GC could clean strings for label name and label value past len(labels).
 	// This should reduce memory usage when relabeling creates big number of temporary labels with long names and/or values.
@@ -1224,56 +1225,68 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 		return nil, nil
 	}
 	// See https://www.robustperception.io/life-of-a-label
-	schemeRelabeled := promrelabel.GetLabelValueByName(labels, "__scheme__")
-	if len(schemeRelabeled) == 0 {
-		schemeRelabeled = "http"
+	scheme := promrelabel.GetLabelValueByName(labels, "__scheme__")
+	if len(scheme) == 0 {
+		scheme = "http"
 	}
-	addressRelabeled := promrelabel.GetLabelValueByName(labels, "__address__")
-	if len(addressRelabeled) == 0 {
+	metricsPath := promrelabel.GetLabelValueByName(labels, "__metrics_path__")
+	if len(metricsPath) == 0 {
+		metricsPath = "/metrics"
+	}
+	address := promrelabel.GetLabelValueByName(labels, "__address__")
+	if len(address) == 0 {
 		// Drop target without scrape address.
 		droppedTargetsMap.Register(originalLabels)
 		return nil, nil
 	}
-	if strings.Contains(addressRelabeled, "/") {
-		// Drop target with '/'
-		droppedTargetsMap.Register(originalLabels)
-		return nil, nil
+	// Usability extension to Prometheus behavior: extract optional scheme and metricsPath from __address__.
+	// Prometheus silently drops targets with __address__ containing scheme or metricsPath
+	// according to https://www.robustperception.io/life-of-a-label/ .
+	if strings.HasPrefix(address, "http://") {
+		scheme = "http"
+		address = address[len("http://"):]
+	} else if strings.HasPrefix(address, "https://") {
+		scheme = "https"
+		address = address[len("https://"):]
 	}
-	addressRelabeled = addMissingPort(addressRelabeled, schemeRelabeled == "https")
-	metricsPathRelabeled := promrelabel.GetLabelValueByName(labels, "__metrics_path__")
-	if metricsPathRelabeled == "" {
-		metricsPathRelabeled = "/metrics"
+	if n := strings.IndexByte(address, '/'); n >= 0 {
+		metricsPath = address[n:]
+		address = address[:n]
 	}
+	address = addMissingPort(address, scheme == "https")

 	var at *auth.Token
 	tenantID := promrelabel.GetLabelValueByName(labels, "__tenant_id__")
-	if tenantID != "" {
+	if len(tenantID) > 0 {
 		newToken, err := auth.NewToken(tenantID)
 		if err != nil {
-			return nil, fmt.Errorf("cannot parse __tenant_id__=%q for job=%s, err: %w", tenantID, swc.jobName, err)
+			return nil, fmt.Errorf("cannot parse __tenant_id__=%q for job=%q: %w", tenantID, swc.jobName, err)
 		}
 		at = newToken
 	}

-	if !strings.HasPrefix(metricsPathRelabeled, "/") {
-		metricsPathRelabeled = "/" + metricsPathRelabeled
+	if !strings.HasPrefix(metricsPath, "/") {
+		metricsPath = "/" + metricsPath
 	}
-	paramsRelabeled := getParamsFromLabels(labels, swc.params)
-	optionalQuestion := "?"
-	if len(paramsRelabeled) == 0 || strings.Contains(metricsPathRelabeled, "?") {
-		optionalQuestion = ""
+	params := getParamsFromLabels(labels, swc.params)
+	optionalQuestion := ""
+	if len(params) > 0 {
+		optionalQuestion = "?"
+		if strings.Contains(metricsPath, "?") {
+			optionalQuestion = "&"
+		}
 	}
-	paramsStr := url.Values(paramsRelabeled).Encode()
-	scrapeURL := fmt.Sprintf("%s://%s%s%s%s", schemeRelabeled, addressRelabeled, metricsPathRelabeled, optionalQuestion, paramsStr)
+	paramsStr := url.Values(params).Encode()
+	scrapeURL := fmt.Sprintf("%s://%s%s%s%s", scheme, address, metricsPath, optionalQuestion, paramsStr)
 	if _, err := url.Parse(scrapeURL); err != nil {
-		return nil, fmt.Errorf("invalid url %q for scheme=%q (%q), target=%q (%q), metrics_path=%q (%q) for `job_name` %q: %w",
-			scrapeURL, swc.scheme, schemeRelabeled, target, addressRelabeled, swc.metricsPath, metricsPathRelabeled, swc.jobName, err)
+		return nil, fmt.Errorf("invalid url %q for scheme=%q, target=%q, address=%q, metrics_path=%q for job=%q: %w",
+			scrapeURL, scheme, target, address, metricsPath, swc.jobName, err)
 	}
 	// Set missing "instance" label according to https://www.robustperception.io/life-of-a-label
 	if promrelabel.GetLabelByName(labels, "instance") == nil {
 		labels = append(labels, prompbmarshal.Label{
 			Name:  "instance",
-			Value: addressRelabeled,
+			Value: address,
 		})
 		promrelabel.SortLabels(labels)
 	}
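A minimal runnable sketch of the resulting url-building flow (simplified, assumed inputs rather than the real getScrapeWork plumbing); the expected output matches the host3 case in the test below:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// buildScrapeURL mimics the final assembly above: "?" is added only when
// params are present, and "&" is used if metricsPath already has a query.
func buildScrapeURL(scheme, address, metricsPath string, params url.Values) string {
	optionalQuestion := ""
	if len(params) > 0 {
		optionalQuestion = "?"
		if strings.Contains(metricsPath, "?") {
			optionalQuestion = "&"
		}
	}
	return fmt.Sprintf("%s://%s%s%s%s", scheme, address, metricsPath, optionalQuestion, params.Encode())
}

func main() {
	params := url.Values{"x": {"y"}}
	fmt.Println(buildScrapeURL("http", "host3:1234", "/metric/path3?arg1=value1", params))
	// Output: http://host3:1234/metric/path3?arg1=value1&x=y
}
```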
@@ -1314,8 +1327,15 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
 		}
 		streamParse = b
 	}
+	// Remove labels with "__" prefix according to https://www.robustperception.io/life-of-a-label/
+	labels = promrelabel.RemoveLabelsWithDoubleDashPrefix(labels[:0], labels)
+	// Remove references to deleted labels, so GC could clean strings for label name and label value past len(labels).
+	// This should reduce memory usage when relabeling creates big number of temporary labels with long names and/or values.
+	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/825 for details.
+	labels = append([]prompbmarshal.Label{}, labels...)
+	// Reduce memory usage by interning all the strings in labels.
+	internLabelStrings(labels)

 	sw := &ScrapeWork{
 		ScrapeURL:      scrapeURL,
 		ScrapeInterval: scrapeInterval,
@@ -214,6 +214,116 @@ func TestLoadConfig(t *testing.T) {
 	}
 }

+func TestAddressWithFullURL(t *testing.T) {
+	data := `
+scrape_configs:
+- job_name: abc
+  metrics_path: /foo/bar
+  scheme: https
+  params:
+    x: [y]
+  static_configs:
+  - targets:
+    # the following targets are scraped by the provided urls
+    - 'http://host1/metric/path1'
+    - 'https://host2/metric/path2'
+    - 'http://host3:1234/metric/path3?arg1=value1'
+    # the following target is scraped by <scheme>://host4:1234<metrics_path>
+    - host4:1234
+`
+	var cfg Config
+	allData, err := cfg.parseData([]byte(data), "sss")
+	if err != nil {
+		t.Fatalf("cannot parse data: %s", err)
+	}
+	if string(allData) != data {
+		t.Fatalf("invalid data returned from parseData;\ngot\n%s\nwant\n%s", allData, data)
+	}
+	sws := cfg.getStaticScrapeWork()
+	resetNonEssentialFields(sws)
+	swsExpected := []*ScrapeWork{
+		{
+			ScrapeURL:       "http://host1:80/metric/path1?x=y",
+			ScrapeInterval:  defaultScrapeInterval,
+			ScrapeTimeout:   defaultScrapeTimeout,
+			HonorTimestamps: true,
+			Labels: []prompbmarshal.Label{
+				{
+					Name:  "instance",
+					Value: "host1:80",
+				},
+				{
+					Name:  "job",
+					Value: "abc",
+				},
+			},
+			AuthConfig:      &promauth.Config{},
+			ProxyAuthConfig: &promauth.Config{},
+			jobNameOriginal: "abc",
+		},
+		{
+			ScrapeURL:       "https://host2:443/metric/path2?x=y",
+			ScrapeInterval:  defaultScrapeInterval,
+			ScrapeTimeout:   defaultScrapeTimeout,
+			HonorTimestamps: true,
+			Labels: []prompbmarshal.Label{
+				{
+					Name:  "instance",
+					Value: "host2:443",
+				},
+				{
+					Name:  "job",
+					Value: "abc",
+				},
+			},
+			AuthConfig:      &promauth.Config{},
+			ProxyAuthConfig: &promauth.Config{},
+			jobNameOriginal: "abc",
+		},
+		{
+			ScrapeURL:       "http://host3:1234/metric/path3?arg1=value1&x=y",
+			ScrapeInterval:  defaultScrapeInterval,
+			ScrapeTimeout:   defaultScrapeTimeout,
+			HonorTimestamps: true,
+			Labels: []prompbmarshal.Label{
+				{
+					Name:  "instance",
+					Value: "host3:1234",
+				},
+				{
+					Name:  "job",
+					Value: "abc",
+				},
+			},
+			AuthConfig:      &promauth.Config{},
+			ProxyAuthConfig: &promauth.Config{},
+			jobNameOriginal: "abc",
+		},
+		{
+			ScrapeURL:       "https://host4:1234/foo/bar?x=y",
+			ScrapeInterval:  defaultScrapeInterval,
+			ScrapeTimeout:   defaultScrapeTimeout,
+			HonorTimestamps: true,
+			Labels: []prompbmarshal.Label{
+				{
+					Name:  "instance",
+					Value: "host4:1234",
+				},
+				{
+					Name:  "job",
+					Value: "abc",
+				},
+			},
+			AuthConfig:      &promauth.Config{},
+			ProxyAuthConfig: &promauth.Config{},
+			jobNameOriginal: "abc",
+		},
+	}
+	if !reflect.DeepEqual(sws, swsExpected) {
+		t.Fatalf("unexpected scrapeWork;\ngot\n%#v\nwant\n%#v", sws, swsExpected)
+	}
+}
+
 func TestBlackboxExporter(t *testing.T) {
 	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/684
 	data := `
@@ -249,34 +359,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "black:9115",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/probe",
-				},
-				{
-					Name:  "__param_module",
-					Value: "dns_udp_example",
-				},
-				{
-					Name:  "__param_target",
-					Value: "8.8.8.8",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "8.8.8.8",
@@ -718,26 +800,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "host1",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/abc/de",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "__vm_filepath",
 					Value: "",
@@ -765,26 +827,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "host2",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/abc/de",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "__vm_filepath",
 					Value: "",
@@ -812,26 +854,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "localhost:9090",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/abc/de",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "__vm_filepath",
 					Value: "",
@@ -881,26 +903,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -931,26 +933,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -1015,30 +997,6 @@ scrape_configs:
 			HonorTimestamps: false,
 			DenyRedirects:   true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/foo/bar",
-				},
-				{
-					Name:  "__param_p",
-					Value: "x&y",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "https",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "54s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "5s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:443",
@@ -1065,30 +1023,6 @@ scrape_configs:
 			HonorTimestamps: false,
 			DenyRedirects:   true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "aaa",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/foo/bar",
-				},
-				{
-					Name:  "__param_p",
-					Value: "x&y",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "https",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "54s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "5s",
-				},
 				{
 					Name:  "instance",
 					Value: "aaa:443",
@@ -1113,26 +1047,6 @@ scrape_configs:
 			ScrapeTimeout:   8 * time.Second,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "1.2.3.4",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "8s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "8s",
-				},
 				{
 					Name:  "instance",
 					Value: "1.2.3.4:80",
@@ -1155,26 +1069,6 @@ scrape_configs:
 			ScrapeTimeout:   8 * time.Second,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foobar",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "8s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "8s",
-				},
 				{
 					Name:  "instance",
 					Value: "foobar:80",
@@ -1231,30 +1125,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__param_x",
-					Value: "keep_me",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "hash",
 					Value: "82",
@@ -1311,30 +1181,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/abc.de",
-				},
-				{
-					Name:  "__param_a",
-					Value: "b",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "mailto",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "fake.addr",
@@ -1376,10 +1222,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -1410,26 +1252,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -1456,26 +1278,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -1502,26 +1304,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -1562,30 +1344,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "pp",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__param_a",
-					Value: "c",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "foo",
 					Value: "bar",
@@ -1677,42 +1435,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "127.0.0.1:9116",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/snmp",
-				},
-				{
-					Name:  "__param_module",
-					Value: "if_mib",
-				},
-				{
-					Name:  "__param_target",
-					Value: "192.168.1.2",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
-				{
-					Name:  "__series_limit__",
-					Value: "1234",
-				},
-				{
-					Name:  "__stream_parse__",
-					Value: "true",
-				},
 				{
 					Name:  "instance",
 					Value: "192.168.1.2",
@@ -1749,26 +1471,6 @@ scrape_configs:
 			ScrapeTimeout:   defaultScrapeTimeout,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "metricspath",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "1m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "10s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -1802,26 +1504,6 @@ scrape_configs:
 			ScrapeOffset:    time.Hour * 24 * 2,
 			HonorTimestamps: true,
 			Labels: []prompbmarshal.Label{
-				{
-					Name:  "__address__",
-					Value: "foo.bar:1234",
-				},
-				{
-					Name:  "__metrics_path__",
-					Value: "/metrics",
-				},
-				{
-					Name:  "__scheme__",
-					Value: "http",
-				},
-				{
-					Name:  "__scrape_interval__",
-					Value: "168h0m0s",
-				},
-				{
-					Name:  "__scrape_timeout__",
-					Value: "24h0m0s",
-				},
 				{
 					Name:  "instance",
 					Value: "foo.bar:1234",
@@ -72,19 +72,15 @@ type ScrapeWork struct {

 	// Labels to add to the scraped metrics.
 	//
-	// The list contains at least the following labels according to https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config
+	// The list contains at least the following labels according to https://www.robustperception.io/life-of-a-label/
 	//
 	// * job
-	// * __address__
-	// * __scheme__
-	// * __metrics_path__
-	// * __scrape_interval__
-	// * __scrape_timeout__
-	// * __param_<name>
-	// * __meta_*
 	// * instance
 	// * user-defined labels set via `relabel_configs` section in `scrape_config`
 	//
 	// See also https://prometheus.io/docs/concepts/jobs_instances/
 	//
 	// Labels are already sorted by name.
 	Labels []prompbmarshal.Label

 	// ExternalLabels contains labels from global->external_labels section of -promscrape.config
@@ -164,8 +160,7 @@ func (sw *ScrapeWork) Job() string {

 // LabelsString returns labels in Prometheus format for the given sw.
 func (sw *ScrapeWork) LabelsString() string {
-	labelsFinalized := promrelabel.FinalizeLabels(nil, sw.Labels)
-	return promLabelsString(labelsFinalized)
+	return promLabelsString(sw.Labels)
 }

 func promLabelsString(labels []prompbmarshal.Label) string {
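Since labels with the `__` prefix are now stripped once in getScrapeWork, sw.Labels is already finalized and the extra FinalizeLabels pass became redundant. A hedged usage sketch (label values invented, assuming the package's own context):

```go
sw := &ScrapeWork{
	Labels: []prompbmarshal.Label{
		{Name: "instance", Value: "host1:80"},
		{Name: "job", Value: "abc"},
	},
}
// Prints the labels in Prometheus text format, e.g. {instance="host1:80",job="abc"}.
fmt.Println(sw.LabelsString())
```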
@@ -196,8 +196,7 @@ func (tsm *targetStatusMap) WriteActiveTargetsJSON(w io.Writer) {
 		fmt.Fprintf(w, `{"discoveredLabels":`)
 		writeLabelsJSON(w, ts.sw.Config.OriginalLabels)
 		fmt.Fprintf(w, `,"labels":`)
-		labelsFinalized := promrelabel.FinalizeLabels(nil, ts.sw.Config.Labels)
-		writeLabelsJSON(w, labelsFinalized)
+		writeLabelsJSON(w, ts.sw.Config.Labels)
 		fmt.Fprintf(w, `,"scrapePool":%q`, ts.sw.Config.Job())
 		fmt.Fprintf(w, `,"scrapeUrl":%q`, ts.sw.Config.ScrapeURL)
 		errMsg := ""