From a6a71ef861444eb11fe8ec6d2387f0fc0c4aea87 Mon Sep 17 00:00:00 2001
From: Aliaksandr Valialkin
Date: Fri, 12 Mar 2021 03:35:49 +0200
Subject: [PATCH] lib/promscrape: add ability to configure proxy options via
 `proxy_tls_config`, `proxy_basic_auth`, `proxy_bearer_token` and
 `proxy_bearer_token_file`

Updates https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1116
---
 app/vmagent/README.md         | 35 +++++++++++++++++++++++++++++++++++
 docs/CHANGELOG.md             |  1 +
 docs/vmagent.md               | 35 +++++++++++++++++++++++++++++++++++
 lib/promscrape/client.go      |  2 +-
 lib/promscrape/config.go      | 21 ++++++++++++++++-----
 lib/promscrape/config_test.go | 32 +++++++++++++++++++++++++++++++-
 lib/promscrape/scrapework.go  | 12 ++++++++----
 lib/proxy/proxy.go            | 21 +++++++++++++++++++++
 8 files changed, 148 insertions(+), 11 deletions(-)

diff --git a/app/vmagent/README.md b/app/vmagent/README.md
index 22aaa2360..2606690c5 100644
--- a/app/vmagent/README.md
+++ b/app/vmagent/README.md
@@ -255,6 +255,41 @@ If each target is scraped by multiple `vmagent` instances, then data deduplicati
 See [these docs](https://victoriametrics.github.io/#deduplication) for details.
 
+## Scraping targets via a proxy
+
+`vmagent` supports scraping targets via HTTP and HTTPS proxies. The proxy address must be specified in the `proxy_url` option. For example, the following scrape config instructs
+`vmagent` to scrape targets via the HTTPS proxy at `https://proxy-addr:1234`:
+
+```yml
+scrape_configs:
+- job_name: foo
+  proxy_url: https://proxy-addr:1234
+```
+
+The proxy can be configured with the following optional settings:
+
+* `proxy_bearer_token` and `proxy_bearer_token_file` for Bearer token authorization.
+* `proxy_basic_auth` for Basic authorization. See [these docs](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config).
+* `proxy_tls_config` for TLS config. See [these docs](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config).
+
+For example:
+
+```yml
+scrape_configs:
+- job_name: foo
+  proxy_url: https://proxy-addr:1234
+  proxy_basic_auth:
+    username: foobar
+    password: secret
+  proxy_tls_config:
+    insecure_skip_verify: true
+    cert_file: /path/to/cert
+    key_file: /path/to/key
+    ca_file: /path/to/ca
+    server_name: real-server-name
+```
+
+
 ## Monitoring
 
 `vmagent` exports various metrics in Prometheus exposition format at `http://vmagent-host:8429/metrics` page. We recommend setting up regular scraping of this page
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index b574ded3d..3b34a78e1 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -10,6 +10,7 @@
 * FEATURE: export `vm_available_memory_bytes` and `vm_available_cpu_cores` metrics, which show the number of available RAM and available CPU cores for VictoriaMetrics apps.
 * FEATURE: vmagent: add ability to replicate scrape targets among `vmagent` instances in the cluster with `-promscrape.cluster.replicationFactor` command-line flag. See [these docs](https://victoriametrics.github.io/vmagent.html#scraping-big-number-of-targets).
 * FEATURE: vmagent: accept `scrape_offset` option at `scrape_config`. This option may be useful when scrapes must start at the specified offset of every scrape interval. See [these docs](https://victoriametrics.github.io/vmagent.html#troubleshooting) for details.
+* FEATURE: vmagent: support `proxy_tls_config`, `proxy_basic_auth`, `proxy_bearer_token` and `proxy_bearer_token_file` options at `scrape_config` section for configuring proxies specified via `proxy_url`. See [these docs](https://victoriametrics.github.io/vmagent.html#scraping-targets-via-a-proxy).
 * FEATURE: vmauth: allow using regexp paths in `url_map`. See [this issue](https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1112) for details.
 * BUGFIX: vmagent: prevent a high CPU usage bug during failing scrapes with small `scrape_timeout` (less than a few seconds).
diff --git a/docs/vmagent.md b/docs/vmagent.md
index 22aaa2360..2606690c5 100644
--- a/docs/vmagent.md
+++ b/docs/vmagent.md
@@ -255,6 +255,41 @@ If each target is scraped by multiple `vmagent` instances, then data deduplicati
 See [these docs](https://victoriametrics.github.io/#deduplication) for details.
 
+## Scraping targets via a proxy
+
+`vmagent` supports scraping targets via HTTP and HTTPS proxies. The proxy address must be specified in the `proxy_url` option. For example, the following scrape config instructs
+`vmagent` to scrape targets via the HTTPS proxy at `https://proxy-addr:1234`:
+
+```yml
+scrape_configs:
+- job_name: foo
+  proxy_url: https://proxy-addr:1234
+```
+
+The proxy can be configured with the following optional settings:
+
+* `proxy_bearer_token` and `proxy_bearer_token_file` for Bearer token authorization.
+* `proxy_basic_auth` for Basic authorization. See [these docs](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config).
+* `proxy_tls_config` for TLS config. See [these docs](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tls_config).
+
+For example:
+
+```yml
+scrape_configs:
+- job_name: foo
+  proxy_url: https://proxy-addr:1234
+  proxy_basic_auth:
+    username: foobar
+    password: secret
+  proxy_tls_config:
+    insecure_skip_verify: true
+    cert_file: /path/to/cert
+    key_file: /path/to/key
+    ca_file: /path/to/ca
+    server_name: real-server-name
+```
+
+
 ## Monitoring
 
 `vmagent` exports various metrics in Prometheus exposition format at `http://vmagent-host:8429/metrics` page. We recommend setting up regular scraping of this page
diff --git a/lib/promscrape/client.go b/lib/promscrape/client.go
index 958c06055..f6a264802 100644
--- a/lib/promscrape/client.go
+++ b/lib/promscrape/client.go
@@ -67,7 +67,7 @@ func newClient(sw *ScrapeWork) *client {
             host += ":443"
         }
     }
-    dialFunc, err := newStatDialFunc(sw.ProxyURL, sw.AuthConfig)
+    dialFunc, err := newStatDialFunc(sw.ProxyURL, sw.ProxyAuthConfig)
     if err != nil {
         logger.Fatalf("cannot create dial func: %s", err)
     }
diff --git a/lib/promscrape/config.go b/lib/promscrape/config.go
index 8d558862f..b637b8316 100644
--- a/lib/promscrape/config.go
+++ b/lib/promscrape/config.go
@@ -110,11 +110,15 @@ type ScrapeConfig struct {
     SampleLimit int `yaml:"sample_limit,omitempty"`
 
     // These options are supported only by lib/promscrape.
-    DisableCompression  bool          `yaml:"disable_compression,omitempty"`
-    DisableKeepAlive    bool          `yaml:"disable_keepalive,omitempty"`
-    StreamParse         bool          `yaml:"stream_parse,omitempty"`
-    ScrapeAlignInterval time.Duration `yaml:"scrape_align_interval,omitempty"`
-    ScrapeOffset        time.Duration `yaml:"scrape_offset,omitempty"`
+    DisableCompression   bool                      `yaml:"disable_compression,omitempty"`
+    DisableKeepAlive     bool                      `yaml:"disable_keepalive,omitempty"`
+    StreamParse          bool                      `yaml:"stream_parse,omitempty"`
+    ScrapeAlignInterval  time.Duration             `yaml:"scrape_align_interval,omitempty"`
+    ScrapeOffset         time.Duration             `yaml:"scrape_offset,omitempty"`
+    ProxyTLSConfig       *promauth.TLSConfig       `yaml:"proxy_tls_config,omitempty"`
+    ProxyBasicAuth       *promauth.BasicAuthConfig `yaml:"proxy_basic_auth,omitempty"`
+    ProxyBearerToken     string                    `yaml:"proxy_bearer_token,omitempty"`
+    ProxyBearerTokenFile string                    `yaml:"proxy_bearer_token_file,omitempty"`
 
     // This is set in loadConfig
     swc *scrapeWorkConfig
@@ -543,6 +547,10 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
     if err != nil {
         return nil, fmt.Errorf("cannot parse auth config for `job_name` %q: %w", jobName, err)
     }
+    proxyAC, err := promauth.NewConfig(baseDir, sc.ProxyBasicAuth, sc.ProxyBearerToken, sc.ProxyBearerTokenFile, sc.ProxyTLSConfig)
+    if err != nil {
+        return nil, fmt.Errorf("cannot parse proxy auth config for `job_name` %q: %w", jobName, err)
+    }
     relabelConfigs, err := promrelabel.ParseRelabelConfigs(sc.RelabelConfigs)
     if err != nil {
         return nil, fmt.Errorf("cannot parse `relabel_configs` for `job_name` %q: %w", jobName, err)
@@ -559,6 +567,7 @@ func getScrapeWorkConfig(sc *ScrapeConfig, baseDir string, globalCfg *GlobalConf
         scheme:          scheme,
         params:          params,
         proxyURL:        sc.ProxyURL,
+        proxyAuthConfig: proxyAC,
         authConfig:      ac,
         honorLabels:     honorLabels,
         honorTimestamps: honorTimestamps,
@@ -583,6 +592,7 @@ type scrapeWorkConfig struct {
     scheme          string
     params          map[string][]string
     proxyURL        proxy.URL
+    proxyAuthConfig *promauth.Config
     authConfig      *promauth.Config
     honorLabels     bool
     honorTimestamps bool
@@ -849,6 +859,7 @@ func (swc *scrapeWorkConfig) getScrapeWork(target string, extraLabels, metaLabel
         OriginalLabels:       originalLabels,
         Labels:               labels,
         ProxyURL:             swc.proxyURL,
+        ProxyAuthConfig:      swc.proxyAuthConfig,
         AuthConfig:           swc.authConfig,
         MetricRelabelConfigs: swc.metricRelabelConfigs,
         SampleLimit:          swc.sampleLimit,
diff --git a/lib/promscrape/config_test.go b/lib/promscrape/config_test.go
index f75dae834..ac7ce98fb 100644
--- a/lib/promscrape/config_test.go
+++ b/lib/promscrape/config_test.go
@@ -10,6 +10,7 @@ import (
 
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/prompbmarshal"
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy"
 )
 
 func TestNeedSkipScrapeWork(t *testing.T) {
@@ -154,6 +155,7 @@ scrape_configs:
             },
         },
         AuthConfig:      &promauth.Config{},
+        ProxyAuthConfig: &promauth.Config{},
         jobNameOriginal: "blackbox",
     }}
     if !reflect.DeepEqual(sws, swsExpected) {
@@ -548,6 +550,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
         {
@@ -587,6 +590,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
         {
@@ -626,6 +630,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -679,6 +684,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -729,6 +735,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -748,6 +755,10 @@ scrape_configs:
       p: ["x&y", "="]
       xaa:
     bearer_token: xyz
+    proxy_url: http://foo.bar
+    proxy_basic_auth:
+      username: foo
+      password: bar
     static_configs:
     - targets: ["foo.bar", "aaa"]
       labels:
@@ -801,6 +812,10 @@ scrape_configs:
             AuthConfig: &promauth.Config{
                 Authorization: "Bearer xyz",
             },
+            ProxyAuthConfig: &promauth.Config{
+                Authorization: "Basic Zm9vOmJhcg==",
+            },
+            ProxyURL:        proxy.MustNewURL("http://foo.bar"),
             jobNameOriginal: "foo",
         },
         {
@@ -842,6 +857,10 @@ scrape_configs:
             AuthConfig: &promauth.Config{
                 Authorization: "Bearer xyz",
             },
+            ProxyAuthConfig: &promauth.Config{
+                Authorization: "Basic Zm9vOmJhcg==",
+            },
+            ProxyURL:        proxy.MustNewURL("http://foo.bar"),
             jobNameOriginal: "foo",
         },
         {
@@ -877,6 +896,7 @@ scrape_configs:
                 TLSServerName:         "foobar",
                 TLSInsecureSkipVerify: true,
             },
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "qwer",
         },
     })
@@ -955,6 +975,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -1017,6 +1038,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -1060,6 +1082,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -1099,7 +1122,8 @@ scrape_configs:
                     Value: "foo",
                 },
             },
-            AuthConfig: &promauth.Config{},
+            AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             MetricRelabelConfigs: mustParseRelabelConfigs(`
- source_labels: [foo]
  target_label: abc
`),
@@ -1145,6 +1169,7 @@ scrape_configs:
             AuthConfig: &promauth.Config{
                 Authorization: "Basic eHl6OnNlY3JldC1wYXNz",
             },
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -1184,6 +1209,7 @@ scrape_configs:
             AuthConfig: &promauth.Config{
                 Authorization: "Bearer secret-pass",
             },
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -1229,6 +1255,7 @@ scrape_configs:
             AuthConfig: &promauth.Config{
                 TLSCertificate: &snakeoilCert,
             },
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "foo",
         },
     })
@@ -1291,6 +1318,7 @@ scrape_configs:
                 },
             },
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
             jobNameOriginal: "aaa",
         },
     })
@@ -1352,6 +1380,7 @@ scrape_configs:
                 },
             },
             AuthConfig:         &promauth.Config{},
+            ProxyAuthConfig:    &promauth.Config{},
             SampleLimit:        100,
             DisableKeepAlive:   true,
             DisableCompression: true,
@@ -1398,6 +1427,7 @@ scrape_configs:
             },
             jobNameOriginal: "path wo slash",
             AuthConfig:      &promauth.Config{},
+            ProxyAuthConfig: &promauth.Config{},
         },
     })
 }
diff --git a/lib/promscrape/scrapework.go b/lib/promscrape/scrapework.go
index 003b0348a..d19ae8b34 100644
--- a/lib/promscrape/scrapework.go
+++ b/lib/promscrape/scrapework.go
@@ -68,12 +68,15 @@ type ScrapeWork struct {
     // See also https://prometheus.io/docs/concepts/jobs_instances/
     Labels []prompbmarshal.Label
 
-    // Auth config
-    AuthConfig *promauth.Config
-
     // ProxyURL HTTP proxy url
     ProxyURL proxy.URL
 
+    // Auth config for ProxyURL
+    ProxyAuthConfig *promauth.Config
+
+    // Auth config
+    AuthConfig *promauth.Config
+
     // Optional `metric_relabel_configs`.
     MetricRelabelConfigs *promrelabel.ParsedConfigs
 
@@ -105,9 +108,10 @@ type ScrapeWork struct {
 func (sw *ScrapeWork) key() string {
     // Do not take into account OriginalLabels.
    key := fmt.Sprintf("ScrapeURL=%s, ScrapeInterval=%s, ScrapeTimeout=%s, HonorLabels=%v, HonorTimestamps=%v, Labels=%s, "+
-        "AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
+        "ProxyURL=%s, ProxyAuthConfig=%s, AuthConfig=%s, MetricRelabelConfigs=%s, SampleLimit=%d, DisableCompression=%v, DisableKeepAlive=%v, StreamParse=%v, "+
         "ScrapeAlignInterval=%s, ScrapeOffset=%s",
         sw.ScrapeURL, sw.ScrapeInterval, sw.ScrapeTimeout, sw.HonorLabels, sw.HonorTimestamps, sw.LabelsString(),
+        sw.ProxyURL.String(), sw.ProxyAuthConfig.String(),
         sw.AuthConfig.String(), sw.MetricRelabelConfigs.String(), sw.SampleLimit, sw.DisableCompression, sw.DisableKeepAlive, sw.StreamParse,
         sw.ScrapeAlignInterval, sw.ScrapeOffset)
     return key
diff --git a/lib/proxy/proxy.go b/lib/proxy/proxy.go
index 8915ba2e1..8582a83ae 100644
--- a/lib/proxy/proxy.go
+++ b/lib/proxy/proxy.go
@@ -10,6 +10,7 @@ import (
     "strings"
     "time"
 
+    "github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/netutil"
     "github.com/VictoriaMetrics/VictoriaMetrics/lib/promauth"
     "github.com/VictoriaMetrics/fasthttp"
@@ -20,6 +21,17 @@ type URL struct {
     url *url.URL
 }
 
+// MustNewURL returns a new URL for the given u.
+func MustNewURL(u string) URL {
+    pu, err := url.Parse(u)
+    if err != nil {
+        logger.Panicf("BUG: cannot parse u=%q: %s", u, err)
+    }
+    return URL{
+        url: pu,
+    }
+}
+
 // URL returns the underlying url.
 func (u *URL) URL() *url.URL {
     if u == nil || u.url == nil {
@@ -28,6 +40,15 @@ func (u *URL) URL() *url.URL {
     return u.url
 }
 
+// String returns the string representation of u.
+func (u *URL) String() string {
+    pu := u.URL()
+    if pu == nil {
+        return ""
+    }
+    return pu.String()
+}
+
 // MarshalYAML implements yaml.Marshaler interface.
 func (u *URL) MarshalYAML() (interface{}, error) {
     if u.url == nil {
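
The `MustNewURL` and `String` helpers added to `lib/proxy/proxy.go` above exist mainly so the tests can build expected `ScrapeWork` values and so `ScrapeWork.key()` can serialize the proxy URL. A small usage sketch (the `main` function here is illustrative, not part of the patch):

```go
package main

import (
	"fmt"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/proxy"
)

func main() {
	// MustNewURL panics (via logger.Panicf) on a malformed URL, so it is
	// intended for hard-coded values such as the config_test.go expectations.
	u := proxy.MustNewURL("http://foo.bar")

	// String() returns "" for an empty URL, which keeps ScrapeWork.key()
	// stable for scrape configs that have no proxy_url configured.
	fmt.Println(u.String()) // http://foo.bar
}
```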
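A note on the `Authorization` values pinned by the tests: `proxy_basic_auth` with `username: foo` and `password: bar` is expected to produce a standard Basic header, i.e. `"Basic " + base64(username + ":" + password)`, while `proxy_bearer_token` analogously yields `Bearer <token>`. A minimal standalone sketch (not code from this patch) showing where `"Basic Zm9vOmJhcg=="` comes from:

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// base64("foo:bar") == "Zm9vOmJhcg==", matching the expectation
	// `Authorization: "Basic Zm9vOmJhcg=="` in config_test.go above.
	token := base64.StdEncoding.EncodeToString([]byte("foo" + ":" + "bar"))
	fmt.Println("Basic " + token) // Basic Zm9vOmJhcg==
}
```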
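Finally, note that `newClient` now passes `sw.ProxyAuthConfig` instead of `sw.AuthConfig` into `newStatDialFunc`, so the proxy handshake and the target scrape use separate credentials. The patch does not include `newStatDialFunc` itself; the sketch below only illustrates the underlying mechanism with a hypothetical CONNECT dialer, and every name and detail in it is assumed rather than taken from the codebase:

```go
package proxydial

import (
	"bufio"
	"fmt"
	"net"
	"net/http"
)

// dialViaProxy is a hypothetical helper: it dials the proxy, issues a CONNECT
// request for the scrape target and attaches the configured auth header
// (e.g. "Basic Zm9vOmJhcg==" from proxy_basic_auth) as Proxy-Authorization.
func dialViaProxy(proxyAddr, targetAddr, authHeader string) (net.Conn, error) {
	conn, err := net.Dial("tcp", proxyAddr)
	if err != nil {
		return nil, err
	}
	req := "CONNECT " + targetAddr + " HTTP/1.1\r\nHost: " + targetAddr + "\r\n"
	if authHeader != "" {
		req += "Proxy-Authorization: " + authHeader + "\r\n"
	}
	req += "\r\n"
	if _, err := conn.Write([]byte(req)); err != nil {
		conn.Close()
		return nil, err
	}
	// The proxy answers "HTTP/1.1 200 ..." once the tunnel is established.
	resp, err := http.ReadResponse(bufio.NewReader(conn), nil)
	if err != nil {
		conn.Close()
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		conn.Close()
		return nil, fmt.Errorf("proxy CONNECT failed: %s", resp.Status)
	}
	return conn, nil
}
```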