lib/promscrape: move duplicate code from the functions that collect ScrapeWork lists for distinct SD types into Config.getScrapeWorkGeneric()

This removes more than 200 lines of duplicate code.
author Aliaksandr Valialkin
date 2023-10-25 23:02:30 +02:00
parent 19940b5629
commit 194deeea1b
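The core of the change, as a condensed and self-contained Go sketch (all type and field names below are simplified, hypothetical stand-ins for the real lib/promscrape types such as ScrapeWork, ScrapeConfig and promutils.Labels — an illustration of the pattern, not the actual implementation): every per-SD-type getter used to repeat the same discovery loop; after this commit each of them only supplies a closure that knows how to iterate its own config slice, while discovery, error logging and the fall-back to previously discovered targets live in a single generic function.

package main

import "fmt"

// Simplified stand-ins for the real lib/promscrape types (hypothetical names).
type target struct{ addr string }

type labelsGetter interface {
	getLabels() ([]string, error)
}

type azureSDConfig struct{ hosts []string }

func (c *azureSDConfig) getLabels() ([]string, error) { return c.hosts, nil }

type scrapeConfig struct {
	jobName        string
	azureSDConfigs []azureSDConfig
}

// getScrapeWorkGeneric mirrors the commit's idea: per-SD-type callers only
// provide visitConfigs; everything else is shared.
func getScrapeWorkGeneric(scs []scrapeConfig, visitConfigs func(sc *scrapeConfig, visitor func(sdc labelsGetter)), discoveryType string, prev map[string][]target) []target {
	var dst []target
	for i := range scs {
		sc := &scs[i]
		dstLen := len(dst)
		ok := true
		visitConfigs(sc, func(sdc labelsGetter) {
			if !ok {
				return
			}
			labels, err := sdc.getLabels()
			if err != nil {
				fmt.Printf("skipping %s targets for job_name=%s because of error: %s\n", discoveryType, sc.jobName, err)
				ok = false
				return
			}
			for _, l := range labels {
				dst = append(dst, target{addr: l})
			}
		})
		if !ok {
			// Preserve previously discovered targets on temporary discovery errors.
			dst = append(dst[:dstLen], prev[sc.jobName]...)
		}
	}
	return dst
}

func main() {
	scs := []scrapeConfig{{
		jobName:        "azure",
		azureSDConfigs: []azureSDConfig{{hosts: []string{"10.0.0.1:9100"}}},
	}}
	// A per-SD-type wrapper shrinks to a closure over its own config slice.
	visitConfigs := func(sc *scrapeConfig, visitor func(sdc labelsGetter)) {
		for i := range sc.azureSDConfigs {
			visitor(&sc.azureSDConfigs[i])
		}
	}
	fmt.Println(getScrapeWorkGeneric(scs, visitConfigs, "azure_sd_config", nil))
}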


@@ -137,9 +137,10 @@ func (cfg *Config) mustStart() {
     }
     jobNames := cfg.getJobNames()
     tsmGlobal.registerJobNames(jobNames)
-    logger.Infof("started service discovery routines in %.3f seconds", time.Since(startTime).Seconds())
+    logger.Infof("started %d service discovery routines in %.3f seconds", len(cfg.ScrapeConfigs), time.Since(startTime).Seconds())
 }
 
+// mustRestart restarts service discovery routines at cfg if they were changed comparing to prevCfg.
 func (cfg *Config) mustRestart(prevCfg *Config) {
     startTime := time.Now()
     logger.Infof("restarting service discovery routines...")
@@ -176,7 +177,7 @@ func (cfg *Config) mustRestart(prevCfg *Config) {
             restarted++
         }
     }
-    // Stop preious jobs which weren't found in the current configuration.
+    // Stop previous jobs which weren't found in the current configuration.
     for _, scPrev := range prevCfg.ScrapeConfigs {
         if _, ok := currentJobNames[scPrev.JobName]; !ok {
             scPrev.mustStop()
@@ -226,7 +227,7 @@ func (cfg *Config) mustStop() {
     for _, sc := range cfg.ScrapeConfigs {
         sc.mustStop()
     }
-    logger.Infof("stopped service discovery routines in %.3f seconds", time.Since(startTime).Seconds())
+    logger.Infof("stopped %d service discovery routines in %.3f seconds", len(cfg.ScrapeConfigs), time.Since(startTime).Seconds())
 }
 
 // getJobNames returns all the scrape job names from the cfg.
@@ -308,7 +309,7 @@ func (sc *ScrapeConfig) mustStart(baseDir string) {
         target := metaLabels.Get("__address__")
         sw, err := sc.swc.getScrapeWork(target, nil, metaLabels)
         if err != nil {
-            logger.Errorf("cannot create kubernetes_sd_config target %q for job_name %q: %s", target, sc.swc.jobName, err)
+            logger.Errorf("cannot create kubernetes_sd_config target %q for job_name=%s: %s", target, sc.swc.jobName, err)
             return nil
         }
         return sw
@@ -521,245 +522,92 @@ func getSWSByJob(sws []*ScrapeWork) map[string][]*ScrapeWork {
 // getAzureSDScrapeWork returns `azure_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getAzureSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.AzureSDConfigs {
-            sdc := &sc.AzureSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "azure_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering azure targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.AzureSDConfigs {
+            visitor(&sc.AzureSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "azure_sd_config", prev)
 }
 
 // getConsulSDScrapeWork returns `consul_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getConsulSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.ConsulSDConfigs {
-            sdc := &sc.ConsulSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "consul_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering consul targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.ConsulSDConfigs {
+            visitor(&sc.ConsulSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "consul_sd_config", prev)
 }
 
 // getConsulAgentSDScrapeWork returns `consulagent_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getConsulAgentSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.ConsulAgentSDConfigs {
-            sdc := &sc.ConsulAgentSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "consulagent_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering consulagent targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.ConsulAgentSDConfigs {
+            visitor(&sc.ConsulAgentSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "consulagent_sd_config", prev)
 }
 
 // getDigitalOceanDScrapeWork returns `digitalocean_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getDigitalOceanDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.DigitaloceanSDConfigs {
-            sdc := &sc.DigitaloceanSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "digitalocean_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering digitalocean targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.DigitaloceanSDConfigs {
+            visitor(&sc.DigitaloceanSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "digitalocean_sd_config", prev)
 }
 
 // getDNSSDScrapeWork returns `dns_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getDNSSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.DNSSDConfigs {
-            sdc := &sc.DNSSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "dns_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering dns targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.DNSSDConfigs {
+            visitor(&sc.DNSSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "dns_sd_config", prev)
 }
 
 // getDockerSDScrapeWork returns `docker_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getDockerSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.DockerSDConfigs {
-            sdc := &sc.DockerSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "docker_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering docker targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.DockerSDConfigs {
+            visitor(&sc.DockerSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "docker_sd_config", prev)
 }
 
 // getDockerSwarmSDScrapeWork returns `dockerswarm_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getDockerSwarmSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.DockerSwarmSDConfigs {
-            sdc := &sc.DockerSwarmSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "dockerswarm_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering dockerswarm targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.DockerSwarmSDConfigs {
+            visitor(&sc.DockerSwarmSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "dockerswarm_sd_config", prev)
 }
 
 // getEC2SDScrapeWork returns `ec2_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getEC2SDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.EC2SDConfigs {
-            sdc := &sc.EC2SDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "ec2_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering ec2 targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.EC2SDConfigs {
+            visitor(&sc.EC2SDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "ec2_sd_config", prev)
 }
 
 // getEurekaSDScrapeWork returns `eureka_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getEurekaSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.EurekaSDConfigs {
-            sdc := &sc.EurekaSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "eureka_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering eureka targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.EurekaSDConfigs {
+            visitor(&sc.EurekaSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "eureka_sd_config", prev)
 }
 
 // getFileSDScrapeWork returns `file_sd_configs` ScrapeWork from cfg.
@@ -776,60 +624,27 @@ func (cfg *Config) getFileSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
 // getGCESDScrapeWork returns `gce_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getGCESDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.GCESDConfigs {
-            sdc := &sc.GCESDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "gce_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering gce targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.GCESDConfigs {
+            visitor(&sc.GCESDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "gce_sd_config", prev)
 }
 
 // getHTTPDScrapeWork returns `http_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getHTTPDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.HTTPSDConfigs {
-            sdc := &sc.HTTPSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "http_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering http targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.HTTPSDConfigs {
+            visitor(&sc.HTTPSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "http_sd_config", prev)
 }
 
 // getKubernetesSDScrapeWork returns `kubernetes_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getKubernetesSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
+    const discoveryType = "kubernetes_sd_config"
     swsPrevByJob := getSWSByJob(prev)
     dst := make([]*ScrapeWork, 0, len(prev))
     for _, sc := range cfg.ScrapeConfigs {
@@ -839,7 +654,7 @@ func (cfg *Config) getKubernetesSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
             sdc := &sc.KubernetesSDConfigs[j]
             swos, err := sdc.GetScrapeWorkObjects()
             if err != nil {
-                logger.Errorf("skipping kubernetes_sd_config targets for job_name %q because of error: %s", sc.swc.jobName, err)
+                logger.Errorf("skipping %s targets for job_name=%s because of error: %s", discoveryType, sc.swc.jobName, err)
                 ok = false
                 break
             }
@@ -848,13 +663,8 @@ func (cfg *Config) getKubernetesSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
                 dst = append(dst, sw)
             }
         }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering kubernetes_sd_config targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
+        if !ok {
+            dst = sc.appendPrevTargets(dst[:dstLen], swsPrevByJob, discoveryType)
         }
     }
     return dst
@@ -862,111 +672,81 @@ func (cfg *Config) getKubernetesSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
 // getKumaSDScrapeWork returns `kuma_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getKumaSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.KumaSDConfigs {
-            sdc := &sc.KumaSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "kuma_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering kuma targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.KumaSDConfigs {
+            visitor(&sc.KumaSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "kuma_sd_config", prev)
 }
 
 // getNomadSDScrapeWork returns `nomad_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getNomadSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.NomadSDConfigs {
-            sdc := &sc.NomadSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "nomad_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering nomad_sd_config targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.NomadSDConfigs {
+            visitor(&sc.NomadSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "nomad_sd_config", prev)
 }
 
 // getOpenStackSDScrapeWork returns `openstack_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getOpenStackSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.OpenStackSDConfigs {
-            sdc := &sc.OpenStackSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "openstack_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering openstack targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.OpenStackSDConfigs {
+            visitor(&sc.OpenStackSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "openstack_sd_config", prev)
 }
 
 // getYandexCloudSDScrapeWork returns `yandexcloud_sd_configs` ScrapeWork from cfg.
 func (cfg *Config) getYandexCloudSDScrapeWork(prev []*ScrapeWork) []*ScrapeWork {
-    swsPrevByJob := getSWSByJob(prev)
-    dst := make([]*ScrapeWork, 0, len(prev))
-    for _, sc := range cfg.ScrapeConfigs {
-        dstLen := len(dst)
-        ok := true
-        for j := range sc.YandexCloudSDConfigs {
-            sdc := &sc.YandexCloudSDConfigs[j]
-            var okLocal bool
-            dst, okLocal = appendSDScrapeWork(dst, sdc, cfg.baseDir, sc.swc, "yandexcloud_sd_config")
-            if ok {
-                ok = okLocal
-            }
-        }
-        if ok {
-            continue
-        }
-        swsPrev := swsPrevByJob[sc.swc.jobName]
-        if len(swsPrev) > 0 {
-            logger.Errorf("there were errors when discovering yandexcloud targets for job %q, so preserving the previous targets", sc.swc.jobName)
-            dst = append(dst[:dstLen], swsPrev...)
-        }
-    }
-    return dst
+    visitConfigs := func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)) {
+        for i := range sc.YandexCloudSDConfigs {
+            visitor(&sc.YandexCloudSDConfigs[i])
+        }
+    }
+    return cfg.getScrapeWorkGeneric(visitConfigs, "yandexcloud_sd_config", prev)
 }
+
+type targetLabelsGetter interface {
+    GetLabels(baseDir string) ([]*promutils.Labels, error)
+}
+
+func (cfg *Config) getScrapeWorkGeneric(visitConfigs func(sc *ScrapeConfig, visitor func(sdc targetLabelsGetter)), discoveryType string, prev []*ScrapeWork) []*ScrapeWork {
+    swsPrevByJob := getSWSByJob(prev)
+    dst := make([]*ScrapeWork, 0, len(prev))
+    for _, sc := range cfg.ScrapeConfigs {
+        dstLen := len(dst)
+        ok := true
+        visitConfigs(sc, func(sdc targetLabelsGetter) {
+            if !ok {
+                return
+            }
+            targetLabels, err := sdc.GetLabels(cfg.baseDir)
+            if err != nil {
+                logger.Errorf("skipping %s targets for job_name=%s because of error: %s", discoveryType, sc.swc.jobName, err)
+                ok = false
+                return
+            }
+            dst = appendScrapeWorkForTargetLabels(dst, sc.swc, targetLabels, discoveryType)
+        })
+        if !ok {
+            dst = sc.appendPrevTargets(dst[:dstLen], swsPrevByJob, discoveryType)
+        }
+    }
+    return dst
+}
+
+func (sc *ScrapeConfig) appendPrevTargets(dst []*ScrapeWork, swsPrevByJob map[string][]*ScrapeWork, discoveryType string) []*ScrapeWork {
+    swsPrev := swsPrevByJob[sc.swc.jobName]
+    if len(swsPrev) == 0 {
+        return dst
+    }
+    logger.Errorf("preserving the previous %s targets for job_name=%s because of temporary discovery error", discoveryType, sc.swc.jobName)
+    return append(dst, swsPrev...)
+}
 // getStaticScrapeWork returns `static_configs` ScrapeWork from cfg.
 func (cfg *Config) getStaticScrapeWork() []*ScrapeWork {
@@ -1106,19 +886,6 @@ type scrapeWorkConfig struct {
     noStaleMarkers bool
 }
 
-type targetLabelsGetter interface {
-    GetLabels(baseDir string) ([]*promutils.Labels, error)
-}
-
-func appendSDScrapeWork(dst []*ScrapeWork, sdc targetLabelsGetter, baseDir string, swc *scrapeWorkConfig, discoveryType string) ([]*ScrapeWork, bool) {
-    targetLabels, err := sdc.GetLabels(baseDir)
-    if err != nil {
-        logger.Errorf("skipping %s targets for job_name %q because of error: %s", discoveryType, swc.jobName, err)
-        return dst, false
-    }
-    return appendScrapeWorkForTargetLabels(dst, swc, targetLabels, discoveryType), true
-}
-
 func appendScrapeWorkForTargetLabels(dst []*ScrapeWork, swc *scrapeWorkConfig, targetLabels []*promutils.Labels, discoveryType string) []*ScrapeWork {
     startTime := time.Now()
     // Process targetLabels in parallel in order to reduce processing time for big number of targetLabels.
@@ -1135,7 +902,7 @@ func appendScrapeWorkForTargetLabels(dst []*ScrapeWork, swc *scrapeWorkConfig, t
             target := metaLabels.Get("__address__")
             sw, err := swc.getScrapeWork(target, nil, metaLabels)
             if err != nil {
-                err = fmt.Errorf("skipping %s target %q for job_name %q because of error: %w", discoveryType, target, swc.jobName, err)
+                err = fmt.Errorf("skipping %s target %q for job_name=%s because of error: %w", discoveryType, target, swc.jobName, err)
             }
             resultCh <- result{
                 sw: sw,
@@ -1173,7 +940,7 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []*ScrapeWork, baseDir string, swc
         paths, err = filepath.Glob(pathPattern)
         if err != nil {
             // Do not return this error, since other files may contain valid scrape configs.
-            logger.Errorf("invalid pattern %q in `file_sd_config->files` section of job_name=%q: %s; skipping it", file, swc.jobName, err)
+            logger.Errorf("skipping entry %q in `file_sd_config->files` for job_name=%s because of error: %s", file, swc.jobName, err)
             continue
         }
     }
@@ -1181,7 +948,7 @@ func (sdc *FileSDConfig) appendScrapeWork(dst []*ScrapeWork, baseDir string, swc
         stcs, err := loadStaticConfigs(path)
         if err != nil {
             // Do not return this error, since other paths may contain valid scrape configs.
-            logger.Errorf("cannot load file %q for job_name=%q at `file_sd_configs`: %s; skipping this file", path, swc.jobName, err)
+            logger.Errorf("skipping file %s for job_name=%s at `file_sd_configs` because of error: %s", path, swc.jobName, err)
             continue
         }
         pathShort := path
@@ -1205,13 +972,13 @@ func (stc *StaticConfig) appendScrapeWork(dst []*ScrapeWork, swc *scrapeWorkConf
     for _, target := range stc.Targets {
         if target == "" {
             // Do not return this error, since other targets may be valid
-            logger.Errorf("`static_configs` target for `job_name` %q cannot be empty; skipping it", swc.jobName)
+            logger.Errorf("skipping empty `static_configs` target for job_name=%s", swc.jobName)
             continue
         }
         sw, err := swc.getScrapeWork(target, stc.Labels, metaLabels)
         if err != nil {
             // Do not return this error, since other targets may be valid
-            logger.Errorf("error when parsing `static_configs` target %q for `job_name` %q: %s; skipping it", target, swc.jobName, err)
+            logger.Errorf("skipping `static_configs` target %q for job_name=%s because of error: %s", target, swc.jobName, err)
             continue
         }
         if sw != nil {