2020-02-23 12:35:47 +01:00
|
|
|
package promscrape
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"sort"
|
|
|
|
"strings"
|
|
|
|
"sync"
|
|
|
|
"time"
|
|
|
|
|
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/promrelabel"
|
|
|
|
)
|
|
|
|
|
|
|
|
// tsmGlobal holds the status for all the currently registered scrape targets.
var tsmGlobal = newTargetStatusMap()
|
|
|
|
|
|
|
|
// WriteHumanReadableTargetsStatus writes human-readable status for all the scrape targets to w.
//
// It delegates to the package-level tsmGlobal registry.
func WriteHumanReadableTargetsStatus(w io.Writer) {
	tsmGlobal.WriteHumanReadable(w)
}
|
|
|
|
|
|
|
|
// targetStatusMap maps scrape target ID to the most recent status of the target.
//
// All accesses to m must be performed under mu, since targets may be
// registered, unregistered and updated concurrently.
type targetStatusMap struct {
	// mu protects m.
	mu sync.Mutex

	// m is keyed by ScrapeWork.ID.
	m map[uint64]targetStatus
}
|
|
|
|
|
|
|
|
func newTargetStatusMap() *targetStatusMap {
|
|
|
|
return &targetStatusMap{
|
2020-04-14 12:08:48 +02:00
|
|
|
m: make(map[uint64]targetStatus),
|
2020-02-23 12:35:47 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (tsm *targetStatusMap) Reset() {
|
|
|
|
tsm.mu.Lock()
|
2020-04-14 12:08:48 +02:00
|
|
|
tsm.m = make(map[uint64]targetStatus)
|
2020-02-23 12:35:47 +01:00
|
|
|
tsm.mu.Unlock()
|
|
|
|
}
|
|
|
|
|
2020-03-11 02:19:56 +01:00
|
|
|
func (tsm *targetStatusMap) RegisterAll(sws []ScrapeWork) {
|
|
|
|
tsm.mu.Lock()
|
|
|
|
for i := range sws {
|
|
|
|
sw := &sws[i]
|
2020-04-14 12:08:48 +02:00
|
|
|
tsm.m[sw.ID] = targetStatus{
|
2020-03-11 02:19:56 +01:00
|
|
|
sw: sw,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
tsm.mu.Unlock()
|
|
|
|
}
|
|
|
|
|
|
|
|
func (tsm *targetStatusMap) UnregisterAll(sws []ScrapeWork) {
|
|
|
|
tsm.mu.Lock()
|
|
|
|
for i := range sws {
|
2020-04-14 12:08:48 +02:00
|
|
|
delete(tsm.m, sws[i].ID)
|
2020-03-11 02:19:56 +01:00
|
|
|
}
|
|
|
|
tsm.mu.Unlock()
|
|
|
|
}
|
|
|
|
|
2020-02-23 12:35:47 +01:00
|
|
|
func (tsm *targetStatusMap) Update(sw *ScrapeWork, up bool, scrapeTime, scrapeDuration int64, err error) {
|
|
|
|
tsm.mu.Lock()
|
2020-04-14 12:08:48 +02:00
|
|
|
tsm.m[sw.ID] = targetStatus{
|
2020-02-23 12:35:47 +01:00
|
|
|
sw: sw,
|
|
|
|
up: up,
|
|
|
|
scrapeTime: scrapeTime,
|
|
|
|
scrapeDuration: scrapeDuration,
|
|
|
|
err: err,
|
|
|
|
}
|
|
|
|
tsm.mu.Unlock()
|
|
|
|
}
|
|
|
|
|
|
|
|
func (tsm *targetStatusMap) WriteHumanReadable(w io.Writer) {
|
|
|
|
byJob := make(map[string][]targetStatus)
|
|
|
|
tsm.mu.Lock()
|
2020-03-11 02:19:56 +01:00
|
|
|
for _, st := range tsm.m {
|
2020-04-14 12:32:55 +02:00
|
|
|
job := st.sw.Job()
|
2020-02-23 12:35:47 +01:00
|
|
|
byJob[job] = append(byJob[job], st)
|
|
|
|
}
|
|
|
|
tsm.mu.Unlock()
|
|
|
|
|
|
|
|
var jss []jobStatus
|
|
|
|
for job, statuses := range byJob {
|
|
|
|
jss = append(jss, jobStatus{
|
|
|
|
job: job,
|
|
|
|
statuses: statuses,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
sort.Slice(jss, func(i, j int) bool {
|
|
|
|
return jss[i].job < jss[j].job
|
|
|
|
})
|
|
|
|
|
2020-04-14 13:51:52 +02:00
|
|
|
targetsByEndpoint := make(map[string]int)
|
2020-02-23 12:35:47 +01:00
|
|
|
for _, js := range jss {
|
|
|
|
sts := js.statuses
|
|
|
|
sort.Slice(sts, func(i, j int) bool {
|
|
|
|
return sts[i].sw.ScrapeURL < sts[j].sw.ScrapeURL
|
|
|
|
})
|
|
|
|
ups := 0
|
|
|
|
for _, st := range sts {
|
|
|
|
if st.up {
|
|
|
|
ups++
|
|
|
|
}
|
|
|
|
}
|
|
|
|
fmt.Fprintf(w, "job=%q (%d/%d up)\n", js.job, ups, len(sts))
|
|
|
|
for _, st := range sts {
|
|
|
|
state := "up"
|
|
|
|
if !st.up {
|
|
|
|
state = "down"
|
|
|
|
}
|
|
|
|
var labels []string
|
|
|
|
for _, label := range promrelabel.FinalizeLabels(nil, st.sw.Labels) {
|
|
|
|
labels = append(labels, fmt.Sprintf("%s=%q", label.Name, label.Value))
|
|
|
|
}
|
|
|
|
labelsStr := "{" + strings.Join(labels, ", ") + "}"
|
|
|
|
lastScrape := st.getDurationFromLastScrape()
|
|
|
|
errMsg := ""
|
|
|
|
if st.err != nil {
|
|
|
|
errMsg = st.err.Error()
|
|
|
|
}
|
|
|
|
fmt.Fprintf(w, "\tstate=%s, endpoint=%s, labels=%s, last_scrape=%.3fs ago, scrape_duration=%.3fs, error=%q\n",
|
|
|
|
state, st.sw.ScrapeURL, labelsStr, lastScrape.Seconds(), float64(st.scrapeDuration)/1000, errMsg)
|
2020-04-14 13:51:52 +02:00
|
|
|
key := fmt.Sprintf("endpoint=%s, labels=%s", st.sw.ScrapeURL, labelsStr)
|
|
|
|
targetsByEndpoint[key]++
|
2020-02-23 12:35:47 +01:00
|
|
|
}
|
|
|
|
}
|
2020-04-14 13:51:52 +02:00
|
|
|
fmt.Fprintf(w, "\n")
|
|
|
|
|
|
|
|
// Check whether there are targets with duplicate endpoints and labels.
|
|
|
|
for key, n := range targetsByEndpoint {
|
|
|
|
if n <= 1 {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
fmt.Fprintf(w, "!!! Scrape config error: %d duplicate targets with identical endpoint and labels found:\n", n)
|
|
|
|
fmt.Fprintf(w, "\t%s\n", key)
|
|
|
|
}
|
2020-02-23 12:35:47 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// jobStatus groups the statuses for all the targets belonging to a single job.
type jobStatus struct {
	// job is the job name the targets belong to.
	job string

	// statuses holds the status for every target of the job.
	statuses []targetStatus
}
|
|
|
|
|
|
|
|
// targetStatus represents the last known scrape state for a single target.
type targetStatus struct {
	// sw points to the scrape work the status belongs to.
	sw *ScrapeWork

	// up reports whether the last scrape succeeded.
	up bool

	// scrapeTime is the unix timestamp of the last scrape, in milliseconds.
	scrapeTime int64

	// scrapeDuration is the duration of the last scrape, in milliseconds.
	scrapeDuration int64

	// err is the error from the last scrape; nil on success.
	err error
}
|
|
|
|
|
|
|
|
func (st *targetStatus) getDurationFromLastScrape() time.Duration {
|
|
|
|
return time.Since(time.Unix(st.scrapeTime/1000, (st.scrapeTime%1000)*1e6))
|
|
|
|
}
|