2019-05-22 23:16:55 +02:00
|
|
|
package promql
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
2021-09-27 17:02:41 +02:00
|
|
|
"math"
|
|
|
|
"sort"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
2021-09-27 17:55:35 +02:00
|
|
|
|
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
|
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/storage"
|
|
|
|
"github.com/VictoriaMetrics/metrics"
|
|
|
|
"github.com/VictoriaMetrics/metricsql"
|
2021-10-16 20:13:58 +02:00
|
|
|
xxhash "github.com/cespare/xxhash/v2"
|
2019-05-22 23:16:55 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
// aggrFuncs maps the lowercase aggregate function name to its implementation.
//
// Lookups go through getAggrFunc, which lowercases the requested name first,
// so all keys here must be lowercase.
var aggrFuncs = map[string]aggrFunc{
	// See https://prometheus.io/docs/prometheus/latest/querying/operators/#aggregation-operators
	"sum":          newAggrFunc(aggrFuncSum),
	"min":          newAggrFunc(aggrFuncMin),
	"max":          newAggrFunc(aggrFuncMax),
	"avg":          newAggrFunc(aggrFuncAvg),
	"stddev":       newAggrFunc(aggrFuncStddev),
	"stdvar":       newAggrFunc(aggrFuncStdvar),
	"count":        newAggrFunc(aggrFuncCount),
	"count_values": aggrFuncCountValues,
	"bottomk":      newAggrFuncTopK(true),
	"topk":         newAggrFuncTopK(false),
	"quantile":     aggrFuncQuantile,
	"group":        newAggrFunc(aggrFuncGroup),

	// PromQL extension funcs
	"median":         aggrFuncMedian,
	"limitk":         aggrFuncLimitK,
	"limit_offset":   aggrFuncLimitOffset,
	"distinct":       newAggrFunc(aggrFuncDistinct),
	"sum2":           newAggrFunc(aggrFuncSum2),
	"geomean":        newAggrFunc(aggrFuncGeomean),
	"histogram":      newAggrFunc(aggrFuncHistogram),
	"topk_min":       newAggrFuncRangeTopK(minValue, false),
	"topk_max":       newAggrFuncRangeTopK(maxValue, false),
	"topk_avg":       newAggrFuncRangeTopK(avgValue, false),
	"topk_median":    newAggrFuncRangeTopK(medianValue, false),
	"topk_last":      newAggrFuncRangeTopK(lastValue, false),
	"bottomk_min":    newAggrFuncRangeTopK(minValue, true),
	"bottomk_max":    newAggrFuncRangeTopK(maxValue, true),
	"bottomk_avg":    newAggrFuncRangeTopK(avgValue, true),
	"bottomk_median": newAggrFuncRangeTopK(medianValue, true),
	"bottomk_last":   newAggrFuncRangeTopK(lastValue, true),
	"any":            aggrFuncAny,
	"mad":            newAggrFunc(aggrFuncMAD),
	"outliers_mad":   aggrFuncOutliersMAD,
	"outliersk":      aggrFuncOutliersK,
	"mode":           newAggrFunc(aggrFuncMode),
	"zscore":         aggrFuncZScore,
	"quantiles":      aggrFuncQuantiles,
}
|
|
|
|
|
|
|
|
type aggrFunc func(afa *aggrFuncArg) ([]*timeseries, error)
|
|
|
|
|
|
|
|
// aggrFuncArg carries the evaluated arguments and evaluation context
// for a single aggregate function call.
type aggrFuncArg struct {
	// args holds the evaluated function args - one []*timeseries per arg.
	args [][]*timeseries
	// ae is the parsed aggregate expression (provides Modifier and Limit).
	ae *metricsql.AggrFuncExpr
	// ec is the evaluation config for the current query.
	ec *EvalConfig
}
|
|
|
|
|
|
|
|
func getAggrFunc(s string) aggrFunc {
|
|
|
|
s = strings.ToLower(s)
|
|
|
|
return aggrFuncs[s]
|
|
|
|
}
|
|
|
|
|
|
|
|
func newAggrFunc(afe func(tss []*timeseries) []*timeseries) aggrFunc {
|
|
|
|
return func(afa *aggrFuncArg) ([]*timeseries, error) {
|
2020-08-15 00:15:01 +02:00
|
|
|
tss, err := getAggrTimeseries(afa.args)
|
|
|
|
if err != nil {
|
2019-05-22 23:16:55 +02:00
|
|
|
return nil, err
|
|
|
|
}
|
2020-10-20 18:41:48 +02:00
|
|
|
return aggrFuncExt(func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
return afe(tss)
|
|
|
|
}, tss, &afa.ae.Modifier, afa.ae.Limit, false)
|
2020-08-15 00:15:01 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func getAggrTimeseries(args [][]*timeseries) ([]*timeseries, error) {
|
|
|
|
if len(args) == 0 {
|
|
|
|
return nil, fmt.Errorf("expecting at least one arg")
|
|
|
|
}
|
|
|
|
tss := args[0]
|
|
|
|
for _, arg := range args[1:] {
|
|
|
|
tss = append(tss, arg...)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2020-08-15 00:15:01 +02:00
|
|
|
return tss, nil
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
|
2019-12-25 20:35:47 +01:00
|
|
|
func removeGroupTags(metricName *storage.MetricName, modifier *metricsql.ModifierExpr) {
|
2019-07-09 22:20:38 +02:00
|
|
|
groupOp := strings.ToLower(modifier.Op)
|
2019-05-22 23:16:55 +02:00
|
|
|
switch groupOp {
|
2019-07-09 22:20:38 +02:00
|
|
|
case "", "by":
|
|
|
|
metricName.RemoveTagsOn(modifier.Args)
|
2019-05-22 23:16:55 +02:00
|
|
|
case "without":
|
2019-07-09 22:20:38 +02:00
|
|
|
metricName.RemoveTagsIgnoring(modifier.Args)
|
2020-07-17 14:14:58 +02:00
|
|
|
// Reset metric group as Prometheus does on `aggr(...) without (...)` call.
|
|
|
|
metricName.ResetMetricGroup()
|
2019-05-22 23:16:55 +02:00
|
|
|
default:
|
2019-07-09 22:20:38 +02:00
|
|
|
logger.Panicf("BUG: unknown group modifier: %q", groupOp)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2019-07-09 22:20:38 +02:00
|
|
|
}
|
|
|
|
|
2020-10-20 18:41:48 +02:00
|
|
|
func aggrFuncExt(afe func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries, argOrig []*timeseries,
|
|
|
|
modifier *metricsql.ModifierExpr, maxSeries int, keepOriginal bool) ([]*timeseries, error) {
|
2020-07-17 15:47:16 +02:00
|
|
|
arg := copyTimeseriesMetricNames(argOrig, keepOriginal)
|
2019-05-22 23:16:55 +02:00
|
|
|
|
|
|
|
// Perform grouping.
|
|
|
|
m := make(map[string][]*timeseries)
|
|
|
|
bb := bbPool.Get()
|
|
|
|
for i, ts := range arg {
|
2019-07-09 22:20:38 +02:00
|
|
|
removeGroupTags(&ts.MetricName, modifier)
|
2019-05-22 23:16:55 +02:00
|
|
|
bb.B = marshalMetricNameSorted(bb.B[:0], &ts.MetricName)
|
|
|
|
if keepOriginal {
|
|
|
|
ts = argOrig[i]
|
|
|
|
}
|
2020-05-12 18:06:54 +02:00
|
|
|
tss := m[string(bb.B)]
|
|
|
|
if tss == nil && maxSeries > 0 && len(m) >= maxSeries {
|
|
|
|
// We already reached time series limit after grouping. Skip other time series.
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
tss = append(tss, ts)
|
|
|
|
m[string(bb.B)] = tss
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
bbPool.Put(bb)
|
|
|
|
|
2019-06-11 00:02:56 +02:00
|
|
|
srcTssCount := 0
|
|
|
|
dstTssCount := 0
|
2019-05-22 23:16:55 +02:00
|
|
|
rvs := make([]*timeseries, 0, len(m))
|
|
|
|
for _, tss := range m {
|
2020-10-20 18:41:48 +02:00
|
|
|
rv := afe(tss, modifier)
|
2019-05-22 23:16:55 +02:00
|
|
|
rvs = append(rvs, rv...)
|
2019-06-11 00:02:56 +02:00
|
|
|
srcTssCount += len(tss)
|
|
|
|
dstTssCount += len(rv)
|
|
|
|
if dstTssCount > 2000 && dstTssCount > 16*srcTssCount {
|
|
|
|
// This looks like count_values explosion.
|
|
|
|
return nil, fmt.Errorf(`too many timeseries after aggragation; got %d; want less than %d`, dstTssCount, 16*srcTssCount)
|
|
|
|
}
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
return rvs, nil
|
|
|
|
}
|
|
|
|
|
2020-07-17 14:14:58 +02:00
|
|
|
func aggrFuncAny(afa *aggrFuncArg) ([]*timeseries, error) {
|
2020-08-15 00:15:01 +02:00
|
|
|
tss, err := getAggrTimeseries(afa.args)
|
|
|
|
if err != nil {
|
2020-07-17 14:14:58 +02:00
|
|
|
return nil, err
|
|
|
|
}
|
2020-10-20 18:41:48 +02:00
|
|
|
afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
2020-07-17 14:14:58 +02:00
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
limit := afa.ae.Limit
|
|
|
|
if limit > 1 {
|
|
|
|
// Only a single time series per group must be returned
|
|
|
|
limit = 1
|
|
|
|
}
|
2020-08-15 00:15:01 +02:00
|
|
|
return aggrFuncExt(afe, tss, &afa.ae.Modifier, limit, true)
|
2020-05-12 18:45:42 +02:00
|
|
|
}
|
|
|
|
|
2020-07-28 12:40:09 +02:00
|
|
|
func aggrFuncGroup(tss []*timeseries) []*timeseries {
|
|
|
|
// See https://github.com/prometheus/prometheus/commit/72425d4e3d14d209cc3f3f6e10e3240411303399
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
v := nan
|
|
|
|
for _, ts := range tss {
|
|
|
|
if math.IsNaN(ts.Values[i]) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
v = 1
|
2020-07-17 14:16:01 +02:00
|
|
|
}
|
2020-07-28 12:40:09 +02:00
|
|
|
dst.Values[i] = v
|
2020-07-17 14:16:01 +02:00
|
|
|
}
|
2020-07-28 12:40:09 +02:00
|
|
|
return tss[:1]
|
2020-07-17 14:16:01 +02:00
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func aggrFuncSum(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - nothing to sum.
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
sum := float64(0)
|
|
|
|
count := 0
|
|
|
|
for _, ts := range tss {
|
2020-10-20 18:41:48 +02:00
|
|
|
v := ts.Values[i]
|
|
|
|
if math.IsNaN(v) {
|
2019-05-22 23:16:55 +02:00
|
|
|
continue
|
|
|
|
}
|
2020-10-20 18:41:48 +02:00
|
|
|
sum += v
|
2019-05-22 23:16:55 +02:00
|
|
|
count++
|
|
|
|
}
|
|
|
|
if count == 0 {
|
|
|
|
sum = nan
|
|
|
|
}
|
|
|
|
dst.Values[i] = sum
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
2019-06-24 15:17:28 +02:00
|
|
|
func aggrFuncSum2(tss []*timeseries) []*timeseries {
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
sum2 := float64(0)
|
|
|
|
count := 0
|
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
if math.IsNaN(v) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
sum2 += v * v
|
|
|
|
count++
|
|
|
|
}
|
|
|
|
if count == 0 {
|
|
|
|
sum2 = nan
|
|
|
|
}
|
|
|
|
dst.Values[i] = sum2
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncGeomean(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - nothing to geomean.
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
p := 1.0
|
|
|
|
count := 0
|
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
if math.IsNaN(v) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
p *= v
|
|
|
|
count++
|
|
|
|
}
|
|
|
|
if count == 0 {
|
|
|
|
p = nan
|
|
|
|
}
|
|
|
|
dst.Values[i] = math.Pow(p, 1/float64(count))
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
2019-11-23 23:02:18 +01:00
|
|
|
func aggrFuncHistogram(tss []*timeseries) []*timeseries {
|
2019-11-25 13:23:35 +01:00
|
|
|
var h metrics.Histogram
|
2019-11-23 23:02:18 +01:00
|
|
|
m := make(map[string]*timeseries)
|
|
|
|
for i := range tss[0].Values {
|
2019-11-25 13:23:35 +01:00
|
|
|
h.Reset()
|
2019-11-23 23:02:18 +01:00
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
h.Update(v)
|
|
|
|
}
|
|
|
|
h.VisitNonZeroBuckets(func(vmrange string, count uint64) {
|
|
|
|
ts := m[vmrange]
|
|
|
|
if ts == nil {
|
|
|
|
ts = ×eries{}
|
|
|
|
ts.CopyFromShallowTimestamps(tss[0])
|
|
|
|
ts.MetricName.RemoveTag("vmrange")
|
|
|
|
ts.MetricName.AddTag("vmrange", vmrange)
|
|
|
|
values := ts.Values
|
|
|
|
for k := range values {
|
|
|
|
values[k] = 0
|
|
|
|
}
|
|
|
|
m[vmrange] = ts
|
|
|
|
}
|
|
|
|
ts.Values[i] = float64(count)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
rvs := make([]*timeseries, 0, len(m))
|
|
|
|
for _, ts := range m {
|
|
|
|
rvs = append(rvs, ts)
|
|
|
|
}
|
|
|
|
return vmrangeBucketsToLE(rvs)
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func aggrFuncMin(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - nothing to min.
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
min := dst.Values[i]
|
|
|
|
for _, ts := range tss {
|
|
|
|
if math.IsNaN(min) || ts.Values[i] < min {
|
|
|
|
min = ts.Values[i]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
dst.Values[i] = min
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncMax(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - nothing to max.
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
max := dst.Values[i]
|
|
|
|
for _, ts := range tss {
|
|
|
|
if math.IsNaN(max) || ts.Values[i] > max {
|
|
|
|
max = ts.Values[i]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
dst.Values[i] = max
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncAvg(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - nothing to avg.
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
// Do not use `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation,
|
|
|
|
// since it is slower and has no obvious benefits in increased precision.
|
|
|
|
var sum float64
|
|
|
|
count := 0
|
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
if math.IsNaN(v) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
count++
|
|
|
|
sum += v
|
|
|
|
}
|
|
|
|
avg := nan
|
|
|
|
if count > 0 {
|
|
|
|
avg = sum / float64(count)
|
|
|
|
}
|
|
|
|
dst.Values[i] = avg
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncStddev(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - stddev over a single time series is zero
|
|
|
|
values := tss[0].Values
|
|
|
|
for i, v := range values {
|
|
|
|
if !math.IsNaN(v) {
|
|
|
|
values[i] = 0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
rvs := aggrFuncStdvar(tss)
|
|
|
|
dst := rvs[0]
|
|
|
|
for i, v := range dst.Values {
|
|
|
|
dst.Values[i] = math.Sqrt(v)
|
|
|
|
}
|
|
|
|
return rvs
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncStdvar(tss []*timeseries) []*timeseries {
|
|
|
|
if len(tss) == 1 {
|
|
|
|
// Fast path - stdvar over a single time series is zero
|
|
|
|
values := tss[0].Values
|
|
|
|
for i, v := range values {
|
|
|
|
if !math.IsNaN(v) {
|
|
|
|
values[i] = 0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
// See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation
|
2020-08-03 20:51:15 +02:00
|
|
|
var avg, count, q float64
|
2019-05-22 23:16:55 +02:00
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
if math.IsNaN(v) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
count++
|
|
|
|
avgNew := avg + (v-avg)/count
|
|
|
|
q += (v - avg) * (v - avgNew)
|
|
|
|
avg = avgNew
|
|
|
|
}
|
|
|
|
if count == 0 {
|
|
|
|
q = nan
|
|
|
|
}
|
|
|
|
dst.Values[i] = q / count
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncCount(tss []*timeseries) []*timeseries {
|
|
|
|
dst := tss[0]
|
|
|
|
for i := range dst.Values {
|
|
|
|
count := 0
|
|
|
|
for _, ts := range tss {
|
|
|
|
if math.IsNaN(ts.Values[i]) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
count++
|
|
|
|
}
|
2019-07-25 21:02:03 +02:00
|
|
|
v := float64(count)
|
|
|
|
if count == 0 {
|
|
|
|
v = nan
|
|
|
|
}
|
|
|
|
dst.Values[i] = v
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncDistinct(tss []*timeseries) []*timeseries {
|
|
|
|
dst := tss[0]
|
|
|
|
m := make(map[float64]struct{}, len(tss))
|
|
|
|
for i := range dst.Values {
|
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
if math.IsNaN(v) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
m[v] = struct{}{}
|
|
|
|
}
|
|
|
|
n := float64(len(m))
|
|
|
|
if n == 0 {
|
|
|
|
n = nan
|
|
|
|
}
|
|
|
|
dst.Values[i] = n
|
|
|
|
for k := range m {
|
|
|
|
delete(m, k)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
2020-07-20 14:18:20 +02:00
|
|
|
func aggrFuncMode(tss []*timeseries) []*timeseries {
|
|
|
|
dst := tss[0]
|
|
|
|
a := make([]float64, 0, len(tss))
|
|
|
|
for i := range dst.Values {
|
|
|
|
a := a[:0]
|
|
|
|
for _, ts := range tss {
|
|
|
|
v := ts.Values[i]
|
|
|
|
if !math.IsNaN(v) {
|
|
|
|
a = append(a, v)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
dst.Values[i] = modeNoNaNs(nan, a)
|
|
|
|
}
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
2020-08-03 20:51:15 +02:00
|
|
|
// aggrFuncZScore implements `zscore(q)`: every point is replaced in place by
// its standard score relative to the group's per-timestamp mean and stddev.
// The metric group is reset on all returned series.
func aggrFuncZScore(afa *aggrFuncArg) ([]*timeseries, error) {
	tss, err := getAggrTimeseries(afa.args)
	if err != nil {
		return nil, err
	}
	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
		for i := range tss[0].Values {
			// Calculate avg and stddev for tss points at position i.
			// See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation
			var avg, count, q float64
			for _, ts := range tss {
				v := ts.Values[i]
				if math.IsNaN(v) {
					continue
				}
				count++
				avgNew := avg + (v-avg)/count
				q += (v - avg) * (v - avgNew)
				avg = avgNew
			}
			if count == 0 {
				// Cannot calculate z-score for NaN points.
				continue
			}

			// Calculate z-score for tss points at position i.
			// See https://en.wikipedia.org/wiki/Standard_score
			stddev := math.Sqrt(q / count)
			for _, ts := range tss {
				v := ts.Values[i]
				if math.IsNaN(v) {
					continue
				}
				// Note: mutates the input series in place.
				ts.Values[i] = (v - avg) / stddev
			}
		}
		// Remove MetricGroup from all the tss.
		for _, ts := range tss {
			ts.MetricName.ResetMetricGroup()
		}
		return tss
	}
	return aggrFuncExt(afe, tss, &afa.ae.Modifier, afa.ae.Limit, true)
}
|
|
|
|
|
2020-07-20 14:18:20 +02:00
|
|
|
// modeNoNaNs returns mode for a.
//
// It is expected that a doesn't contain NaNs.
//
// The function modifies contents for a, so the caller must prepare it accordingly.
//
// See https://en.wikipedia.org/wiki/Mode_(statistics)
func modeNoNaNs(prevValue float64, a []float64) float64 {
	if len(a) == 0 {
		return prevValue
	}
	// Sort so that equal values form contiguous runs; the longest run wins.
	sort.Float64s(a)
	mode := prevValue
	bestRun := 0
	runStart := -1
	for idx, v := range a {
		if v == prevValue {
			continue
		}
		// The run of prevValue just ended at idx-1.
		if run := idx - runStart; run > bestRun || math.IsNaN(mode) {
			bestRun = run
			mode = prevValue
		}
		runStart = idx
		prevValue = v
	}
	// Account for the final run.
	if run := len(a) - runStart; run > bestRun || math.IsNaN(mode) {
		mode = prevValue
	}
	return mode
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
// aggrFuncCountValues implements count_values(dstLabel, q): for every
// distinct value seen across the group it emits one series labeled
// dstLabel=<value>, counting how many input series hold that value at each
// timestamp.
func aggrFuncCountValues(afa *aggrFuncArg) ([]*timeseries, error) {
	args := afa.args
	if err := expectTransformArgsNum(args, 2); err != nil {
		return nil, err
	}
	dstLabel, err := getString(args[0], 0)
	if err != nil {
		return nil, err
	}

	// Remove dstLabel from grouping like Prometheus does.
	// Note: this mutates afa.ae.Modifier.Args in place.
	modifier := &afa.ae.Modifier
	switch strings.ToLower(modifier.Op) {
	case "without":
		modifier.Args = append(modifier.Args, dstLabel)
	case "by":
		dstArgs := modifier.Args[:0]
		for _, arg := range modifier.Args {
			if arg == dstLabel {
				continue
			}
			dstArgs = append(dstArgs, arg)
		}
		modifier.Args = dstArgs
	default:
		// Do nothing
	}

	afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
		// Collect the set of distinct non-NaN values across the whole group.
		m := make(map[float64]bool)
		for _, ts := range tss {
			for _, v := range ts.Values {
				if math.IsNaN(v) {
					continue
				}
				m[v] = true
			}
		}
		// Sort values for deterministic output ordering.
		values := make([]float64, 0, len(m))
		for v := range m {
			values = append(values, v)
		}
		sort.Float64s(values)

		var rvs []*timeseries
		for _, v := range values {
			var dst timeseries
			dst.CopyFromShallowTimestamps(tss[0])
			dst.MetricName.RemoveTag(dstLabel)
			dst.MetricName.AddTag(dstLabel, strconv.FormatFloat(v, 'f', -1, 64))
			for i := range dst.Values {
				count := 0
				for _, ts := range tss {
					if ts.Values[i] == v {
						count++
					}
				}
				// Zero counts become NaN (gap) rather than 0.
				n := float64(count)
				if n == 0 {
					n = nan
				}
				dst.Values[i] = n
			}
			rvs = append(rvs, &dst)
		}
		return rvs
	}
	return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, false)
}
|
|
|
|
|
|
|
|
func newAggrFuncTopK(isReverse bool) aggrFunc {
|
|
|
|
return func(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
|
|
|
if err := expectTransformArgsNum(args, 2); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
ks, err := getScalar(args[0], 0)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2020-10-20 18:41:48 +02:00
|
|
|
afe := func(tss []*timeseries, modififer *metricsql.ModifierExpr) []*timeseries {
|
2019-12-05 18:19:31 +01:00
|
|
|
for n := range tss[0].Values {
|
|
|
|
sort.Slice(tss, func(i, j int) bool {
|
|
|
|
a := tss[i].Values[n]
|
|
|
|
b := tss[j].Values[n]
|
2019-05-22 23:16:55 +02:00
|
|
|
if isReverse {
|
2019-12-05 18:19:31 +01:00
|
|
|
a, b = b, a
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2019-12-05 18:19:31 +01:00
|
|
|
return lessWithNaNs(a, b)
|
2019-05-22 23:16:55 +02:00
|
|
|
})
|
2019-12-05 18:19:31 +01:00
|
|
|
fillNaNsAtIdx(n, ks[n], tss)
|
|
|
|
}
|
2021-04-07 23:09:34 +02:00
|
|
|
tss = removeNaNs(tss)
|
|
|
|
reverseSeries(tss)
|
|
|
|
return tss
|
2019-12-05 18:19:31 +01:00
|
|
|
}
|
2020-05-12 18:06:54 +02:00
|
|
|
return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, true)
|
2019-12-05 18:19:31 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func newAggrFuncRangeTopK(f func(values []float64) float64, isReverse bool) aggrFunc {
|
|
|
|
return func(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
2020-10-20 18:41:48 +02:00
|
|
|
if len(args) < 2 {
|
|
|
|
return nil, fmt.Errorf(`unexpected number of args; got %d; want at least %d`, len(args), 2)
|
|
|
|
}
|
|
|
|
if len(args) > 3 {
|
|
|
|
return nil, fmt.Errorf(`unexpected number of args; got %d; want no more than %d`, len(args), 3)
|
2019-12-05 18:19:31 +01:00
|
|
|
}
|
|
|
|
ks, err := getScalar(args[0], 0)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2020-10-20 18:41:48 +02:00
|
|
|
remainingSumTagName := ""
|
|
|
|
if len(args) == 3 {
|
|
|
|
remainingSumTagName, err = getString(args[2], 2)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
return getRangeTopKTimeseries(tss, modifier, ks, remainingSumTagName, f, isReverse)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2020-05-12 18:06:54 +02:00
|
|
|
return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, true)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-10-20 18:41:48 +02:00
|
|
|
// getRangeTopKTimeseries ranks tss by f over each series' full value range,
// keeps the top k (per the ks scalar) at every timestamp and NaNs out the
// rest. If remainingSumTagName is non-empty, a series summing the dropped
// values is appended. Fully-NaN series are removed and the result is
// returned in descending rank order.
func getRangeTopKTimeseries(tss []*timeseries, modifier *metricsql.ModifierExpr, ks []float64, remainingSumTagName string,
	f func(values []float64) float64, isReverse bool) []*timeseries {
	type tsWithValue struct {
		ts    *timeseries
		value float64
	}
	// Rank every series by f over all its values.
	maxs := make([]tsWithValue, len(tss))
	for i, ts := range tss {
		value := f(ts.Values)
		maxs[i] = tsWithValue{
			ts:    ts,
			value: value,
		}
	}
	// Ascending sort: the winners end up at the tail of tss.
	sort.Slice(maxs, func(i, j int) bool {
		a := maxs[i].value
		b := maxs[j].value
		if isReverse {
			a, b = b, a
		}
		return lessWithNaNs(a, b)
	})
	for i := range maxs {
		tss[i] = maxs[i].ts
	}
	// The remaining sum must be computed BEFORE fillNaNsAtIdx wipes the
	// losing series' values.
	remainingSumTS := getRemainingSumTimeseries(tss, modifier, ks, remainingSumTagName)
	for i, k := range ks {
		fillNaNsAtIdx(i, k, tss)
	}
	if remainingSumTS != nil {
		tss = append(tss, remainingSumTS)
	}
	tss = removeNaNs(tss)
	reverseSeries(tss)
	return tss
}
|
|
|
|
|
|
|
|
func reverseSeries(tss []*timeseries) {
|
|
|
|
j := len(tss)
|
|
|
|
for i := 0; i < len(tss)/2; i++ {
|
|
|
|
j--
|
|
|
|
tss[i], tss[j] = tss[j], tss[i]
|
|
|
|
}
|
2020-05-19 15:10:52 +02:00
|
|
|
}
|
|
|
|
|
2020-10-20 18:41:48 +02:00
|
|
|
// getRemainingSumTimeseries builds a series holding, per timestamp, the sum
// of the values NOT kept by topk_*/bottomk_* (i.e. the series outside the
// top-k tail of tss). It returns nil when remainingSumTagName is empty or
// tss is empty.
//
// remainingSumTagName may be either "tagname" or "tagname=tagvalue"; in the
// first form the tag value equals the tag name.
func getRemainingSumTimeseries(tss []*timeseries, modifier *metricsql.ModifierExpr, ks []float64, remainingSumTagName string) *timeseries {
	if len(remainingSumTagName) == 0 || len(tss) == 0 {
		return nil
	}
	var dst timeseries
	dst.CopyFromShallowTimestamps(tss[0])
	removeGroupTags(&dst.MetricName, modifier)
	// Split optional "name=value" form.
	tagValue := remainingSumTagName
	n := strings.IndexByte(remainingSumTagName, '=')
	if n >= 0 {
		tagValue = remainingSumTagName[n+1:]
		remainingSumTagName = remainingSumTagName[:n]
	}
	dst.MetricName.RemoveTag(remainingSumTagName)
	dst.MetricName.AddTag(remainingSumTagName, tagValue)
	for i, k := range ks {
		// tss is sorted so the kept top-k series occupy the tail;
		// sum over the head (the dropped series).
		kn := getIntK(k, len(tss))
		var sum float64
		count := 0
		for _, ts := range tss[:len(tss)-kn] {
			v := ts.Values[i]
			if math.IsNaN(v) {
				continue
			}
			sum += v
			count++
		}
		if count == 0 {
			sum = nan
		}
		dst.Values[i] = sum
	}
	return &dst
}
|
|
|
|
|
2019-12-05 18:19:31 +01:00
|
|
|
func fillNaNsAtIdx(idx int, k float64, tss []*timeseries) {
|
2020-10-20 18:41:48 +02:00
|
|
|
kn := getIntK(k, len(tss))
|
|
|
|
for _, ts := range tss[:len(tss)-kn] {
|
|
|
|
ts.Values[idx] = nan
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// getIntK converts the float k into an int clamped to [0, kMax].
// NaN maps to 0.
func getIntK(k float64, kMax int) int {
	if math.IsNaN(k) {
		return 0
	}
	switch kn := int(k); {
	case kn < 0:
		return 0
	case kn > kMax:
		return kMax
	default:
		return kn
	}
}
|
|
|
|
|
|
|
|
// minValue returns the minimum non-NaN value in values.
// NaN is returned when values contains no non-NaN entries.
func minValue(values []float64) float64 {
	result := math.NaN()
	for _, v := range values {
		if !math.IsNaN(v) && (math.IsNaN(result) || v < result) {
			result = v
		}
	}
	return result
}
|
|
|
|
|
|
|
|
// maxValue returns the maximum non-NaN value in values.
// NaN is returned when values contains no non-NaN entries.
func maxValue(values []float64) float64 {
	result := math.NaN()
	for _, v := range values {
		if !math.IsNaN(v) && (math.IsNaN(result) || v > result) {
			result = v
		}
	}
	return result
}
|
|
|
|
|
|
|
|
// avgValue returns the mean of the non-NaN entries in values,
// or NaN when there are none.
func avgValue(values []float64) float64 {
	var sum float64
	n := 0
	for _, v := range values {
		if !math.IsNaN(v) {
			sum += v
			n++
		}
	}
	if n == 0 {
		return math.NaN()
	}
	return sum / float64(n)
}
|
|
|
|
|
|
|
|
// medianValue returns the median of values (the 0.5 quantile).
// NaN entries are dropped by quantile before the calculation.
func medianValue(values []float64) float64 {
	return quantile(0.5, values)
}
|
|
|
|
|
2021-09-30 12:22:52 +02:00
|
|
|
func lastValue(values []float64) float64 {
|
|
|
|
values = skipTrailingNaNs(values)
|
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
return values[len(values)-1]
|
|
|
|
}
|
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
// quantiles calculates the given phis from originValues without modifying originValues, appends them to qs and returns the result.
func quantiles(qs, phis []float64, originValues []float64) []float64 {
	// Work on a pooled copy (NaN-free and sorted) so originValues stays intact.
	a := getFloat64s()
	a.A = prepareForQuantileFloat64(a.A[:0], originValues)
	qs = quantilesSorted(qs, phis, a.A)
	putFloat64s(a)
	return qs
}
|
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
// quantile calculates the given phi from originValues without modifying originValues
func quantile(phi float64, originValues []float64) float64 {
	// Work on a pooled copy (NaN-free and sorted) so originValues stays intact.
	a := getFloat64s()
	a.A = prepareForQuantileFloat64(a.A[:0], originValues)
	q := quantileSorted(phi, a.A)
	putFloat64s(a)
	return q
}
|
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
// prepareForQuantileFloat64 copies items from src to dst but removes NaNs and sorts the dst
func prepareForQuantileFloat64(dst, src []float64) []float64 {
	for _, v := range src {
		if !math.IsNaN(v) {
			dst = append(dst, v)
		}
	}
	sort.Float64s(dst)
	return dst
}
|
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
// quantilesSorted calculates the given phis over a sorted list of values, appends them to qs and returns the result.
|
|
|
|
//
|
|
|
|
// It is expected that values won't contain NaN items.
|
|
|
|
// The implementation mimics Prometheus implementation for compatibility's sake.
|
|
|
|
func quantilesSorted(qs, phis []float64, values []float64) []float64 {
|
|
|
|
for _, phi := range phis {
|
|
|
|
q := quantileSorted(phi, values)
|
|
|
|
qs = append(qs, q)
|
|
|
|
}
|
|
|
|
return qs
|
|
|
|
}
|
|
|
|
|
|
|
|
// quantileSorted calculates the given quantile over a sorted list of values.
//
// It is expected that values won't contain NaN items.
// The implementation mimics Prometheus implementation for compatibility's sake.
func quantileSorted(phi float64, values []float64) float64 {
	switch {
	case len(values) == 0 || math.IsNaN(phi):
		return math.NaN()
	case phi < 0:
		return math.Inf(-1)
	case phi > 1:
		return math.Inf(+1)
	}
	n := float64(len(values))
	rank := phi * (n - 1)

	// Linear interpolation between the two neighboring ranks.
	lowerIndex := math.Max(0, math.Floor(rank))
	upperIndex := math.Min(n-1, lowerIndex+1)
	weight := rank - math.Floor(rank)
	return values[int(lowerIndex)]*(1-weight) + values[int(upperIndex)]*weight
}
|
|
|
|
|
2021-09-16 12:33:53 +02:00
|
|
|
func aggrFuncMAD(tss []*timeseries) []*timeseries {
|
|
|
|
// Calculate medians for each point across tss.
|
|
|
|
medians := getPerPointMedians(tss)
|
|
|
|
// Calculate MAD values multipled by tolerance for each point across tss.
|
|
|
|
// See https://en.wikipedia.org/wiki/Median_absolute_deviation
|
|
|
|
mads := getPerPointMADs(tss, medians)
|
|
|
|
tss[0].Values = append(tss[0].Values[:0], mads...)
|
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
|
|
|
|
// aggrFuncOutliersMAD implements outliers_mad(tolerance, q): it keeps only
// the series that have at least one point deviating from the group's
// per-point median by more than tolerance * MAD.
func aggrFuncOutliersMAD(afa *aggrFuncArg) ([]*timeseries, error) {
	args := afa.args
	if err := expectTransformArgsNum(args, 2); err != nil {
		return nil, err
	}
	tolerances, err := getScalar(args[0], 0)
	if err != nil {
		return nil, err
	}
	afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
		// Calculate medians for each point across tss.
		medians := getPerPointMedians(tss)
		// Calculate MAD values multiplied by tolerance for each point across tss.
		// See https://en.wikipedia.org/wiki/Median_absolute_deviation
		mads := getPerPointMADs(tss, medians)
		for n := range mads {
			mads[n] *= tolerances[n]
		}
		// Leave only time series with at least a single peak above the MAD multiplied by tolerance.
		// Filter in place, reusing the tss backing array.
		tssDst := tss[:0]
		for _, ts := range tss {
			values := ts.Values
			for n, v := range values {
				ad := math.Abs(v - medians[n])
				mad := mads[n]
				if ad > mad {
					tssDst = append(tssDst, ts)
					break
				}
			}
		}
		return tssDst
	}
	return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, true)
}
|
2020-05-19 12:52:29 +02:00
|
|
|
|
2021-09-16 12:33:53 +02:00
|
|
|
func aggrFuncOutliersK(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
|
|
|
if err := expectTransformArgsNum(args, 2); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
ks, err := getScalar(args[0], 0)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
// Calculate medians for each point across tss.
|
|
|
|
medians := getPerPointMedians(tss)
|
2020-05-19 15:10:52 +02:00
|
|
|
// Return topK time series with the highest variance from median.
|
|
|
|
f := func(values []float64) float64 {
|
2020-05-19 12:52:29 +02:00
|
|
|
sum2 := float64(0)
|
2020-05-19 15:10:52 +02:00
|
|
|
for n, v := range values {
|
2020-05-19 12:52:29 +02:00
|
|
|
d := v - medians[n]
|
|
|
|
sum2 += d * d
|
|
|
|
}
|
2020-05-19 15:10:52 +02:00
|
|
|
return sum2
|
2020-05-19 12:52:29 +02:00
|
|
|
}
|
2020-10-20 18:41:48 +02:00
|
|
|
return getRangeTopKTimeseries(tss, &afa.ae.Modifier, ks, "", f, false)
|
2020-05-19 12:52:29 +02:00
|
|
|
}
|
|
|
|
return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, true)
|
|
|
|
}
|
|
|
|
|
2021-09-16 12:33:53 +02:00
|
|
|
func getPerPointMedians(tss []*timeseries) []float64 {
|
|
|
|
if len(tss) == 0 {
|
|
|
|
logger.Panicf("BUG: expecting non-empty tss")
|
|
|
|
}
|
|
|
|
medians := make([]float64, len(tss[0].Values))
|
2021-09-27 17:55:35 +02:00
|
|
|
a := getFloat64s()
|
|
|
|
values := a.A
|
2021-09-16 12:33:53 +02:00
|
|
|
for n := range medians {
|
2021-09-27 17:55:35 +02:00
|
|
|
values = values[:0]
|
2021-09-16 12:33:53 +02:00
|
|
|
for j := range tss {
|
|
|
|
v := tss[j].Values[n]
|
|
|
|
if !math.IsNaN(v) {
|
2021-09-27 17:55:35 +02:00
|
|
|
values = append(values, v)
|
2021-09-16 12:33:53 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
medians[n] = quantile(0.5, values)
|
2021-09-16 12:33:53 +02:00
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
a.A = values
|
|
|
|
putFloat64s(a)
|
2021-09-16 12:33:53 +02:00
|
|
|
return medians
|
|
|
|
}
|
|
|
|
|
|
|
|
func getPerPointMADs(tss []*timeseries, medians []float64) []float64 {
|
|
|
|
mads := make([]float64, len(medians))
|
2021-09-27 17:55:35 +02:00
|
|
|
a := getFloat64s()
|
|
|
|
values := a.A
|
2021-09-16 12:33:53 +02:00
|
|
|
for n, median := range medians {
|
2021-09-27 17:55:35 +02:00
|
|
|
values = values[:0]
|
2021-09-16 12:33:53 +02:00
|
|
|
for j := range tss {
|
|
|
|
v := tss[j].Values[n]
|
|
|
|
if !math.IsNaN(v) {
|
|
|
|
ad := math.Abs(v - median)
|
2021-09-27 17:55:35 +02:00
|
|
|
values = append(values, ad)
|
2021-09-16 12:33:53 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
mads[n] = quantile(0.5, values)
|
2021-09-16 12:33:53 +02:00
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
a.A = values
|
|
|
|
putFloat64s(a)
|
2021-09-16 12:33:53 +02:00
|
|
|
return mads
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func aggrFuncLimitK(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
|
|
|
if err := expectTransformArgsNum(args, 2); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-11-03 15:02:20 +01:00
|
|
|
limits, err := getScalar(args[0], 0)
|
2019-05-22 23:16:55 +02:00
|
|
|
if err != nil {
|
2021-11-03 15:02:20 +01:00
|
|
|
return nil, fmt.Errorf("cannot obtain limit arg: %w", err)
|
|
|
|
}
|
|
|
|
limit := 0
|
|
|
|
if len(limits) > 0 {
|
|
|
|
limit = int(limits[0])
|
|
|
|
}
|
|
|
|
afe := newLimitOffsetAggrFunc(limit, 0)
|
|
|
|
return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncLimitOffset(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
|
|
|
if err := expectTransformArgsNum(args, 3); err != nil {
|
2019-05-22 23:16:55 +02:00
|
|
|
return nil, err
|
|
|
|
}
|
2021-11-03 15:02:20 +01:00
|
|
|
limit, err := getIntNumber(args[0], 0)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot obtain limit arg: %w", err)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2021-11-03 15:02:20 +01:00
|
|
|
offset, err := getIntNumber(args[1], 1)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot obtain offset arg: %w", err)
|
|
|
|
}
|
|
|
|
afe := newLimitOffsetAggrFunc(limit, offset)
|
|
|
|
return aggrFuncExt(afe, args[2], &afa.ae.Modifier, afa.ae.Limit, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
func newLimitOffsetAggrFunc(limit, offset int) func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
if offset < 0 {
|
|
|
|
offset = 0
|
|
|
|
}
|
|
|
|
if limit < 0 {
|
|
|
|
limit = 0
|
|
|
|
}
|
|
|
|
return func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
// Sort series by metricName hash in order to get consistent set of output series
|
|
|
|
// across multiple calls to limitk() and limit_offset() functions.
|
2021-10-16 20:13:58 +02:00
|
|
|
// Sort series by hash in order to guarantee uniform selection across series.
|
|
|
|
type hashSeries struct {
|
|
|
|
h uint64
|
|
|
|
ts *timeseries
|
|
|
|
}
|
|
|
|
hss := make([]hashSeries, len(tss))
|
|
|
|
d := xxhash.New()
|
|
|
|
for i, ts := range tss {
|
|
|
|
h := getHash(d, &ts.MetricName)
|
|
|
|
hss[i] = hashSeries{
|
|
|
|
h: h,
|
|
|
|
ts: ts,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
sort.Slice(hss, func(i, j int) bool {
|
|
|
|
return hss[i].h < hss[j].h
|
2021-10-08 18:53:50 +02:00
|
|
|
})
|
2021-10-16 20:13:58 +02:00
|
|
|
for i, hs := range hss {
|
|
|
|
tss[i] = hs.ts
|
|
|
|
}
|
2021-11-03 15:02:20 +01:00
|
|
|
if offset > len(tss) {
|
|
|
|
return nil
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2021-11-03 15:02:20 +01:00
|
|
|
tss = tss[offset:]
|
|
|
|
if limit < len(tss) {
|
|
|
|
tss = tss[:limit]
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
return tss
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-10-16 20:13:58 +02:00
|
|
|
func getHash(d *xxhash.Digest, mn *storage.MetricName) uint64 {
|
|
|
|
d.Reset()
|
|
|
|
_, _ = d.Write(mn.MetricGroup)
|
|
|
|
for _, tag := range mn.Tags {
|
|
|
|
_, _ = d.Write(tag.Key)
|
|
|
|
_, _ = d.Write(tag.Value)
|
|
|
|
}
|
|
|
|
return d.Sum64()
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2021-08-27 17:37:18 +02:00
|
|
|
func aggrFuncQuantiles(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
|
|
|
if len(args) < 3 {
|
|
|
|
return nil, fmt.Errorf("unexpected number of args: %d; expecting at least 3 args", len(args))
|
|
|
|
}
|
|
|
|
dstLabel, err := getString(args[0], 0)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot obtain dstLabel: %w", err)
|
|
|
|
}
|
|
|
|
phiArgs := args[1 : len(args)-1]
|
2021-09-17 11:33:40 +02:00
|
|
|
phis := make([]float64, len(phiArgs))
|
2021-08-27 17:37:18 +02:00
|
|
|
for i, phiArg := range phiArgs {
|
2021-09-17 11:33:40 +02:00
|
|
|
phisLocal, err := getScalar(phiArg, i+1)
|
2021-08-27 17:37:18 +02:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if len(phis) == 0 {
|
|
|
|
logger.Panicf("BUG: expecting at least a single sample")
|
|
|
|
}
|
2021-09-17 11:33:40 +02:00
|
|
|
phis[i] = phisLocal[0]
|
|
|
|
}
|
|
|
|
argOrig := args[len(args)-1]
|
|
|
|
afe := func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
tssDst := make([]*timeseries, len(phiArgs))
|
|
|
|
for j := range tssDst {
|
|
|
|
ts := ×eries{}
|
|
|
|
ts.CopyFromShallowTimestamps(tss[0])
|
2021-08-27 17:37:18 +02:00
|
|
|
ts.MetricName.RemoveTag(dstLabel)
|
2021-09-17 11:33:40 +02:00
|
|
|
ts.MetricName.AddTag(dstLabel, fmt.Sprintf("%g", phis[j]))
|
|
|
|
tssDst[j] = ts
|
|
|
|
}
|
2021-09-27 17:02:41 +02:00
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
b := getFloat64s()
|
|
|
|
qs := b.A
|
|
|
|
a := getFloat64s()
|
|
|
|
values := a.A
|
2021-09-17 11:33:40 +02:00
|
|
|
for n := range tss[0].Values {
|
2021-09-27 17:55:35 +02:00
|
|
|
values = values[:0]
|
2021-09-17 11:33:40 +02:00
|
|
|
for j := range tss {
|
2021-09-27 17:55:35 +02:00
|
|
|
values = append(values, tss[j].Values[n])
|
2021-09-17 11:33:40 +02:00
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
qs = quantiles(qs[:0], phis, values)
|
2021-09-17 11:33:40 +02:00
|
|
|
for j := range tssDst {
|
|
|
|
tssDst[j].Values[n] = qs[j]
|
|
|
|
}
|
2021-08-27 17:37:18 +02:00
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
a.A = values
|
|
|
|
putFloat64s(a)
|
|
|
|
b.A = qs
|
|
|
|
putFloat64s(b)
|
2021-09-17 11:33:40 +02:00
|
|
|
return tssDst
|
2021-08-27 17:37:18 +02:00
|
|
|
}
|
2021-09-17 11:33:40 +02:00
|
|
|
return aggrFuncExt(afe, argOrig, &afa.ae.Modifier, afa.ae.Limit, false)
|
2021-08-27 17:37:18 +02:00
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func aggrFuncQuantile(afa *aggrFuncArg) ([]*timeseries, error) {
|
|
|
|
args := afa.args
|
|
|
|
if err := expectTransformArgsNum(args, 2); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
phis, err := getScalar(args[0], 0)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
afe := newAggrQuantileFunc(phis)
|
2020-05-12 18:06:54 +02:00
|
|
|
return aggrFuncExt(afe, args[1], &afa.ae.Modifier, afa.ae.Limit, false)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func aggrFuncMedian(afa *aggrFuncArg) ([]*timeseries, error) {
|
2020-08-15 00:15:01 +02:00
|
|
|
tss, err := getAggrTimeseries(afa.args)
|
|
|
|
if err != nil {
|
2019-05-22 23:16:55 +02:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
phis := evalNumber(afa.ec, 0.5)[0].Values
|
|
|
|
afe := newAggrQuantileFunc(phis)
|
2020-08-15 00:15:01 +02:00
|
|
|
return aggrFuncExt(afe, tss, &afa.ae.Modifier, afa.ae.Limit, false)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
|
2020-10-20 18:41:48 +02:00
|
|
|
func newAggrQuantileFunc(phis []float64) func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
|
|
|
return func(tss []*timeseries, modifier *metricsql.ModifierExpr) []*timeseries {
|
2019-05-22 23:16:55 +02:00
|
|
|
dst := tss[0]
|
2021-09-27 17:55:35 +02:00
|
|
|
a := getFloat64s()
|
|
|
|
values := a.A
|
2019-05-22 23:16:55 +02:00
|
|
|
for n := range dst.Values {
|
2021-09-27 17:02:41 +02:00
|
|
|
values = values[:0]
|
2020-05-19 12:52:29 +02:00
|
|
|
for j := range tss {
|
2021-09-27 17:55:35 +02:00
|
|
|
values = append(values, tss[j].Values[n])
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
dst.Values[n] = quantile(phis[n], values)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2021-09-27 17:55:35 +02:00
|
|
|
a.A = values
|
|
|
|
putFloat64s(a)
|
2019-06-06 16:07:28 +02:00
|
|
|
tss[0] = dst
|
2019-05-22 23:16:55 +02:00
|
|
|
return tss[:1]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func lessWithNaNs(a, b float64) bool {
|
|
|
|
if math.IsNaN(a) {
|
|
|
|
return !math.IsNaN(b)
|
|
|
|
}
|
|
|
|
return a < b
|
|
|
|
}
|