package promql

import (
	"fmt"
	"math"
	"sort"
	"strconv"
	"strings"

	"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
	"github.com/VictoriaMetrics/VictoriaMetrics/lib/storage"
	"github.com/VictoriaMetrics/metrics"
	"github.com/VictoriaMetrics/metricsql"
	"github.com/valyala/histogram"
)

var aggrFuncs = map[string]aggrFunc{
	// See https://prometheus.io/docs/prometheus/latest/querying/operators/#aggregation-operators
	"sum":          newAggrFunc(aggrFuncSum),
	"min":          newAggrFunc(aggrFuncMin),
	"max":          newAggrFunc(aggrFuncMax),
	"avg":          newAggrFunc(aggrFuncAvg),
	"stddev":       newAggrFunc(aggrFuncStddev),
	"stdvar":       newAggrFunc(aggrFuncStdvar),
	"count":        newAggrFunc(aggrFuncCount),
	"count_values": aggrFuncCountValues,
	"bottomk":      newAggrFuncTopK(true),
	"topk":         newAggrFuncTopK(false),
	"quantile":     aggrFuncQuantile,

	// PromQL extension funcs
	"median":         aggrFuncMedian,
	"limitk":         aggrFuncLimitK,
	"distinct":       newAggrFunc(aggrFuncDistinct),
	"sum2":           newAggrFunc(aggrFuncSum2),
	"geomean":        newAggrFunc(aggrFuncGeomean),
	"histogram":      newAggrFunc(aggrFuncHistogram),
	"topk_min":       newAggrFuncRangeTopK(minValue, false),
	"topk_max":       newAggrFuncRangeTopK(maxValue, false),
	"topk_avg":       newAggrFuncRangeTopK(avgValue, false),
	"topk_median":    newAggrFuncRangeTopK(medianValue, false),
	"bottomk_min":    newAggrFuncRangeTopK(minValue, true),
	"bottomk_max":    newAggrFuncRangeTopK(maxValue, true),
	"bottomk_avg":    newAggrFuncRangeTopK(avgValue, true),
	"bottomk_median": newAggrFuncRangeTopK(medianValue, true),
}

type aggrFunc func(afa *aggrFuncArg) ([]*timeseries, error)

type aggrFuncArg struct {
	args [][]*timeseries
	ae   *metricsql.AggrFuncExpr
	ec   *EvalConfig
}
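
// getAggrFunc returns the aggregate function for the given name.
// The lookup is case-insensitive; nil is returned for unknown names.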
func getAggrFunc(s string) aggrFunc {
	s = strings.ToLower(s)
	return aggrFuncs[s]
}
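
// newAggrFunc wraps afe into an aggrFunc, which expects a single argument
// and applies afe to every group of series produced by aggrFuncExt.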
func newAggrFunc(afe func(tss []*timeseries) []*timeseries) aggrFunc {
	return func(afa *aggrFuncArg) ([]*timeseries, error) {
		args := afa.args
		if err := expectTransformArgsNum(args, 1); err != nil {
			return nil, err
		}
		return aggrFuncExt(afe, args[0], &afa.ae.Modifier, false)
	}
}
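
// removeGroupTags removes tags from metricName according to the
// `by` / `without` grouping modifier.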
func removeGroupTags(metricName *storage.MetricName, modifier *metricsql.ModifierExpr) {
	groupOp := strings.ToLower(modifier.Op)
	switch groupOp {
	case "", "by":
		metricName.RemoveTagsOn(modifier.Args)
	case "without":
		metricName.RemoveTagsIgnoring(modifier.Args)
	default:
		logger.Panicf("BUG: unknown group modifier: %q", groupOp)
	}
}
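
// aggrFuncExt groups argOrig by the labels left after applying modifier
// and calls afe for every group. If keepOriginal is set, afe receives
// the series with their original metric names instead of the grouped copies.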
func aggrFuncExt(afe func(tss []*timeseries) []*timeseries, argOrig []*timeseries, modifier *metricsql.ModifierExpr, keepOriginal bool) ([]*timeseries, error) {
	arg := copyTimeseriesMetricNames(argOrig)

	// Perform grouping.
	m := make(map[string][]*timeseries)
	bb := bbPool.Get()
	for i, ts := range arg {
		removeGroupTags(&ts.MetricName, modifier)
		bb.B = marshalMetricNameSorted(bb.B[:0], &ts.MetricName)
		if keepOriginal {
			ts = argOrig[i]
		}
		m[string(bb.B)] = append(m[string(bb.B)], ts)
	}
	bbPool.Put(bb)

	srcTssCount := 0
	dstTssCount := 0
	rvs := make([]*timeseries, 0, len(m))
	for _, tss := range m {
		rv := afe(tss)
		rvs = append(rvs, rv...)
		srcTssCount += len(tss)
		dstTssCount += len(rv)
		if dstTssCount > 2000 && dstTssCount > 16*srcTssCount {
			// This looks like count_values explosion.
			return nil, fmt.Errorf(`too many timeseries after aggregation; got %d; want less than %d`, dstTssCount, 16*srcTssCount)
		}
	}
	return rvs, nil
}

func aggrFuncSum(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - nothing to sum.
		return tss
	}
	dst := tss[0]
	for i := range dst.Values {
		sum := float64(0)
		count := 0
		for _, ts := range tss {
			if math.IsNaN(ts.Values[i]) {
				continue
			}
			sum += ts.Values[i]
			count++
		}
		if count == 0 {
			sum = nan
		}
		dst.Values[i] = sum
	}
	return tss[:1]
}

func aggrFuncSum2(tss []*timeseries) []*timeseries {
	dst := tss[0]
	for i := range dst.Values {
		sum2 := float64(0)
		count := 0
		for _, ts := range tss {
			v := ts.Values[i]
			if math.IsNaN(v) {
				continue
			}
			sum2 += v * v
			count++
		}
		if count == 0 {
			sum2 = nan
		}
		dst.Values[i] = sum2
	}
	return tss[:1]
}

func aggrFuncGeomean(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - nothing to geomean.
		return tss
	}
	dst := tss[0]
	for i := range dst.Values {
		p := 1.0
		count := 0
		for _, ts := range tss {
			v := ts.Values[i]
			if math.IsNaN(v) {
				continue
			}
			p *= v
			count++
		}
		if count == 0 {
			p = nan
		}
		dst.Values[i] = math.Pow(p, 1/float64(count))
	}
	return tss[:1]
}
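
// aggrFuncHistogram puts the values of all the input series at every point
// into a `vmrange` histogram and returns one series per non-zero bucket,
// converted to `le` buckets via vmrangeBucketsToLE.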
func aggrFuncHistogram(tss []*timeseries) []*timeseries {
	var h metrics.Histogram
	m := make(map[string]*timeseries)
	for i := range tss[0].Values {
		h.Reset()
		for _, ts := range tss {
			v := ts.Values[i]
			h.Update(v)
		}
		h.VisitNonZeroBuckets(func(vmrange string, count uint64) {
			ts := m[vmrange]
			if ts == nil {
				ts = &timeseries{}
				ts.CopyFromShallowTimestamps(tss[0])
				ts.MetricName.RemoveTag("vmrange")
				ts.MetricName.AddTag("vmrange", vmrange)
				values := ts.Values
				for k := range values {
					values[k] = 0
				}
				m[vmrange] = ts
			}
			ts.Values[i] = float64(count)
		})
	}
	rvs := make([]*timeseries, 0, len(m))
	for _, ts := range m {
		rvs = append(rvs, ts)
	}
	return vmrangeBucketsToLE(rvs)
}

func aggrFuncMin(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - nothing to min.
		return tss
	}
	dst := tss[0]
	for i := range dst.Values {
		min := dst.Values[i]
		for _, ts := range tss {
			if math.IsNaN(min) || ts.Values[i] < min {
				min = ts.Values[i]
			}
		}
		dst.Values[i] = min
	}
	return tss[:1]
}

func aggrFuncMax(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - nothing to max.
		return tss
	}
	dst := tss[0]
	for i := range dst.Values {
		max := dst.Values[i]
		for _, ts := range tss {
			if math.IsNaN(max) || ts.Values[i] > max {
				max = ts.Values[i]
			}
		}
		dst.Values[i] = max
	}
	return tss[:1]
}

func aggrFuncAvg(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - nothing to avg.
		return tss
	}
	dst := tss[0]
	for i := range dst.Values {
		// Do not use `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation,
		// since it is slower and has no obvious benefits in increased precision.
		var sum float64
		count := 0
		for _, ts := range tss {
			v := ts.Values[i]
			if math.IsNaN(v) {
				continue
			}
			count++
			sum += v
		}
		avg := nan
		if count > 0 {
			avg = sum / float64(count)
		}
		dst.Values[i] = avg
	}
	return tss[:1]
}

func aggrFuncStddev(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - stddev over a single time series is zero
		values := tss[0].Values
		for i, v := range values {
			if !math.IsNaN(v) {
				values[i] = 0
			}
		}
		return tss
	}
	rvs := aggrFuncStdvar(tss)
	dst := rvs[0]
	for i, v := range dst.Values {
		dst.Values[i] = math.Sqrt(v)
	}
	return rvs
}
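
// aggrFuncStdvar calculates the standard variance across the input series
// at every point using Welford's online algorithm.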
func aggrFuncStdvar(tss []*timeseries) []*timeseries {
	if len(tss) == 1 {
		// Fast path - stdvar over a single time series is zero
		values := tss[0].Values
		for i, v := range values {
			if !math.IsNaN(v) {
				values[i] = 0
			}
		}
		return tss
	}
	dst := tss[0]
	for i := range dst.Values {
		// See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation
		var avg float64
		var count float64
		var q float64
		for _, ts := range tss {
			v := ts.Values[i]
			if math.IsNaN(v) {
				continue
			}
			count++
			avgNew := avg + (v-avg)/count
			q += (v - avg) * (v - avgNew)
			avg = avgNew
		}
		if count == 0 {
			q = nan
		}
		dst.Values[i] = q / count
	}
	return tss[:1]
}

func aggrFuncCount(tss []*timeseries) []*timeseries {
	dst := tss[0]
	for i := range dst.Values {
		count := 0
		for _, ts := range tss {
			if math.IsNaN(ts.Values[i]) {
				continue
			}
			count++
		}
		v := float64(count)
		if count == 0 {
			v = nan
		}
		dst.Values[i] = v
	}
	return tss[:1]
}

func aggrFuncDistinct(tss []*timeseries) []*timeseries {
	dst := tss[0]
	m := make(map[float64]struct{}, len(tss))
	for i := range dst.Values {
		for _, ts := range tss {
			v := ts.Values[i]
			if math.IsNaN(v) {
				continue
			}
			m[v] = struct{}{}
		}
		n := float64(len(m))
		if n == 0 {
			n = nan
		}
		dst.Values[i] = n
		for k := range m {
			delete(m, k)
		}
	}
	return tss[:1]
}
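
// aggrFuncCountValues implements count_values(label, q): for every distinct
// value in the input it returns a series with that value stored in `label`,
// counting how many input series carry the value at each point.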
func aggrFuncCountValues(afa *aggrFuncArg) ([]*timeseries, error) {
	args := afa.args
	if err := expectTransformArgsNum(args, 2); err != nil {
		return nil, err
	}
	dstLabel, err := getString(args[0], 0)
	if err != nil {
		return nil, err
	}

	// Remove dstLabel from grouping like Prometheus does.
	modifier := &afa.ae.Modifier
	switch strings.ToLower(modifier.Op) {
	case "without":
		modifier.Args = append(modifier.Args, dstLabel)
	case "by":
		dstArgs := modifier.Args[:0]
		for _, arg := range modifier.Args {
			if arg == dstLabel {
				continue
			}
			dstArgs = append(dstArgs, arg)
		}
		modifier.Args = dstArgs
	default:
		// Do nothing
	}

	afe := func(tss []*timeseries) []*timeseries {
		m := make(map[float64]bool)
		for _, ts := range tss {
			for _, v := range ts.Values {
				if math.IsNaN(v) {
					continue
				}
				m[v] = true
			}
		}
		values := make([]float64, 0, len(m))
		for v := range m {
			values = append(values, v)
		}
		sort.Float64s(values)

		var rvs []*timeseries
		for _, v := range values {
			var dst timeseries
			dst.CopyFromShallowTimestamps(tss[0])
			dst.MetricName.RemoveTag(dstLabel)
			dst.MetricName.AddTag(dstLabel, strconv.FormatFloat(v, 'g', -1, 64))
			for i := range dst.Values {
				count := 0
				for _, ts := range tss {
					if ts.Values[i] == v {
						count++
					}
				}
				n := float64(count)
				if n == 0 {
					n = nan
				}
				dst.Values[i] = n
			}
			rvs = append(rvs, &dst)
		}
		return rvs
	}
	return aggrFuncExt(afe, args[1], &afa.ae.Modifier, false)
}
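
// newAggrFuncTopK returns the aggregate function for topk (isReverse=false)
// or bottomk (isReverse=true): at every point it keeps the k series with the
// highest (or lowest) value and fills the remaining series with NaN.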
func newAggrFuncTopK(isReverse bool) aggrFunc {
	return func(afa *aggrFuncArg) ([]*timeseries, error) {
		args := afa.args
		if err := expectTransformArgsNum(args, 2); err != nil {
			return nil, err
		}
		ks, err := getScalar(args[0], 0)
		if err != nil {
			return nil, err
		}
		afe := func(tss []*timeseries) []*timeseries {
			for n := range tss[0].Values {
				sort.Slice(tss, func(i, j int) bool {
					a := tss[i].Values[n]
					b := tss[j].Values[n]
					if isReverse {
						a, b = b, a
					}
					return lessWithNaNs(a, b)
				})
				fillNaNsAtIdx(n, ks[n], tss)
			}
			return removeNaNs(tss)
		}
		return aggrFuncExt(afe, args[1], &afa.ae.Modifier, true)
	}
}

type tsWithValue struct {
	ts    *timeseries
	value float64
}
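
// newAggrFuncRangeTopK powers topk_min, topk_max, topk_avg, topk_median and
// their bottomk_* counterparts: series are ranked once by f over all their
// values instead of independently at every point.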
func newAggrFuncRangeTopK(f func(values []float64) float64, isReverse bool) aggrFunc {
	return func(afa *aggrFuncArg) ([]*timeseries, error) {
		args := afa.args
		if err := expectTransformArgsNum(args, 2); err != nil {
			return nil, err
		}
		ks, err := getScalar(args[0], 0)
		if err != nil {
			return nil, err
		}
		afe := func(tss []*timeseries) []*timeseries {
			maxs := make([]tsWithValue, len(tss))
			for i, ts := range tss {
				value := f(ts.Values)
				maxs[i] = tsWithValue{
					ts:    ts,
					value: value,
				}
			}
			sort.Slice(maxs, func(i, j int) bool {
				a := maxs[i].value
				b := maxs[j].value
				if isReverse {
					a, b = b, a
				}
				return lessWithNaNs(a, b)
			})
			for i := range maxs {
				tss[i] = maxs[i].ts
			}
			for i, k := range ks {
				fillNaNsAtIdx(i, k, tss)
			}
			return removeNaNs(tss)
		}
		return aggrFuncExt(afe, args[1], &afa.ae.Modifier, true)
	}
}
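
// fillNaNsAtIdx sets the value at idx to NaN for all the series in tss
// except the last k entries.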
func fillNaNsAtIdx(idx int, k float64, tss []*timeseries) {
	if math.IsNaN(k) {
		k = 0
	}
	kn := int(k)
	if kn < 0 {
		kn = 0
	}
	if kn > len(tss) {
		kn = len(tss)
	}
	for _, ts := range tss[:len(tss)-kn] {
		ts.Values[idx] = nan
	}
}

func minValue(values []float64) float64 {
	if len(values) == 0 {
		return nan
	}
	min := values[0]
	for _, v := range values[1:] {
		if v < min {
			min = v
		}
	}
	return min
}

func maxValue(values []float64) float64 {
	if len(values) == 0 {
		return nan
	}
	max := values[0]
	for _, v := range values[1:] {
		if v > max {
			max = v
		}
	}
	return max
}

func avgValue(values []float64) float64 {
	sum := float64(0)
	count := 0
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		count++
		sum += v
	}
	if count == 0 {
		return nan
	}
	return sum / float64(count)
}

func medianValue(values []float64) float64 {
	h := histogram.GetFast()
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		h.Update(v)
	}
	value := h.Quantile(0.5)
	histogram.PutFast(h)
	return value
}

func aggrFuncLimitK(afa *aggrFuncArg) ([]*timeseries, error) {
	args := afa.args
	if err := expectTransformArgsNum(args, 2); err != nil {
		return nil, err
	}
	ks, err := getScalar(args[0], 0)
	if err != nil {
		return nil, err
	}
	maxK := 0
	for _, kf := range ks {
		k := int(kf)
		if k > maxK {
			maxK = k
		}
	}
	afe := func(tss []*timeseries) []*timeseries {
		if len(tss) > maxK {
			tss = tss[:maxK]
		}
		for i, kf := range ks {
			k := int(kf)
			if k < 0 {
				k = 0
			}
			for j := k; j < len(tss); j++ {
				tss[j].Values[i] = nan
			}
		}
		return tss
	}
	return aggrFuncExt(afe, args[1], &afa.ae.Modifier, true)
}

func aggrFuncQuantile(afa *aggrFuncArg) ([]*timeseries, error) {
	args := afa.args
	if err := expectTransformArgsNum(args, 2); err != nil {
		return nil, err
	}
	phis, err := getScalar(args[0], 0)
	if err != nil {
		return nil, err
	}
	afe := newAggrQuantileFunc(phis)
	return aggrFuncExt(afe, args[1], &afa.ae.Modifier, false)
}

func aggrFuncMedian(afa *aggrFuncArg) ([]*timeseries, error) {
	args := afa.args
	if err := expectTransformArgsNum(args, 1); err != nil {
		return nil, err
	}
	phis := evalNumber(afa.ec, 0.5)[0].Values
	afe := newAggrQuantileFunc(phis)
	return aggrFuncExt(afe, args[0], &afa.ae.Modifier, false)
}
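
// newAggrQuantileFunc returns an aggregate function, which calculates the
// per-point phi-quantile over the input series.
// phi values outside [0...1] are clamped; NaN phi is treated as 1.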
func newAggrQuantileFunc(phis []float64) func(tss []*timeseries) []*timeseries {
	return func(tss []*timeseries) []*timeseries {
		dst := tss[0]
		for n := range dst.Values {
			sort.Slice(tss, func(i, j int) bool {
				a := tss[i].Values[n]
				b := tss[j].Values[n]
				return lessWithNaNs(a, b)
			})
			phi := phis[n]
			if math.IsNaN(phi) {
				phi = 1
			}
			if phi < 0 {
				phi = 0
			}
			if phi > 1 {
				phi = 1
			}
			idx := int(math.Round(float64(len(tss)-1) * phi))
			dst.Values[n] = tss[idx].Values[n]
		}
		tss[0] = dst
		return tss[:1]
	}
}
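
// lessWithNaNs reports whether a < b, ordering NaNs before any other value.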
func lessWithNaNs(a, b float64) bool {
	if math.IsNaN(a) {
		return !math.IsNaN(b)
	}
	return a < b
}