2019-05-22 23:16:55 +02:00
|
|
|
package promql
|
|
|
|
|
|
|
|
import (
|
2020-04-20 18:41:59 +02:00
|
|
|
"flag"
|
2019-05-22 23:16:55 +02:00
|
|
|
"fmt"
|
|
|
|
"math"
|
2024-02-22 12:39:07 +01:00
|
|
|
"strconv"
|
2019-05-22 23:16:55 +02:00
|
|
|
"strings"
|
|
|
|
"sync"
|
|
|
|
|
2023-09-25 14:01:00 +02:00
|
|
|
"github.com/VictoriaMetrics/metrics"
|
|
|
|
"github.com/VictoriaMetrics/metricsql"
|
|
|
|
|
2024-02-22 12:39:07 +01:00
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/bytesutil"
|
2019-05-22 23:16:55 +02:00
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/decimal"
|
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/logger"
|
2020-01-03 22:50:47 +01:00
|
|
|
"github.com/VictoriaMetrics/VictoriaMetrics/lib/storage"
|
2019-05-22 23:16:55 +02:00
|
|
|
)
|
|
|
|
|
2020-11-10 00:18:08 +01:00
|
|
|
// minStalenessInterval is used in doInternal as a lower bound for the
// inferred staleness interval (maxPrevInterval) between samples.
var minStalenessInterval = flag.Duration("search.minStalenessInterval", 0, "The minimum interval for staleness calculations. "+
	"This flag could be useful for removing gaps on graphs generated from time series with irregular intervals between samples. "+
	"See also '-search.maxStalenessInterval'")
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
// rollupFuncs maps MetricsQL rollup function names to constructors
// for the corresponding rollup function implementations.
//
// Entries marked with `+ rollupFuncsRemoveCounterResets` additionally rely on
// counter reset removal applied to input samples beforehand - see rollupFuncsRemoveCounterResets.
var rollupFuncs = map[string]newRollupFunc{
	"absent_over_time": newRollupFuncOneArg(rollupAbsent),
	"aggr_over_time": newRollupFuncTwoArgs(rollupFake),
	"ascent_over_time": newRollupFuncOneArg(rollupAscentOverTime),
	"avg_over_time": newRollupFuncOneArg(rollupAvg),
	"changes": newRollupFuncOneArg(rollupChanges),
	"changes_prometheus": newRollupFuncOneArg(rollupChangesPrometheus),
	"count_eq_over_time": newRollupCountEQ,
	"count_gt_over_time": newRollupCountGT,
	"count_le_over_time": newRollupCountLE,
	"count_ne_over_time": newRollupCountNE,
	"count_over_time": newRollupFuncOneArg(rollupCount),
	"count_values_over_time": newRollupCountValues,
	"decreases_over_time": newRollupFuncOneArg(rollupDecreases),
	"default_rollup": newRollupFuncOneArg(rollupDefault), // default rollup func
	"delta": newRollupFuncOneArg(rollupDelta),
	"delta_prometheus": newRollupFuncOneArg(rollupDeltaPrometheus),
	"deriv": newRollupFuncOneArg(rollupDerivSlow),
	"deriv_fast": newRollupFuncOneArg(rollupDerivFast),
	"descent_over_time": newRollupFuncOneArg(rollupDescentOverTime),
	"distinct_over_time": newRollupFuncOneArg(rollupDistinct),
	"duration_over_time": newRollupDurationOverTime,
	"first_over_time": newRollupFuncOneArg(rollupFirst),
	"geomean_over_time": newRollupFuncOneArg(rollupGeomean),
	"histogram_over_time": newRollupFuncOneArg(rollupHistogram),
	"hoeffding_bound_lower": newRollupHoeffdingBoundLower,
	"hoeffding_bound_upper": newRollupHoeffdingBoundUpper,
	"holt_winters": newRollupHoltWinters,
	"idelta": newRollupFuncOneArg(rollupIdelta),
	"ideriv": newRollupFuncOneArg(rollupIderiv),
	"increase": newRollupFuncOneArg(rollupDelta), // + rollupFuncsRemoveCounterResets
	"increase_prometheus": newRollupFuncOneArg(rollupDeltaPrometheus), // + rollupFuncsRemoveCounterResets
	"increase_pure": newRollupFuncOneArg(rollupIncreasePure), // + rollupFuncsRemoveCounterResets
	"increases_over_time": newRollupFuncOneArg(rollupIncreases),
	"integrate": newRollupFuncOneArg(rollupIntegrate),
	"irate": newRollupFuncOneArg(rollupIderiv), // + rollupFuncsRemoveCounterResets
	"lag": newRollupFuncOneArg(rollupLag),
	"last_over_time": newRollupFuncOneArg(rollupLast),
	"lifetime": newRollupFuncOneArg(rollupLifetime),
	"mad_over_time": newRollupFuncOneArg(rollupMAD),
	"max_over_time": newRollupFuncOneArg(rollupMax),
	"median_over_time": newRollupFuncOneArg(rollupMedian),
	"min_over_time": newRollupFuncOneArg(rollupMin),
	"mode_over_time": newRollupFuncOneArg(rollupModeOverTime),
	"outlier_iqr_over_time": newRollupFuncOneArg(rollupOutlierIQR),
	"predict_linear": newRollupPredictLinear,
	"present_over_time": newRollupFuncOneArg(rollupPresent),
	"quantile_over_time": newRollupQuantile,
	"quantiles_over_time": newRollupQuantiles,
	"range_over_time": newRollupFuncOneArg(rollupRange),
	"rate": newRollupFuncOneArg(rollupDerivFast), // + rollupFuncsRemoveCounterResets
	"rate_over_sum": newRollupFuncOneArg(rollupRateOverSum),
	"resets": newRollupFuncOneArg(rollupResets),
	// rollup* functions are expanded into multiple rollupConfigs in getRollupConfigs;
	// rollupFake is never called for them.
	"rollup": newRollupFuncOneOrTwoArgs(rollupFake),
	"rollup_candlestick": newRollupFuncOneOrTwoArgs(rollupFake),
	"rollup_delta": newRollupFuncOneOrTwoArgs(rollupFake),
	"rollup_deriv": newRollupFuncOneOrTwoArgs(rollupFake),
	"rollup_increase": newRollupFuncOneOrTwoArgs(rollupFake), // + rollupFuncsRemoveCounterResets
	"rollup_rate": newRollupFuncOneOrTwoArgs(rollupFake), // + rollupFuncsRemoveCounterResets
	"rollup_scrape_interval": newRollupFuncOneOrTwoArgs(rollupFake),
	"scrape_interval": newRollupFuncOneArg(rollupScrapeInterval),
	"share_eq_over_time": newRollupShareEQ,
	"share_gt_over_time": newRollupShareGT,
	"share_le_over_time": newRollupShareLE,
	"stale_samples_over_time": newRollupFuncOneArg(rollupStaleSamples),
	"stddev_over_time": newRollupFuncOneArg(rollupStddev),
	"stdvar_over_time": newRollupFuncOneArg(rollupStdvar),
	"sum_eq_over_time": newRollupSumEQ,
	"sum_gt_over_time": newRollupSumGT,
	"sum_le_over_time": newRollupSumLE,
	"sum_over_time": newRollupFuncOneArg(rollupSum),
	"sum2_over_time": newRollupFuncOneArg(rollupSum2),
	"tfirst_over_time": newRollupFuncOneArg(rollupTfirst),
	// `timestamp` function must return timestamp for the last datapoint on the current window
	// in order to properly handle offset and timestamps unaligned to the current step.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/415 for details.
	"timestamp": newRollupFuncOneArg(rollupTlast),
	"timestamp_with_name": newRollupFuncOneArg(rollupTlast), // + rollupFuncsKeepMetricName
	"tlast_change_over_time": newRollupFuncOneArg(rollupTlastChange),
	"tlast_over_time": newRollupFuncOneArg(rollupTlast),
	"tmax_over_time": newRollupFuncOneArg(rollupTmax),
	"tmin_over_time": newRollupFuncOneArg(rollupTmin),
	"zscore_over_time": newRollupFuncOneArg(rollupZScoreOverTime),
}
|
|
|
|
|
2024-02-21 17:47:49 +01:00
|
|
|
// needSilenceIntervalForRollupFunc contains functions, which need the previous sample
// before the lookbehind window for proper calculations.
//
// All the rollup functions, which do not rely on the previous sample
// before the lookbehind window (aka prevValue and realPrevValue), do not need silence interval.
var needSilenceIntervalForRollupFunc = map[string]bool{
	"ascent_over_time": true,
	"changes": true,
	"decreases_over_time": true,
	// The default_rollup implicitly relies on the previous samples in order to fill gaps.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/5388
	"default_rollup": true,
	"delta": true,
	"deriv_fast": true,
	"descent_over_time": true,
	"idelta": true,
	"ideriv": true,
	"increase": true,
	"increase_pure": true,
	"increases_over_time": true,
	"integrate": true,
	"irate": true,
	"lag": true,
	"lifetime": true,
	"rate": true,
	"resets": true,
	"rollup": true,
	"rollup_candlestick": true,
	"rollup_delta": true,
	"rollup_deriv": true,
	"rollup_increase": true,
	"rollup_rate": true,
	"rollup_scrape_interval": true,
	"scrape_interval": true,
	"tlast_change_over_time": true,
}
|
|
|
|
|
2020-01-10 20:16:14 +01:00
|
|
|
// rollupAggrFuncs are functions that can be passed to `aggr_over_time()`.
//
// Keys here are validated by getRollupAggrFuncNames.
var rollupAggrFuncs = map[string]rollupFunc{
	"absent_over_time": rollupAbsent,
	"ascent_over_time": rollupAscentOverTime,
	"avg_over_time": rollupAvg,
	"changes": rollupChanges,
	"count_over_time": rollupCount,
	"decreases_over_time": rollupDecreases,
	"default_rollup": rollupDefault,
	"delta": rollupDelta,
	"deriv": rollupDerivSlow,
	"deriv_fast": rollupDerivFast,
	"descent_over_time": rollupDescentOverTime,
	"distinct_over_time": rollupDistinct,
	"first_over_time": rollupFirst,
	"geomean_over_time": rollupGeomean,
	"idelta": rollupIdelta,
	"ideriv": rollupIderiv,
	"increase": rollupDelta,
	"increase_pure": rollupIncreasePure,
	"increases_over_time": rollupIncreases,
	"integrate": rollupIntegrate,
	"irate": rollupIderiv,
	"iqr_over_time": rollupOutlierIQR,
	"lag": rollupLag,
	"last_over_time": rollupLast,
	"lifetime": rollupLifetime,
	"mad_over_time": rollupMAD,
	"max_over_time": rollupMax,
	"median_over_time": rollupMedian,
	"min_over_time": rollupMin,
	"mode_over_time": rollupModeOverTime,
	"present_over_time": rollupPresent,
	"range_over_time": rollupRange,
	"rate": rollupDerivFast,
	"rate_over_sum": rollupRateOverSum,
	"resets": rollupResets,
	"scrape_interval": rollupScrapeInterval,
	"stale_samples_over_time": rollupStaleSamples,
	"stddev_over_time": rollupStddev,
	"stdvar_over_time": rollupStdvar,
	"sum_over_time": rollupSum,
	"sum2_over_time": rollupSum2,
	"tfirst_over_time": rollupTfirst,
	"timestamp": rollupTlast,
	"timestamp_with_name": rollupTlast,
	"tlast_change_over_time": rollupTlastChange,
	"tlast_over_time": rollupTlast,
	"tmax_over_time": rollupTmax,
	"tmin_over_time": rollupTmin,
	"zscore_over_time": rollupZScoreOverTime,
}
|
|
|
|
|
2022-12-20 09:12:54 +01:00
|
|
|
// rollupFuncsCanAdjustWindow contains functions for which VictoriaMetrics can extend
// the lookbehind window in order to make sure it contains enough points for returning non-empty results.
//
// This is needed for returning the expected non-empty graphs when zooming in the graph in Grafana,
// which is built with `func_name(metric)` query.
var rollupFuncsCanAdjustWindow = map[string]bool{
	"default_rollup": true,
	"deriv": true,
	"deriv_fast": true,
	"ideriv": true,
	"irate": true,
	"rate": true,
	"rate_over_sum": true,
	"rollup": true,
	"rollup_candlestick": true,
	"rollup_deriv": true,
	"rollup_rate": true,
	"rollup_scrape_interval": true,
	"scrape_interval": true,
	"timestamp": true,
}
|
|
|
|
|
2022-10-06 22:21:45 +02:00
|
|
|
// rollupFuncsRemoveCounterResets contains functions, which need to call removeCounterResets
// over input samples before calling the corresponding rollup functions.
var rollupFuncsRemoveCounterResets = map[string]bool{
	"increase": true,
	"increase_prometheus": true,
	"increase_pure": true,
	"irate": true,
	"rate": true,
	"rollup_increase": true,
	"rollup_rate": true,
}
|
|
|
|
|
2022-10-06 22:21:45 +02:00
|
|
|
// rollupFuncsSamplesScannedPerCall contains functions, which scan lower number of samples
// than is passed to the rollup func.
//
// It is expected that the remaining rollupFuncs scan all the samples passed to them.
//
// The values feed rollupConfig.samplesScannedPerCall for query-cost accounting.
var rollupFuncsSamplesScannedPerCall = map[string]int{
	"absent_over_time": 1,
	"count_over_time": 1,
	"default_rollup": 1,
	"delta": 2,
	"delta_prometheus": 2,
	"deriv_fast": 2,
	"first_over_time": 1,
	"idelta": 2,
	"ideriv": 2,
	"increase": 2,
	"increase_prometheus": 2,
	"increase_pure": 2,
	"irate": 2,
	"lag": 1,
	"last_over_time": 1,
	"lifetime": 2,
	"present_over_time": 1,
	"rate": 2,
	"scrape_interval": 2,
	"tfirst_over_time": 1,
	"timestamp": 1,
	"timestamp_with_name": 1,
	"tlast_over_time": 1,
}
|
|
|
|
|
2021-11-14 12:42:03 +01:00
|
|
|
// rollupFuncsKeepMetricName contains functions which don't change the physical meaning
// of the input time series, so they don't drop the metric name from the result.
var rollupFuncsKeepMetricName = map[string]bool{
	"avg_over_time": true,
	"default_rollup": true,
	"first_over_time": true,
	"geomean_over_time": true,
	"hoeffding_bound_lower": true,
	"hoeffding_bound_upper": true,
	"holt_winters": true,
	"iqr_over_time": true,
	"last_over_time": true,
	"max_over_time": true,
	"median_over_time": true,
	"min_over_time": true,
	"mode_over_time": true,
	"predict_linear": true,
	"quantile_over_time": true,
	"quantiles_over_time": true,
	"rollup": true,
	"rollup_candlestick": true,
	"timestamp_with_name": true,
}
|
|
|
|
|
2020-01-10 20:16:14 +01:00
|
|
|
func getRollupAggrFuncNames(expr metricsql.Expr) ([]string, error) {
|
2020-01-10 20:56:59 +01:00
|
|
|
afe, ok := expr.(*metricsql.AggrFuncExpr)
|
|
|
|
if ok {
|
|
|
|
// This is for incremental aggregate function case:
|
|
|
|
//
|
|
|
|
// sum(aggr_over_time(...))
|
|
|
|
//
|
|
|
|
// See aggr_incremental.go for details.
|
|
|
|
expr = afe.Args[0]
|
|
|
|
}
|
2020-01-10 20:16:14 +01:00
|
|
|
fe, ok := expr.(*metricsql.FuncExpr)
|
|
|
|
if !ok {
|
|
|
|
logger.Panicf("BUG: unexpected expression; want metricsql.FuncExpr; got %T; value: %s", expr, expr.AppendString(nil))
|
|
|
|
}
|
|
|
|
if fe.Name != "aggr_over_time" {
|
|
|
|
logger.Panicf("BUG: unexpected function name: %q; want `aggr_over_time`", fe.Name)
|
|
|
|
}
|
|
|
|
if len(fe.Args) != 2 {
|
|
|
|
return nil, fmt.Errorf("unexpected number of args to aggr_over_time(); got %d; want %d", len(fe.Args), 2)
|
|
|
|
}
|
|
|
|
arg := fe.Args[0]
|
|
|
|
var aggrFuncNames []string
|
|
|
|
if se, ok := arg.(*metricsql.StringExpr); ok {
|
|
|
|
aggrFuncNames = append(aggrFuncNames, se.S)
|
|
|
|
} else {
|
|
|
|
fe, ok := arg.(*metricsql.FuncExpr)
|
|
|
|
if !ok || fe.Name != "" {
|
|
|
|
return nil, fmt.Errorf("%s cannot be passed to aggr_over_time(); expecting quoted aggregate function name or a list of quoted aggregate function names",
|
|
|
|
arg.AppendString(nil))
|
|
|
|
}
|
|
|
|
for _, e := range fe.Args {
|
|
|
|
se, ok := e.(*metricsql.StringExpr)
|
|
|
|
if !ok {
|
|
|
|
return nil, fmt.Errorf("%s cannot be passed here; expecting quoted aggregate function name", e.AppendString(nil))
|
|
|
|
}
|
|
|
|
aggrFuncNames = append(aggrFuncNames, se.S)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if len(aggrFuncNames) == 0 {
|
|
|
|
return nil, fmt.Errorf("aggr_over_time() must contain at least a single aggregate function name")
|
|
|
|
}
|
|
|
|
for _, s := range aggrFuncNames {
|
|
|
|
if rollupAggrFuncs[s] == nil {
|
|
|
|
return nil, fmt.Errorf("%q cannot be used in `aggr_over_time` function; expecting quoted aggregate function name", s)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return aggrFuncNames, nil
|
|
|
|
}
|
|
|
|
|
2023-02-27 20:47:26 +01:00
|
|
|
// getRollupTag returns the possible second arg from the expr.
|
|
|
|
//
|
|
|
|
// The expr can have the following forms:
|
|
|
|
// - rollup_func(q, tag)
|
|
|
|
// - aggr_func(rollup_func(q, tag)) - this form is used during incremental aggregate calculations
|
2023-02-24 22:47:52 +01:00
|
|
|
func getRollupTag(expr metricsql.Expr) (string, error) {
|
2023-02-27 20:47:26 +01:00
|
|
|
af, ok := expr.(*metricsql.AggrFuncExpr)
|
|
|
|
if ok {
|
|
|
|
// extract rollup_func() from aggr_func(rollup_func(q, tag))
|
|
|
|
if len(af.Args) != 1 {
|
|
|
|
logger.Panicf("BUG: unexpected number of args to %s; got %d; want 1", af.AppendString(nil), len(af.Args))
|
|
|
|
}
|
|
|
|
expr = af.Args[0]
|
|
|
|
}
|
2023-02-24 22:47:52 +01:00
|
|
|
fe, ok := expr.(*metricsql.FuncExpr)
|
|
|
|
if !ok {
|
2023-02-27 20:47:26 +01:00
|
|
|
logger.Panicf("BUG: unexpected expression; want *metricsql.FuncExpr; got %T; value: %s", expr, expr.AppendString(nil))
|
2023-02-24 22:47:52 +01:00
|
|
|
}
|
|
|
|
if len(fe.Args) < 2 {
|
|
|
|
return "", nil
|
|
|
|
}
|
|
|
|
if len(fe.Args) != 2 {
|
|
|
|
return "", fmt.Errorf("unexpected number of args; got %d; want %d", len(fe.Args), 2)
|
|
|
|
}
|
|
|
|
arg := fe.Args[1]
|
|
|
|
|
|
|
|
se, ok := arg.(*metricsql.StringExpr)
|
|
|
|
if !ok {
|
|
|
|
return "", fmt.Errorf("unexpected rollup tag type: %s; expecting string", arg.AppendString(nil))
|
|
|
|
}
|
|
|
|
if se.S == "" {
|
|
|
|
return "", fmt.Errorf("rollup tag cannot be empty")
|
|
|
|
}
|
|
|
|
return se.S, nil
|
|
|
|
}
|
|
|
|
|
2022-10-06 22:21:45 +02:00
|
|
|
func getRollupConfigs(funcName string, rf rollupFunc, expr metricsql.Expr, start, end, step int64, maxPointsPerSeries int,
|
|
|
|
window, lookbackDelta int64, sharedTimestamps []int64) (
|
2020-01-10 20:16:14 +01:00
|
|
|
func(values []float64, timestamps []int64), []*rollupConfig, error) {
|
2024-04-02 22:16:24 +02:00
|
|
|
preFunc := func(_ []float64, _ []int64) {}
|
2022-10-06 22:21:45 +02:00
|
|
|
funcName = strings.ToLower(funcName)
|
|
|
|
if rollupFuncsRemoveCounterResets[funcName] {
|
2024-04-02 22:16:24 +02:00
|
|
|
preFunc = func(values []float64, _ []int64) {
|
2020-01-10 20:16:14 +01:00
|
|
|
removeCounterResets(values)
|
|
|
|
}
|
|
|
|
}
|
2022-10-06 22:21:45 +02:00
|
|
|
samplesScannedPerCall := rollupFuncsSamplesScannedPerCall[funcName]
|
2020-01-10 20:16:14 +01:00
|
|
|
newRollupConfig := func(rf rollupFunc, tagValue string) *rollupConfig {
|
|
|
|
return &rollupConfig{
|
2022-08-24 14:25:18 +02:00
|
|
|
TagValue: tagValue,
|
|
|
|
Func: rf,
|
|
|
|
Start: start,
|
|
|
|
End: end,
|
|
|
|
Step: step,
|
|
|
|
Window: window,
|
|
|
|
|
|
|
|
MaxPointsPerSeries: maxPointsPerSeries,
|
|
|
|
|
2022-10-06 22:21:45 +02:00
|
|
|
MayAdjustWindow: rollupFuncsCanAdjustWindow[funcName],
|
|
|
|
LookbackDelta: lookbackDelta,
|
|
|
|
Timestamps: sharedTimestamps,
|
|
|
|
isDefaultRollup: funcName == "default_rollup",
|
|
|
|
samplesScannedPerCall: samplesScannedPerCall,
|
2020-01-10 20:16:14 +01:00
|
|
|
}
|
|
|
|
}
|
2023-02-24 22:47:52 +01:00
|
|
|
|
|
|
|
appendRollupConfigs := func(dst []*rollupConfig, expr metricsql.Expr) ([]*rollupConfig, error) {
|
|
|
|
tag, err := getRollupTag(expr)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("invalid args for %s: %w", expr.AppendString(nil), err)
|
|
|
|
}
|
|
|
|
switch tag {
|
|
|
|
case "min":
|
|
|
|
dst = append(dst, newRollupConfig(rollupMin, ""))
|
|
|
|
case "max":
|
|
|
|
dst = append(dst, newRollupConfig(rollupMax, ""))
|
|
|
|
case "avg":
|
|
|
|
dst = append(dst, newRollupConfig(rollupAvg, ""))
|
|
|
|
case "":
|
|
|
|
dst = append(dst, newRollupConfig(rollupMin, "min"))
|
|
|
|
dst = append(dst, newRollupConfig(rollupMax, "max"))
|
|
|
|
dst = append(dst, newRollupConfig(rollupAvg, "avg"))
|
|
|
|
default:
|
|
|
|
return nil, fmt.Errorf("unexpected second arg for %s: %q; want `min`, `max` or `avg`", expr.AppendString(nil), tag)
|
|
|
|
}
|
|
|
|
return dst, nil
|
2020-01-10 20:16:14 +01:00
|
|
|
}
|
|
|
|
var rcs []*rollupConfig
|
2023-02-24 22:47:52 +01:00
|
|
|
var err error
|
2022-10-06 22:21:45 +02:00
|
|
|
switch funcName {
|
2020-01-10 20:16:14 +01:00
|
|
|
case "rollup":
|
2023-02-24 22:47:52 +01:00
|
|
|
rcs, err = appendRollupConfigs(rcs, expr)
|
2020-01-10 20:16:14 +01:00
|
|
|
case "rollup_rate", "rollup_deriv":
|
|
|
|
preFuncPrev := preFunc
|
|
|
|
preFunc = func(values []float64, timestamps []int64) {
|
|
|
|
preFuncPrev(values, timestamps)
|
|
|
|
derivValues(values, timestamps)
|
|
|
|
}
|
2023-02-24 22:47:52 +01:00
|
|
|
rcs, err = appendRollupConfigs(rcs, expr)
|
2020-01-10 20:16:14 +01:00
|
|
|
case "rollup_increase", "rollup_delta":
|
|
|
|
preFuncPrev := preFunc
|
|
|
|
preFunc = func(values []float64, timestamps []int64) {
|
|
|
|
preFuncPrev(values, timestamps)
|
|
|
|
deltaValues(values)
|
|
|
|
}
|
2023-02-24 22:47:52 +01:00
|
|
|
rcs, err = appendRollupConfigs(rcs, expr)
|
2020-01-10 20:16:14 +01:00
|
|
|
case "rollup_candlestick":
|
2023-02-24 22:47:52 +01:00
|
|
|
tag, err := getRollupTag(expr)
|
|
|
|
if err != nil {
|
|
|
|
return nil, nil, fmt.Errorf("invalid args for %s: %w", expr.AppendString(nil), err)
|
|
|
|
}
|
|
|
|
switch tag {
|
|
|
|
case "open":
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupOpen, "open"))
|
|
|
|
case "close":
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupClose, "close"))
|
|
|
|
case "low":
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupLow, "low"))
|
|
|
|
case "high":
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupHigh, "high"))
|
|
|
|
case "":
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupOpen, "open"))
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupClose, "close"))
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupLow, "low"))
|
|
|
|
rcs = append(rcs, newRollupConfig(rollupHigh, "high"))
|
|
|
|
default:
|
|
|
|
return nil, nil, fmt.Errorf("unexpected second arg for %s: %q; want `min`, `max` or `avg`", expr.AppendString(nil), tag)
|
|
|
|
}
|
2021-09-27 18:21:22 +02:00
|
|
|
case "rollup_scrape_interval":
|
|
|
|
preFuncPrev := preFunc
|
|
|
|
preFunc = func(values []float64, timestamps []int64) {
|
|
|
|
preFuncPrev(values, timestamps)
|
|
|
|
// Calculate intervals in seconds between samples.
|
|
|
|
tsSecsPrev := nan
|
|
|
|
for i, ts := range timestamps {
|
|
|
|
tsSecs := float64(ts) / 1000
|
|
|
|
values[i] = tsSecs - tsSecsPrev
|
|
|
|
tsSecsPrev = tsSecs
|
|
|
|
}
|
|
|
|
if len(values) > 1 {
|
|
|
|
// Overwrite the first NaN interval with the second interval,
|
|
|
|
// So min, max and avg rollups could be calculated properly, since they don't expect to receive NaNs.
|
|
|
|
values[0] = values[1]
|
|
|
|
}
|
|
|
|
}
|
2023-02-24 22:47:52 +01:00
|
|
|
rcs, err = appendRollupConfigs(rcs, expr)
|
2020-01-10 20:16:14 +01:00
|
|
|
case "aggr_over_time":
|
|
|
|
aggrFuncNames, err := getRollupAggrFuncNames(expr)
|
|
|
|
if err != nil {
|
2020-06-30 21:58:18 +02:00
|
|
|
return nil, nil, fmt.Errorf("invalid args to %s: %w", expr.AppendString(nil), err)
|
2020-01-10 20:16:14 +01:00
|
|
|
}
|
|
|
|
for _, aggrFuncName := range aggrFuncNames {
|
|
|
|
if rollupFuncsRemoveCounterResets[aggrFuncName] {
|
|
|
|
// There is no need to save the previous preFunc, since it is either empty or the same.
|
2024-04-02 22:16:24 +02:00
|
|
|
preFunc = func(values []float64, _ []int64) {
|
2020-01-10 20:16:14 +01:00
|
|
|
removeCounterResets(values)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
rf := rollupAggrFuncs[aggrFuncName]
|
|
|
|
rcs = append(rcs, newRollupConfig(rf, aggrFuncName))
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
rcs = append(rcs, newRollupConfig(rf, ""))
|
|
|
|
}
|
2023-02-24 22:47:52 +01:00
|
|
|
if err != nil {
|
|
|
|
return nil, nil, err
|
|
|
|
}
|
2020-01-10 20:16:14 +01:00
|
|
|
return preFunc, rcs, nil
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func getRollupFunc(funcName string) newRollupFunc {
|
|
|
|
funcName = strings.ToLower(funcName)
|
|
|
|
return rollupFuncs[funcName]
|
|
|
|
}
|
|
|
|
|
|
|
|
// rollupFuncArg contains the inputs for a single rollupFunc invocation
// over one lookbehind window.
type rollupFuncArg struct {
	// The value preceding values if it fits staleness interval.
	prevValue float64

	// The timestamp for prevValue.
	prevTimestamp int64

	// Values that fit window ending at currTimestamp.
	values []float64

	// Timestamps for values.
	timestamps []int64

	// Real value preceding values without restrictions on staleness interval.
	realPrevValue float64

	// Real value which goes after values.
	realNextValue float64

	// Current timestamp for rollup evaluation.
	currTimestamp int64

	// Index for the currently evaluated point relative to time range for query evaluation.
	idx int

	// Time window for rollup calculations.
	window int64

	// tsm, when non-nil, collects additional output series produced by
	// multi-series rollups (see newTimeseriesMap for the eligible functions).
	tsm *timeseriesMap
}
|
|
|
|
|
|
|
|
// reset clears rfa so the object can be safely reused.
//
// NOTE(review): realPrevValue and realNextValue are deliberately(?) not cleared
// here - presumably they are always assigned before each use; confirm against
// doInternal before relying on reset() for full sanitization.
func (rfa *rollupFuncArg) reset() {
	rfa.prevValue = 0
	rfa.prevTimestamp = 0
	rfa.values = nil
	rfa.timestamps = nil
	rfa.currTimestamp = 0
	rfa.idx = 0
	rfa.window = 0
	rfa.tsm = nil
}
|
|
|
|
|
|
|
|
// rollupFunc must return rollup value for the given rfa.
//
// prevValue may be nan, values and timestamps may be empty.
type rollupFunc func(rfa *rollupFuncArg) float64
|
|
|
|
|
|
|
|
// rollupConfig describes a single rollup calculation pass:
// which rollup function to apply, over which time range, step and window.
type rollupConfig struct {
	// This tag value must be added to "rollup" tag if non-empty.
	TagValue string

	// Func is the rollup function to apply per window.
	Func rollupFunc

	// Start, End and Step define the output timestamps grid (milliseconds).
	Start int64
	End   int64
	Step  int64
	// Window is the lookbehind window size in milliseconds.
	Window int64

	// The maximum number of points, which can be generated per each series.
	MaxPointsPerSeries int

	// Whether window may be adjusted to 2 x interval between data points.
	// This is needed for functions which have dt in the denominator
	// such as rate, deriv, etc.
	// Without the adjustment their value would jump in unexpected directions
	// when using window smaller than 2 x scrape_interval.
	MayAdjustWindow bool

	// Timestamps holds the shared output timestamps for the calculated values.
	Timestamps []int64

	// LookbackDelta is the analog to `-query.lookback-delta` from Prometheus world.
	LookbackDelta int64

	// Whether default_rollup is used.
	isDefaultRollup bool

	// The estimated number of samples scanned per Func call.
	//
	// If zero, then it is considered that Func scans all the samples passed to it.
	samplesScannedPerCall int
}
|
|
|
|
|
2022-08-24 14:25:18 +02:00
|
|
|
// getTimestamps returns the output timestamps grid for the [rc.Start ... rc.End]
// range with rc.Step step, capped by rc.MaxPointsPerSeries.
func (rc *rollupConfig) getTimestamps() []int64 {
	return getTimestamps(rc.Start, rc.End, rc.Step, rc.MaxPointsPerSeries)
}
|
|
|
|
|
2022-06-28 18:26:17 +02:00
|
|
|
func (rc *rollupConfig) String() string {
|
|
|
|
start := storage.TimestampToHumanReadableFormat(rc.Start)
|
|
|
|
end := storage.TimestampToHumanReadableFormat(rc.End)
|
|
|
|
return fmt.Sprintf("timeRange=[%s..%s], step=%d, window=%d, points=%d", start, end, rc.Step, rc.Window, len(rc.Timestamps))
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
var (
|
|
|
|
nan = math.NaN()
|
|
|
|
inf = math.Inf(1)
|
|
|
|
)
|
|
|
|
|
2020-01-03 22:50:47 +01:00
|
|
|
// timeseriesMap holds per-label-value output series produced by multi-series
// rollup functions (see newTimeseriesMap for the eligible function names).
type timeseriesMap struct {
	// origin is the template series; generated series share its Timestamps and MetricName.
	origin *timeseries
	// h is a scratch histogram - presumably reused by the histogram-based rollups; confirm at call sites.
	h metrics.Histogram
	// m maps a label value to its output series.
	m map[string]*timeseries
}
|
|
|
|
|
2022-01-14 03:05:39 +01:00
|
|
|
func newTimeseriesMap(funcName string, keepMetricNames bool, sharedTimestamps []int64, mnSrc *storage.MetricName) *timeseriesMap {
|
2021-09-17 22:33:15 +02:00
|
|
|
funcName = strings.ToLower(funcName)
|
|
|
|
switch funcName {
|
2024-02-22 12:39:07 +01:00
|
|
|
case "histogram_over_time", "quantiles_over_time", "count_values_over_time":
|
2021-09-17 22:33:15 +02:00
|
|
|
default:
|
2020-01-03 22:50:47 +01:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
values := make([]float64, len(sharedTimestamps))
|
|
|
|
for i := range values {
|
|
|
|
values[i] = nan
|
|
|
|
}
|
|
|
|
var origin timeseries
|
|
|
|
origin.MetricName.CopyFrom(mnSrc)
|
2022-01-14 03:05:39 +01:00
|
|
|
if !keepMetricNames && !rollupFuncsKeepMetricName[funcName] {
|
2021-09-17 22:33:15 +02:00
|
|
|
origin.MetricName.ResetMetricGroup()
|
|
|
|
}
|
2020-01-03 22:50:47 +01:00
|
|
|
origin.Timestamps = sharedTimestamps
|
|
|
|
origin.Values = values
|
|
|
|
return ×eriesMap{
|
2021-09-17 22:33:15 +02:00
|
|
|
origin: &origin,
|
|
|
|
m: make(map[string]*timeseries),
|
2020-01-03 22:50:47 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (tsm *timeseriesMap) AppendTimeseriesTo(dst []*timeseries) []*timeseries {
|
|
|
|
for _, ts := range tsm.m {
|
|
|
|
dst = append(dst, ts)
|
|
|
|
}
|
|
|
|
return dst
|
|
|
|
}
|
|
|
|
|
2021-09-17 22:33:15 +02:00
|
|
|
func (tsm *timeseriesMap) GetOrCreateTimeseries(labelName, labelValue string) *timeseries {
|
2020-01-03 22:50:47 +01:00
|
|
|
ts := tsm.m[labelValue]
|
|
|
|
if ts != nil {
|
|
|
|
return ts
|
|
|
|
}
|
2024-02-22 12:39:07 +01:00
|
|
|
|
|
|
|
// Make a clone of labelValue in order to use it as map key, since it may point to unsafe string,
|
|
|
|
// which refers some other byte slice, which can change in the future.
|
|
|
|
labelValue = strings.Clone(labelValue)
|
|
|
|
|
2020-01-03 22:50:47 +01:00
|
|
|
ts = ×eries{}
|
|
|
|
ts.CopyFromShallowTimestamps(tsm.origin)
|
2021-09-17 22:33:15 +02:00
|
|
|
ts.MetricName.RemoveTag(labelName)
|
|
|
|
ts.MetricName.AddTag(labelName, labelValue)
|
2024-02-22 12:39:07 +01:00
|
|
|
|
2020-01-03 22:50:47 +01:00
|
|
|
tsm.m[labelValue] = ts
|
|
|
|
return ts
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
// Do calculates rollups for the given timestamps and values, appends
// them to dstValues and returns results.
//
// rc.Timestamps are used as timestamps for dstValues.
//
// It is expected that timestamps cover the time range [rc.Start - rc.Window ... rc.End].
//
// The second return value is the number of raw samples scanned during the calculation.
//
// Do cannot be called from concurrent goroutines.
func (rc *rollupConfig) Do(dstValues []float64, values []float64, timestamps []int64) ([]float64, uint64) {
	return rc.doInternal(dstValues, nil, values, timestamps)
}
|
|
|
|
|
|
|
|
// DoTimeseriesMap calculates rollups for the given timestamps and values and puts them to tsm.
//
// It returns the number of raw samples scanned during the calculation.
func (rc *rollupConfig) DoTimeseriesMap(tsm *timeseriesMap, values []float64, timestamps []int64) uint64 {
	// Use a pooled timeseries as scratch space for the per-point return values;
	// the real results are written into tsm by the rollup func itself.
	ts := getTimeseries()
	var samplesScanned uint64
	ts.Values, samplesScanned = rc.doInternal(ts.Values[:0], tsm, values, timestamps)
	putTimeseries(ts)
	return samplesScanned
}
|
|
|
|
|
2022-06-28 18:26:17 +02:00
|
|
|
// doInternal calculates the rollup rc.Func over (values, timestamps) at every
// point in rc.Timestamps, appending one result per point to dstValues.
//
// tsm may be nil; it is passed through to the rollup func for functions which
// emit multiple output series (e.g. histograms, quantiles).
//
// It returns the extended dstValues and the number of raw samples scanned.
func (rc *rollupConfig) doInternal(dstValues []float64, tsm *timeseriesMap, values []float64, timestamps []int64) ([]float64, uint64) {
	// Sanity checks.
	if rc.Step <= 0 {
		logger.Panicf("BUG: Step must be bigger than 0; got %d", rc.Step)
	}
	if rc.Start > rc.End {
		logger.Panicf("BUG: Start cannot exceed End; got %d vs %d", rc.Start, rc.End)
	}
	if rc.Window < 0 {
		logger.Panicf("BUG: Window must be non-negative; got %d", rc.Window)
	}
	if err := ValidateMaxPointsPerSeries(rc.Start, rc.End, rc.Step, rc.MaxPointsPerSeries); err != nil {
		logger.Panicf("BUG: %s; this must be validated before the call to rollupConfig.Do", err)
	}

	// Extend dstValues in order to remove mallocs below.
	dstValues = decimal.ExtendFloat64sCapacity(dstValues, len(rc.Timestamps))

	// Estimate the interval between raw samples; it drives the staleness
	// lookback (maxPrevInterval) and possible window adjustment below.
	scrapeInterval := getScrapeInterval(timestamps, rc.Step)
	maxPrevInterval := getMaxPrevInterval(scrapeInterval)
	if rc.LookbackDelta > 0 && maxPrevInterval > rc.LookbackDelta {
		maxPrevInterval = rc.LookbackDelta
	}
	if *minStalenessInterval > 0 {
		// Honor -search.minStalenessInterval as the lower bound for staleness.
		if msi := minStalenessInterval.Milliseconds(); msi > 0 && maxPrevInterval < msi {
			maxPrevInterval = msi
		}
	}
	window := rc.Window
	if window <= 0 {
		window = rc.Step
		if rc.MayAdjustWindow && window < maxPrevInterval {
			// Adjust lookbehind window only if it isn't set explicitly, e.g. rate(foo).
			// In the case of missing lookbehind window it should be adjusted in order to return non-empty graph
			// when the window doesn't cover at least two raw samples (this is what most users expect).
			//
			// If the user explicitly sets the lookbehind window to some fixed value, e.g. rate(foo[1s]),
			// then it is expected he knows what he is doing. Do not adjust the lookbehind window then.
			//
			// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/3483
			window = maxPrevInterval
		}
		if rc.isDefaultRollup && rc.LookbackDelta > 0 && window > rc.LookbackDelta {
			// Implicit window exceeds -search.maxStalenessInterval, so limit it to -search.maxStalenessInterval
			// according to https://github.com/VictoriaMetrics/VictoriaMetrics/issues/784
			window = rc.LookbackDelta
		}
	}
	rfa := getRollupFuncArg()
	rfa.idx = 0
	rfa.window = window
	rfa.tsm = tsm

	// i and j delimit the half-open raw-sample range (tStart, tEnd] for the
	// current output point; ni and nj are seek hints carried between iterations.
	i := 0
	j := 0
	ni := 0
	nj := 0
	f := rc.Func
	samplesScanned := uint64(len(values))
	samplesScannedPerCall := uint64(rc.samplesScannedPerCall)
	for _, tEnd := range rc.Timestamps {
		tStart := tEnd - window
		ni = seekFirstTimestampIdxAfter(timestamps[i:], tStart, ni)
		i += ni
		if j < i {
			j = i
		}
		nj = seekFirstTimestampIdxAfter(timestamps[j:], tEnd, nj)
		j += nj

		// Expose the sample just before the window if it is close enough
		// (within maxPrevInterval) to be considered non-stale.
		rfa.prevValue = nan
		rfa.prevTimestamp = tStart - maxPrevInterval
		if i < len(timestamps) && i > 0 && timestamps[i-1] > rfa.prevTimestamp {
			rfa.prevValue = values[i-1]
			rfa.prevTimestamp = timestamps[i-1]
		}
		rfa.values = values[i:j]
		rfa.timestamps = timestamps[i:j]
		// realPrevValue/realNextValue ignore staleness - they are the raw neighbors.
		if i > 0 {
			rfa.realPrevValue = values[i-1]
		} else {
			rfa.realPrevValue = nan
		}
		if j < len(values) {
			rfa.realNextValue = values[j]
		} else {
			rfa.realNextValue = nan
		}
		rfa.currTimestamp = tEnd
		value := f(rfa)
		rfa.idx++
		if samplesScannedPerCall > 0 {
			samplesScanned += samplesScannedPerCall
		} else {
			samplesScanned += uint64(len(rfa.values))
		}
		dstValues = append(dstValues, value)
	}
	putRollupFuncArg(rfa)

	return dstValues, samplesScanned
}
|
|
|
|
|
2019-08-19 12:01:37 +02:00
|
|
|
// seekFirstTimestampIdxAfter returns the index of the first timestamp in the
// (ascending) timestamps slice that is strictly bigger than seekTimestamp.
//
// nHint is the value returned by a previous call for a nearby seekTimestamp;
// it is used to narrow the search range before falling back to a scan or
// binary search.
func seekFirstTimestampIdxAfter(timestamps []int64, seekTimestamp int64, nHint int) int {
	if len(timestamps) == 0 || timestamps[0] > seekTimestamp {
		return 0
	}
	// Clamp the hinted search window [nHint-2 ... nHint+2] into valid bounds.
	startIdx := nHint - 2
	if startIdx < 0 {
		startIdx = 0
	}
	if startIdx >= len(timestamps) {
		startIdx = len(timestamps) - 1
	}
	endIdx := nHint + 2
	if endIdx > len(timestamps) {
		endIdx = len(timestamps)
	}
	// Only trust the hint if the answer cannot lie before startIdx.
	if startIdx > 0 && timestamps[startIdx] <= seekTimestamp {
		timestamps = timestamps[startIdx:]
		endIdx -= startIdx
	} else {
		startIdx = 0
	}
	// Only trust the upper hint if the answer cannot lie after endIdx.
	if endIdx < len(timestamps) && timestamps[endIdx] > seekTimestamp {
		timestamps = timestamps[:endIdx]
	}
	if len(timestamps) < 16 {
		// Fast path: the number of timestamps to search is small, so scan them all.
		for i, timestamp := range timestamps {
			if timestamp > seekTimestamp {
				return startIdx + i
			}
		}
		return startIdx + len(timestamps)
	}
	// Slow path: too big len(timestamps), so use binary search.
	i := binarySearchInt64(timestamps, seekTimestamp+1)
	return startIdx + int(i)
}
|
|
|
|
|
|
|
|
// binarySearchInt64 returns the smallest index i such that a[i] >= v,
// assuming a is sorted in ascending order. It returns uint(len(a)) when
// every element is smaller than v.
func binarySearchInt64(a []int64, v int64) uint {
	// Equivalent to sort.Search from https://golang.org/src/sort/search.go?s=2246:2286#L49
	lo, hi := uint(0), uint(len(a))
	for lo < hi {
		mid := lo + (hi-lo)/2
		if a[mid] < v {
			lo = mid + 1
		} else {
			hi = mid
		}
	}
	return lo
}
|
|
|
|
|
2022-07-13 11:33:04 +02:00
|
|
|
// getScrapeInterval estimates the interval (in milliseconds) between adjacent
// samples in timestamps. It returns defaultInterval when the estimation is
// impossible or yields a non-positive value.
func getScrapeInterval(timestamps []int64, defaultInterval int64) int64 {
	if len(timestamps) < 2 {
		// can't calculate scrape interval with less than 2 timestamps
		// return defaultInterval
		return defaultInterval
	}

	// Estimate scrape interval as 0.6 quantile for the first 20 intervals.
	tsPrev := timestamps[0]
	timestamps = timestamps[1:]
	if len(timestamps) > 20 {
		timestamps = timestamps[:20]
	}
	// Collect successive deltas into a pooled float64 slice.
	a := getFloat64s()
	intervals := a.A[:0]
	for _, ts := range timestamps {
		intervals = append(intervals, float64(ts-tsPrev))
		tsPrev = ts
	}
	scrapeInterval := int64(quantile(0.6, intervals))
	a.A = intervals
	putFloat64s(a)
	if scrapeInterval <= 0 {
		return defaultInterval
	}
	return scrapeInterval
}
|
|
|
|
|
|
|
|
// getMaxPrevInterval returns the maximum allowed distance (in milliseconds) to
// the previous sample before it is considered stale, for the given
// scrapeInterval (also in milliseconds).
//
// Increase scrapeInterval more for smaller scrape intervals in order to hide possible gaps
// when high jitter is present.
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/139 .
func getMaxPrevInterval(scrapeInterval int64) int64 {
	switch {
	case scrapeInterval <= 2*1000:
		return 5 * scrapeInterval
	case scrapeInterval <= 4*1000:
		return 3 * scrapeInterval
	case scrapeInterval <= 8*1000:
		return 2 * scrapeInterval
	case scrapeInterval <= 16*1000:
		return scrapeInterval + scrapeInterval/2
	case scrapeInterval <= 32*1000:
		return scrapeInterval + scrapeInterval/4
	default:
		return scrapeInterval + scrapeInterval/8
	}
}
|
|
|
|
|
|
|
|
// removeCounterResets rewrites a counter series in place so it becomes
// monotonically non-decreasing: every detected counter reset adds the lost
// amount to a running correction which is applied to all subsequent values.
//
// There is no need in handling NaNs here, since they are impossible
// on values from vmstorage.
func removeCounterResets(values []float64) {
	if len(values) == 0 {
		return
	}
	correction := float64(0)
	prev := values[0]
	for i, v := range values {
		if d := v - prev; d < 0 {
			if (-d * 8) < prev {
				// This is likely a partial counter reset.
				// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/2787
				correction += prev - v
			} else {
				correction += prev
			}
		}
		prev = v
		values[i] = v + correction
		// Check again, there could be precision error in float operations,
		// see https://github.com/VictoriaMetrics/VictoriaMetrics/issues/5571
		if i > 0 && values[i] < values[i-1] {
			values[i] = values[i-1]
		}
	}
}
|
|
|
|
|
|
|
|
// deltaValues replaces each element of values with the difference to the next
// element; the final slot is filled with the last computed delta so the
// result keeps the input length. A single-element slice becomes [0].
//
// There is no need in handling NaNs here, since they are impossible
// on values from vmstorage.
func deltaValues(values []float64) {
	if len(values) == 0 {
		return
	}
	lastDelta := float64(0)
	prev := values[0]
	for i, v := range values[1:] {
		lastDelta = v - prev
		values[i] = lastDelta
		prev = v
	}
	values[len(values)-1] = lastDelta
}
|
|
|
|
|
|
|
|
// derivValues replaces each element of values with the per-second derivative
// to the next element; the final slot is filled with the last computed
// derivative so the result keeps the input length. Duplicate timestamps reuse
// the previous derivative.
//
// There is no need in handling NaNs here, since they are impossible
// on values from vmstorage.
func derivValues(values []float64, timestamps []int64) {
	if len(values) == 0 {
		return
	}
	lastDeriv := float64(0)
	prevValue := values[0]
	prevTs := timestamps[0]
	for i, v := range values[1:] {
		ts := timestamps[i+1]
		if ts == prevTs {
			// Use the previous value for duplicate timestamps.
			values[i] = lastDeriv
			continue
		}
		// Timestamps are in milliseconds; convert the delta to seconds.
		dt := float64(ts-prevTs) / 1e3
		lastDeriv = (v - prevValue) / dt
		values[i] = lastDeriv
		prevValue = v
		prevTs = ts
	}
	values[len(values)-1] = lastDeriv
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupFunc constructs a rollupFunc from the already evaluated args of a rollup expression.
type newRollupFunc func(args []any) (rollupFunc, error)
|
2019-05-22 23:16:55 +02:00
|
|
|
|
|
|
|
// newRollupFuncOneArg wraps rf into a newRollupFunc which validates that
// exactly one arg is passed.
func newRollupFuncOneArg(rf rollupFunc) newRollupFunc {
	return func(args []any) (rollupFunc, error) {
		if err := expectRollupArgsNum(args, 1); err != nil {
			return nil, err
		}
		return rf, nil
	}
}
|
|
|
|
|
2020-01-10 20:16:14 +01:00
|
|
|
// newRollupFuncTwoArgs wraps rf into a newRollupFunc which validates that
// exactly two args are passed.
func newRollupFuncTwoArgs(rf rollupFunc) newRollupFunc {
	return func(args []any) (rollupFunc, error) {
		if err := expectRollupArgsNum(args, 2); err != nil {
			return nil, err
		}
		return rf, nil
	}
}
|
|
|
|
|
2023-02-24 22:47:52 +01:00
|
|
|
// newRollupFuncOneOrTwoArgs wraps rf into a newRollupFunc which validates
// that either one or two args are passed.
func newRollupFuncOneOrTwoArgs(rf rollupFunc) newRollupFunc {
	return func(args []any) (rollupFunc, error) {
		if len(args) < 1 || len(args) > 2 {
			return nil, fmt.Errorf("unexpected number of args; got %d; want 1...2", len(args))
		}
		return rf, nil
	}
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupHoltWinters returns a rollup func for holt_winters(m[d], sf, tf) -
// double exponential smoothing of the values with smoothing factor sf and
// trend factor tf, both expected in [0, 1].
func newRollupHoltWinters(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 3); err != nil {
		return nil, err
	}
	sfs, err := getScalar(args[1], 1)
	if err != nil {
		return nil, err
	}
	tfs, err := getScalar(args[2], 2)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		// There is no need in handling NaNs here, since they must be cleaned up
		// before calling rollup funcs.
		values := rfa.values
		if len(values) == 0 {
			return nan
		}
		sf := sfs[rfa.idx]
		// Out-of-range smoothing factors make the result undefined.
		if sf < 0 || sf > 1 {
			return nan
		}
		tf := tfs[rfa.idx]
		if tf < 0 || tf > 1 {
			return nan
		}

		// See https://en.wikipedia.org/wiki/Exponential_smoothing#Double_exponential_smoothing .
		// TODO: determine whether this shit really works.
		// Seed the level s0 with the value preceding the window when available.
		s0 := rfa.prevValue
		if math.IsNaN(s0) {
			s0 = values[0]
			values = values[1:]
			if len(values) == 0 {
				return s0
			}
		}
		b0 := values[0] - s0
		for _, v := range values {
			s1 := sf*v + (1-sf)*(s0+b0)
			b1 := tf*(s1-s0) + (1-tf)*b0
			s0 = s1
			b0 = b1
		}
		return s0
	}
	return rf, nil
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupPredictLinear returns a rollup func for predict_linear(m[d], t) -
// the series value linearly extrapolated t seconds past the current point.
func newRollupPredictLinear(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	secs, err := getScalar(args[1], 1)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		// v is the regression value at rfa.currTimestamp; k is the slope per second.
		v, k := linearRegression(rfa.values, rfa.timestamps, rfa.currTimestamp)
		if math.IsNaN(v) {
			return nan
		}
		sec := secs[rfa.idx]
		return v + k*sec
	}
	return rf, nil
}
|
|
|
|
|
2022-11-16 23:38:48 +01:00
|
|
|
// linearRegression performs simple least-squares regression over
// (timestamps, values) and returns (v, k): the regression value at
// interceptTime and the slope per second. NaN values are skipped.
// It returns (nan, nan) when there are no usable values.
func linearRegression(values []float64, timestamps []int64, interceptTime int64) (float64, float64) {
	if len(values) == 0 {
		return nan, nan
	}
	if areConstValues(values) {
		// Fast path: a constant series has zero slope.
		return values[0], 0
	}

	// See https://en.wikipedia.org/wiki/Simple_linear_regression#Numerical_example
	vSum := float64(0)
	tSum := float64(0)
	tvSum := float64(0)
	ttSum := float64(0)
	n := 0
	for i, v := range values {
		if math.IsNaN(v) {
			continue
		}
		// Offset timestamps relative to interceptTime (in seconds) for numeric stability.
		dt := float64(timestamps[i]-interceptTime) / 1e3
		vSum += v
		tSum += dt
		tvSum += dt * v
		ttSum += dt * dt
		n++
	}
	if n == 0 {
		return nan, nan
	}
	k := float64(0)
	tDiff := ttSum - tSum*tSum/float64(n)
	if math.Abs(tDiff) >= 1e-6 {
		// Prevent from incorrect division for too small tDiff values.
		k = (tvSum - tSum*vSum/float64(n)) / tDiff
	}
	v := vSum/float64(n) - k*tSum/float64(n)
	return v, k
}
|
|
|
|
|
2021-11-17 17:01:30 +01:00
|
|
|
// areConstValues reports whether all the values are identical to each other.
// Empty and single-element slices are considered constant.
func areConstValues(values []float64) bool {
	for i := 1; i < len(values); i++ {
		if values[i] != values[i-1] {
			return false
		}
	}
	return true
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupDurationOverTime returns a rollup func for
// duration_over_time(m[d], max_interval) - the total duration (in seconds)
// covered by sample gaps not exceeding max_interval seconds.
func newRollupDurationOverTime(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	dMaxs, err := getScalar(args[1], 1)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		// There is no need in handling NaNs here, since they must be cleaned up
		// before calling rollup funcs.
		timestamps := rfa.timestamps
		if len(timestamps) == 0 {
			return nan
		}
		tPrev := timestamps[0]
		dSum := int64(0)
		// dMax arg is in seconds; timestamps are in milliseconds.
		dMax := int64(dMaxs[rfa.idx] * 1000)
		for _, t := range timestamps {
			d := t - tPrev
			if d <= dMax {
				dSum += d
			}
			tPrev = t
		}
		return float64(dSum) / 1000
	}
	return rf, nil
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupShareLE returns a rollup func for share_le_over_time(m[d], le) -
// the share (0..1) of values not exceeding le.
func newRollupShareLE(args []any) (rollupFunc, error) {
	return newRollupAvgFilter(args, countFilterLE)
}
|
|
|
|
|
2024-02-13 22:40:04 +01:00
|
|
|
// countFilterLE returns the number of values not exceeding le.
func countFilterLE(values []float64, le float64) float64 {
	var n float64
	for _, v := range values {
		if v > le {
			continue
		}
		n++
	}
	return n
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupShareGT returns a rollup func for share_gt_over_time(m[d], gt) -
// the share (0..1) of values bigger than gt.
func newRollupShareGT(args []any) (rollupFunc, error) {
	return newRollupAvgFilter(args, countFilterGT)
}
|
|
|
|
|
2024-02-13 22:40:04 +01:00
|
|
|
// countFilterGT returns the number of values bigger than gt.
func countFilterGT(values []float64, gt float64) float64 {
	var n float64
	for _, v := range values {
		if v <= gt {
			continue
		}
		n++
	}
	return n
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupShareEQ returns a rollup func for share_eq_over_time(m[d], eq) -
// the share (0..1) of values equal to eq.
func newRollupShareEQ(args []any) (rollupFunc, error) {
	return newRollupAvgFilter(args, countFilterEQ)
}
|
|
|
|
|
|
|
|
// sumFilterEQ returns the sum of values equal to eq.
func sumFilterEQ(values []float64, eq float64) float64 {
	sum := float64(0)
	for _, v := range values {
		if v != eq {
			continue
		}
		sum += v
	}
	return sum
}
|
|
|
|
|
|
|
|
// sumFilterLE returns the sum of values not exceeding le.
func sumFilterLE(values []float64, le float64) float64 {
	sum := float64(0)
	for _, v := range values {
		if v > le {
			continue
		}
		sum += v
	}
	return sum
}
|
|
|
|
|
|
|
|
// sumFilterGT returns the sum of values bigger than gt.
func sumFilterGT(values []float64, gt float64) float64 {
	sum := float64(0)
	for _, v := range values {
		if v <= gt {
			continue
		}
		sum += v
	}
	return sum
}
|
|
|
|
|
2024-02-13 22:40:04 +01:00
|
|
|
// countFilterEQ returns the number of values equal to eq.
func countFilterEQ(values []float64, eq float64) float64 {
	var n float64
	for _, v := range values {
		if v != eq {
			continue
		}
		n++
	}
	return n
}
|
|
|
|
|
2024-02-13 22:40:04 +01:00
|
|
|
// countFilterNE returns the number of values not equal to ne.
func countFilterNE(values []float64, ne float64) float64 {
	var n float64
	for _, v := range values {
		if v == ne {
			continue
		}
		n++
	}
	return n
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupAvgFilter wraps a counting filter f into a rollup func which
// divides the count by the number of values in the window, converting an
// absolute count into a share.
func newRollupAvgFilter(args []any, f func(values []float64, limit float64) float64) (rollupFunc, error) {
	rf, err := newRollupFilter(args, f)
	if err != nil {
		return nil, err
	}
	return func(rfa *rollupFuncArg) float64 {
		// For an empty window rf returns nan, and nan/0 stays nan.
		n := rf(rfa)
		return n / float64(len(rfa.values))
	}, nil
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupCountEQ returns a rollup func for count_eq_over_time(m[d], eq).
func newRollupCountEQ(args []any) (rollupFunc, error) {
	return newRollupFilter(args, countFilterEQ)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupCountLE returns a rollup func for count_le_over_time(m[d], le).
func newRollupCountLE(args []any) (rollupFunc, error) {
	return newRollupFilter(args, countFilterLE)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupCountGT returns a rollup func for count_gt_over_time(m[d], gt).
func newRollupCountGT(args []any) (rollupFunc, error) {
	return newRollupFilter(args, countFilterGT)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupCountNE returns a rollup func for count_ne_over_time(m[d], ne).
func newRollupCountNE(args []any) (rollupFunc, error) {
	return newRollupFilter(args, countFilterNE)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupSumEQ returns a rollup func for sum_eq_over_time(m[d], eq).
func newRollupSumEQ(args []any) (rollupFunc, error) {
	return newRollupFilter(args, sumFilterEQ)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupSumLE returns a rollup func for sum_le_over_time(m[d], le).
func newRollupSumLE(args []any) (rollupFunc, error) {
	return newRollupFilter(args, sumFilterLE)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupSumGT returns a rollup func for sum_gt_over_time(m[d], gt).
func newRollupSumGT(args []any) (rollupFunc, error) {
	return newRollupFilter(args, sumFilterGT)
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupFilter returns a rollup func which applies f(values, limit) over
// the window values, with limit taken from the scalar second arg at the
// current point index.
func newRollupFilter(args []any, f func(values []float64, limit float64) float64) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	limits, err := getScalar(args[1], 1)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		// There is no need in handling NaNs here, since they must be cleaned up
		// before calling rollup funcs.
		values := rfa.values
		if len(values) == 0 {
			return nan
		}
		limit := limits[rfa.idx]
		return f(values, limit)
	}
	return rf, nil
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupHoeffdingBoundLower returns a rollup func for
// hoeffding_bound_lower(phi, m[d]) - the lower Hoeffding bound
// (avg - bound) at confidence level phi.
func newRollupHoeffdingBoundLower(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	phis, err := getScalar(args[0], 0)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		bound, avg := rollupHoeffdingBoundInternal(rfa, phis)
		return avg - bound
	}
	return rf, nil
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupHoeffdingBoundUpper returns a rollup func for
// hoeffding_bound_upper(phi, m[d]) - the upper Hoeffding bound
// (avg + bound) at confidence level phi.
func newRollupHoeffdingBoundUpper(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	phis, err := getScalar(args[0], 0)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		bound, avg := rollupHoeffdingBoundInternal(rfa, phis)
		return avg + bound
	}
	return rf, nil
}
|
|
|
|
|
|
|
|
// rollupHoeffdingBoundInternal returns (bound, avg) for the window values at
// the confidence level phis[rfa.idx]: avg is the window average, bound is the
// Hoeffding deviation to add/subtract for the upper/lower bound.
func rollupHoeffdingBoundInternal(rfa *rollupFuncArg, phis []float64) (float64, float64) {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	if len(values) == 0 {
		return nan, nan
	}
	if len(values) == 1 {
		// A single sample carries no uncertainty under this estimator.
		return 0, values[0]
	}
	vMax := rollupMax(rfa)
	vMin := rollupMin(rfa)
	vAvg := rollupAvg(rfa)
	vRange := vMax - vMin
	if vRange <= 0 {
		// Constant series: zero deviation.
		return 0, vAvg
	}
	phi := phis[rfa.idx]
	if phi >= 1 {
		return inf, vAvg
	}
	if phi <= 0 {
		return 0, vAvg
	}
	// See https://en.wikipedia.org/wiki/Hoeffding%27s_inequality
	// and https://www.youtube.com/watch?v=6UwcqiNsZ8U&feature=youtu.be&t=1237
	bound := vRange * math.Sqrt(math.Log(1/(1-phi))/(2*float64(len(values))))
	return bound, vAvg
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupQuantiles returns a rollup func for
// quantiles_over_time("phiLabel", phi1, ..., phiN, m[d]). It calculates all
// the requested quantiles per window and stores each one as a separate
// timeseries in rfa.tsm labeled phiLabel=<phi>; the func itself returns nan.
func newRollupQuantiles(args []any) (rollupFunc, error) {
	if len(args) < 3 {
		return nil, fmt.Errorf("unexpected number of args: %d; want at least 3 args", len(args))
	}
	tssPhi, ok := args[0].([]*timeseries)
	if !ok {
		return nil, fmt.Errorf("unexpected type for phi arg: %T; want string", args[0])
	}
	phiLabel, err := getString(tssPhi, 0)
	if err != nil {
		return nil, err
	}
	// Args between the label and the trailing series arg are the phi values.
	phiArgs := args[1 : len(args)-1]
	phis := make([]float64, len(phiArgs))
	// Pre-render the phi strings once, since they become label values.
	phiStrs := make([]string, len(phiArgs))
	for i, phiArg := range phiArgs {
		phiValues, err := getScalar(phiArg, i+1)
		if err != nil {
			return nil, fmt.Errorf("cannot obtain phi from arg #%d: %w", i+1, err)
		}
		phis[i] = phiValues[0]
		phiStrs[i] = fmt.Sprintf("%g", phiValues[0])
	}
	rf := func(rfa *rollupFuncArg) float64 {
		// There is no need in handling NaNs here, since they must be cleaned up
		// before calling rollup funcs.
		values := rfa.values
		if len(values) == 0 {
			return nan
		}
		// Compute all the quantiles in one pass using a pooled scratch slice.
		qs := getFloat64s()
		qs.A = quantiles(qs.A[:0], phis, values)
		idx := rfa.idx
		tsm := rfa.tsm
		for i, phiStr := range phiStrs {
			ts := tsm.GetOrCreateTimeseries(phiLabel, phiStr)
			ts.Values[idx] = qs.A[i]
		}
		putFloat64s(qs)
		return nan
	}
	return rf, nil
}
|
|
|
|
|
2023-10-31 22:10:29 +01:00
|
|
|
// rollupOutlierIQR returns the last value in the window if it is an
// interquartile-range outlier (outside [q25-1.5*IQR, q75+1.5*IQR]);
// otherwise it returns nan.
func rollupOutlierIQR(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.

	// See Outliers section at https://en.wikipedia.org/wiki/Interquartile_range
	values := rfa.values
	if len(values) < 2 {
		return nan
	}
	qs := getFloat64s()
	qs.A = quantiles(qs.A[:0], iqrPhis, values)
	q25 := qs.A[0]
	q75 := qs.A[1]
	iqr := 1.5 * (q75 - q25)
	putFloat64s(qs)

	v := values[len(values)-1]
	if v > q75+iqr || v < q25-iqr {
		return v
	}
	return nan
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupQuantile returns a rollup func for quantile_over_time(phi, m[d]) -
// the phi-quantile of the values in the window.
func newRollupQuantile(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	phis, err := getScalar(args[0], 0)
	if err != nil {
		return nil, err
	}
	rf := func(rfa *rollupFuncArg) float64 {
		// There is no need in handling NaNs here, since they must be cleaned up
		// before calling rollup funcs.
		values := rfa.values
		phi := phis[rfa.idx]
		qv := quantile(phi, values)
		return qv
	}
	return rf, nil
}
|
|
|
|
|
2023-02-11 10:06:16 +01:00
|
|
|
// rollupMAD returns the median absolute deviation of the values in the window.
func rollupMAD(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.

	return mad(rfa.values)
}
|
|
|
|
|
|
|
|
// mad returns the median absolute deviation of values.
func mad(values []float64) float64 {
	// See https://en.wikipedia.org/wiki/Median_absolute_deviation
	median := quantile(0.5, values)
	// Collect absolute deviations into a pooled scratch slice.
	a := getFloat64s()
	ds := a.A[:0]
	for _, v := range values {
		ds = append(ds, math.Abs(v-median))
	}
	v := quantile(0.5, ds)
	a.A = ds
	putFloat64s(a)
	return v
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// newRollupCountValues returns a rollup func for
// count_values_over_time("labelName", m[d]). For every distinct raw value in
// the window it creates/updates a timeseries in rfa.tsm labeled
// labelName=<value> holding the occurrence count; the func itself returns nan.
func newRollupCountValues(args []any) (rollupFunc, error) {
	if err := expectRollupArgsNum(args, 2); err != nil {
		return nil, err
	}
	tssLabelNum, ok := args[0].([]*timeseries)
	if !ok {
		return nil, fmt.Errorf(`unexpected type for labelName arg; got %T; want %T`, args[0], tssLabelNum)
	}
	labelName, err := getString(tssLabelNum, 0)
	if err != nil {
		return nil, fmt.Errorf("cannot get labelName: %w", err)
	}
	f := func(rfa *rollupFuncArg) float64 {
		tsm := rfa.tsm
		idx := rfa.idx
		// Pooled byte buffer for rendering float values as label strings.
		bb := bbPool.Get()
		// Note: the code below may create very big number of time series
		// if the number of unique values in rfa.values is big.
		for _, v := range rfa.values {
			bb.B = strconv.AppendFloat(bb.B[:0], v, 'g', -1, 64)
			// The unsafe string is cloned inside GetOrCreateTimeseries before
			// being used as a map key, so reusing bb.B here is safe.
			labelValue := bytesutil.ToUnsafeString(bb.B)
			ts := tsm.GetOrCreateTimeseries(labelName, labelValue)
			count := ts.Values[idx]
			if math.IsNaN(count) {
				count = 1
			} else {
				count++
			}
			ts.Values[idx] = count
		}
		bbPool.Put(bb)
		return nan
	}
	return f, nil
}
|
|
|
|
|
2020-01-03 22:50:47 +01:00
|
|
|
// rollupHistogram calculates a VictoriaMetrics histogram over the raw samples
// on the window and emits one time series per non-zero `vmrange` bucket via rfa.tsm.
func rollupHistogram(rfa *rollupFuncArg) float64 {
	values := rfa.values
	tsm := rfa.tsm
	// Reset the shared histogram state before accumulating the current window.
	tsm.h.Reset()
	for _, v := range values {
		tsm.h.Update(v)
	}
	idx := rfa.idx
	tsm.h.VisitNonZeroBuckets(func(vmrange string, count uint64) {
		ts := tsm.GetOrCreateTimeseries("vmrange", vmrange)
		ts.Values[idx] = float64(count)
	})
	// Bucket series are emitted via tsm; the scalar result is empty.
	return nan
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func rollupAvg(rfa *rollupFuncArg) float64 {
|
|
|
|
// Do not use `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation,
|
|
|
|
// since it is slower and has no significant benefits in precision.
|
|
|
|
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2020-01-11 11:47:29 +01:00
|
|
|
// Do not take into account rfa.prevValue, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
var sum float64
|
|
|
|
for _, v := range values {
|
|
|
|
sum += v
|
|
|
|
}
|
|
|
|
return sum / float64(len(values))
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupMin(rfa *rollupFuncArg) float64 {
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
2020-01-10 23:24:31 +01:00
|
|
|
if len(values) == 0 {
|
|
|
|
// Do not take into account rfa.prevValue, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2020-01-10 23:24:31 +01:00
|
|
|
minValue := values[0]
|
2019-05-22 23:16:55 +02:00
|
|
|
for _, v := range values {
|
|
|
|
if v < minValue {
|
|
|
|
minValue = v
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return minValue
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupMax(rfa *rollupFuncArg) float64 {
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
2020-01-10 23:24:31 +01:00
|
|
|
if len(values) == 0 {
|
|
|
|
// Do not take into account rfa.prevValue, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
2020-01-10 23:24:31 +01:00
|
|
|
maxValue := values[0]
|
2019-05-22 23:16:55 +02:00
|
|
|
for _, v := range values {
|
|
|
|
if v > maxValue {
|
|
|
|
maxValue = v
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return maxValue
|
|
|
|
}
|
|
|
|
|
2023-09-25 14:01:00 +02:00
|
|
|
func rollupMedian(rfa *rollupFuncArg) float64 {
|
|
|
|
return quantile(0.5, rfa.values)
|
|
|
|
}
|
|
|
|
|
2020-01-10 18:38:46 +01:00
|
|
|
func rollupTmin(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
timestamps := rfa.timestamps
|
2020-01-10 23:24:31 +01:00
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
2020-01-10 18:38:46 +01:00
|
|
|
}
|
2020-01-10 23:24:31 +01:00
|
|
|
minValue := values[0]
|
|
|
|
minTimestamp := timestamps[0]
|
2020-01-10 18:38:46 +01:00
|
|
|
for i, v := range values {
|
2021-06-23 13:18:13 +02:00
|
|
|
// Get the last timestamp for the minimum value as most users expect.
|
|
|
|
if v <= minValue {
|
2020-01-10 18:38:46 +01:00
|
|
|
minValue = v
|
|
|
|
minTimestamp = timestamps[i]
|
|
|
|
}
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return float64(minTimestamp) / 1e3
|
2020-01-10 18:38:46 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
func rollupTmax(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
timestamps := rfa.timestamps
|
2020-01-10 23:24:31 +01:00
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
2020-01-10 18:38:46 +01:00
|
|
|
}
|
2020-01-10 23:24:31 +01:00
|
|
|
maxValue := values[0]
|
|
|
|
maxTimestamp := timestamps[0]
|
2020-01-10 18:38:46 +01:00
|
|
|
for i, v := range values {
|
2021-06-23 13:18:13 +02:00
|
|
|
// Get the last timestamp for the maximum value as most users expect.
|
|
|
|
if v >= maxValue {
|
2020-01-10 18:38:46 +01:00
|
|
|
maxValue = v
|
|
|
|
maxTimestamp = timestamps[i]
|
|
|
|
}
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return float64(maxTimestamp) / 1e3
|
2020-01-10 18:38:46 +01:00
|
|
|
}
|
|
|
|
|
2021-01-12 15:12:03 +01:00
|
|
|
func rollupTfirst(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
timestamps := rfa.timestamps
|
|
|
|
if len(timestamps) == 0 {
|
|
|
|
// Do not take into account rfa.prevTimestamp, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
return float64(timestamps[0]) / 1e3
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupTlast(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
timestamps := rfa.timestamps
|
|
|
|
if len(timestamps) == 0 {
|
|
|
|
// Do not take into account rfa.prevTimestamp, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
return float64(timestamps[len(timestamps)-1]) / 1e3
|
|
|
|
}
|
|
|
|
|
2022-04-27 09:57:49 +02:00
|
|
|
// rollupTlastChange returns the timestamp (in seconds) of the last change
// of the series value on the window, or NaN if the value didn't change.
func rollupTlastChange(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	if len(values) == 0 {
		return nan
	}
	timestamps := rfa.timestamps
	lastValue := values[len(values)-1]
	values = values[:len(values)-1]
	// Scan backwards for the most recent sample that differs from the last value;
	// the change happened at the next sample's timestamp.
	for i := len(values) - 1; i >= 0; i-- {
		if values[i] != lastValue {
			return float64(timestamps[i+1]) / 1e3
		}
	}
	// All samples on the window are equal. If the value before the window is
	// missing or different, the change happened at the first sample.
	if math.IsNaN(rfa.prevValue) || rfa.prevValue != lastValue {
		return float64(timestamps[0]) / 1e3
	}
	// The value didn't change at all.
	return nan
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func rollupSum(rfa *rollupFuncArg) float64 {
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2020-12-02 12:11:32 +01:00
|
|
|
// Do not take into account rfa.prevValue, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
var sum float64
|
|
|
|
for _, v := range values {
|
|
|
|
sum += v
|
|
|
|
}
|
|
|
|
return sum
|
|
|
|
}
|
|
|
|
|
2020-07-24 00:13:05 +02:00
|
|
|
func rollupRateOverSum(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
timestamps := rfa.timestamps
|
2020-07-29 10:36:51 +02:00
|
|
|
if len(timestamps) == 0 {
|
2024-02-21 17:47:49 +01:00
|
|
|
return nan
|
2020-07-24 00:13:05 +02:00
|
|
|
}
|
|
|
|
sum := float64(0)
|
2020-07-29 11:36:10 +02:00
|
|
|
for _, v := range rfa.values {
|
2020-07-24 00:13:05 +02:00
|
|
|
sum += v
|
|
|
|
}
|
2022-09-02 22:18:01 +02:00
|
|
|
return sum / (float64(rfa.window) / 1e3)
|
2020-07-24 00:13:05 +02:00
|
|
|
}
|
|
|
|
|
2020-01-21 18:05:10 +01:00
|
|
|
func rollupRange(rfa *rollupFuncArg) float64 {
|
|
|
|
max := rollupMax(rfa)
|
|
|
|
min := rollupMin(rfa)
|
|
|
|
return max - min
|
|
|
|
}
|
|
|
|
|
2019-06-24 15:17:28 +02:00
|
|
|
func rollupSum2(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2024-02-21 17:47:49 +01:00
|
|
|
return nan
|
2019-06-24 15:17:28 +02:00
|
|
|
}
|
|
|
|
var sum2 float64
|
|
|
|
for _, v := range values {
|
|
|
|
sum2 += v * v
|
|
|
|
}
|
|
|
|
return sum2
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupGeomean(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2024-02-21 17:47:49 +01:00
|
|
|
return nan
|
2019-06-24 15:17:28 +02:00
|
|
|
}
|
|
|
|
p := 1.0
|
|
|
|
for _, v := range values {
|
|
|
|
p *= v
|
|
|
|
}
|
|
|
|
return math.Pow(p, 1/float64(len(values)))
|
|
|
|
}
|
|
|
|
|
2020-01-03 23:46:39 +01:00
|
|
|
func rollupAbsent(rfa *rollupFuncArg) float64 {
|
|
|
|
if len(rfa.values) == 0 {
|
|
|
|
return 1
|
|
|
|
}
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
|
2021-08-03 11:21:05 +02:00
|
|
|
func rollupPresent(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
if len(rfa.values) > 0 {
|
|
|
|
return 1
|
|
|
|
}
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func rollupCount(rfa *rollupFuncArg) float64 {
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2021-09-28 23:37:04 +02:00
|
|
|
return nan
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
return float64(len(values))
|
|
|
|
}
|
|
|
|
|
2022-01-14 00:48:04 +01:00
|
|
|
func rollupStaleSamples(rfa *rollupFuncArg) float64 {
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
n := 0
|
|
|
|
for _, v := range rfa.values {
|
|
|
|
if decimal.IsStaleNaN(v) {
|
|
|
|
n++
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return float64(n)
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func rollupStddev(rfa *rollupFuncArg) float64 {
|
2022-11-17 00:01:46 +01:00
|
|
|
return stddev(rfa.values)
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func rollupStdvar(rfa *rollupFuncArg) float64 {
|
2022-11-17 00:01:46 +01:00
|
|
|
return stdvar(rfa.values)
|
|
|
|
}
|
2019-05-22 23:16:55 +02:00
|
|
|
|
2022-11-17 00:01:46 +01:00
|
|
|
func stddev(values []float64) float64 {
|
|
|
|
v := stdvar(values)
|
|
|
|
return math.Sqrt(v)
|
|
|
|
}
|
|
|
|
|
|
|
|
// stdvar returns the population variance of the non-NaN entries in values
// using Welford's online algorithm, or NaN if there are no usable entries.
func stdvar(values []float64) float64 {
	// See `Rapid calculation methods` at https://en.wikipedia.org/wiki/Standard_deviation
	if len(values) == 0 {
		return nan
	}
	if len(values) == 1 {
		// Fast path.
		return 0
	}
	var avg float64
	var count float64
	var q float64
	for _, v := range values {
		if math.IsNaN(v) {
			// Skip NaN entries instead of poisoning the accumulator.
			continue
		}
		count++
		// Welford's recurrence: update the running mean and the sum of squared deviations.
		avgNew := avg + (v-avg)/count
		q += (v - avg) * (v - avgNew)
		avg = avgNew
	}
	if count == 0 {
		// All entries were NaN.
		return nan
	}
	return q / count
}
|
|
|
|
|
2021-02-22 18:12:12 +01:00
|
|
|
// rollupIncreasePure returns the raw counter increase on the window
// without any counter-reset adjustments.
func rollupIncreasePure(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	// restore to the real value because of potential staleness reset
	prevValue := rfa.realPrevValue
	if math.IsNaN(prevValue) {
		if len(values) == 0 {
			return nan
		}
		// Assume the counter starts from 0.
		prevValue = 0
	}
	if len(values) == 0 {
		// Assume the counter didn't change since prevValue.
		return 0
	}
	return values[len(values)-1] - prevValue
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
// rollupDelta returns the difference between the last sample on the window
// and the sample preceding the window, with heuristics for series starts and gaps.
func rollupDelta(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	prevValue := rfa.prevValue
	if math.IsNaN(prevValue) {
		if len(values) == 0 {
			return nan
		}
		if !math.IsNaN(rfa.realPrevValue) {
			// Assume that the value didn't change during the current gap.
			// This should fix high delta() and increase() values at the end of gaps.
			// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/894
			return values[len(values)-1] - rfa.realPrevValue
		}
		// Assume that the previous non-existing value was 0
		// only if the first value doesn't exceed too much the delta with the next value.
		//
		// This should prevent from improper increase() results for os-level counters
		// such as cpu time or bytes sent over the network interface.
		// These counters may start long ago before the first value appears in the db.
		//
		// This also should prevent from improper increase() results when a part of label values are changed
		// without counter reset.
		var d float64
		if len(values) > 1 {
			d = values[1] - values[0]
		} else if !math.IsNaN(rfa.realNextValue) {
			d = rfa.realNextValue - values[0]
		}
		if math.Abs(values[0]) < 10*(math.Abs(d)+1) {
			// The first value looks like a fresh counter start - treat the prior value as 0.
			prevValue = 0
		} else {
			// The first value is too big relative to the step - use it as the baseline.
			prevValue = values[0]
			values = values[1:]
		}
	}
	if len(values) == 0 {
		// Assume that the value didn't change on the given interval.
		return 0
	}
	return values[len(values)-1] - prevValue
}
|
|
|
|
|
2021-12-20 11:32:43 +01:00
|
|
|
func rollupDeltaPrometheus(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
2021-12-20 12:13:16 +01:00
|
|
|
// Just return the difference between the last and the first sample like Prometheus does.
|
|
|
|
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1962
|
|
|
|
if len(values) < 2 {
|
|
|
|
return nan
|
2021-12-20 11:32:43 +01:00
|
|
|
}
|
2021-12-20 12:13:16 +01:00
|
|
|
return values[len(values)-1] - values[0]
|
2021-12-20 11:32:43 +01:00
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
// rollupIdelta returns the difference between the last two raw samples on the window.
func rollupIdelta(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	if len(values) == 0 {
		if math.IsNaN(rfa.prevValue) {
			return nan
		}
		// Assume that the value didn't change on the given interval.
		return 0
	}
	lastValue := values[len(values)-1]
	values = values[:len(values)-1]
	if len(values) == 0 {
		// Only a single sample on the window - fall back to the value before the window.
		prevValue := rfa.prevValue
		if math.IsNaN(prevValue) {
			// Assume that the previous non-existing value was 0.
			return lastValue
		}
		return lastValue - prevValue
	}
	return lastValue - values[len(values)-1]
}
|
|
|
|
|
2019-06-21 21:54:32 +02:00
|
|
|
func rollupDerivSlow(rfa *rollupFuncArg) float64 {
|
|
|
|
// Use linear regression like Prometheus does.
|
|
|
|
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/73
|
2022-11-16 23:38:48 +01:00
|
|
|
_, k := linearRegression(rfa.values, rfa.timestamps, rfa.currTimestamp)
|
2019-06-21 21:54:32 +02:00
|
|
|
return k
|
|
|
|
}
|
|
|
|
|
|
|
|
// rollupDerivFast returns the per-second derivative calculated from the first
// and the last points only (prior point included when available).
func rollupDerivFast(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	timestamps := rfa.timestamps
	prevValue := rfa.prevValue
	prevTimestamp := rfa.prevTimestamp
	if math.IsNaN(prevValue) {
		if len(values) == 0 {
			return nan
		}
		if len(values) == 1 {
			// It is impossible to determine the duration during which the value changed
			// from 0 to the current value.
			// The following attempts didn't work well:
			// - using scrape interval as the duration. It fails on Prometheus restarts when it
			//   skips scraping for the counter. This results in too high rate() value for the first point
			//   after Prometheus restarts.
			// - using window or step as the duration. It results in too small rate() values for the first
			//   points of time series.
			//
			// So just return nan
			return nan
		}
		// No prior point - anchor on the first sample inside the window.
		prevValue = values[0]
		prevTimestamp = timestamps[0]
	} else if len(values) == 0 {
		// Assume that the value didn't change on the given interval.
		return 0
	}
	vEnd := values[len(values)-1]
	tEnd := timestamps[len(timestamps)-1]
	dv := vEnd - prevValue
	// Timestamps are in milliseconds; convert to seconds.
	dt := float64(tEnd-prevTimestamp) / 1e3
	return dv / dt
}
|
|
|
|
|
|
|
|
// rollupIderiv returns the per-second derivative calculated from the last two
// raw samples on the window, skipping samples with duplicate timestamps.
func rollupIderiv(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	timestamps := rfa.timestamps
	if len(values) < 2 {
		if len(values) == 0 {
			return nan
		}
		if math.IsNaN(rfa.prevValue) {
			// It is impossible to determine the duration during which the value changed
			// from 0 to the current value.
			// The following attempts didn't work well:
			// - using scrape interval as the duration. It fails on Prometheus restarts when it
			//   skips scraping for the counter. This results in too high rate() value for the first point
			//   after Prometheus restarts.
			// - using window or step as the duration. It results in too small rate() values for the first
			//   points of time series.
			//
			// So just return nan
			return nan
		}
		// Single sample on the window: derive against the point before the window.
		return (values[0] - rfa.prevValue) / (float64(timestamps[0]-rfa.prevTimestamp) / 1e3)
	}
	vEnd := values[len(values)-1]
	tEnd := timestamps[len(timestamps)-1]
	values = values[:len(values)-1]
	timestamps = timestamps[:len(timestamps)-1]
	// Skip data points with duplicate timestamps.
	for len(timestamps) > 0 && timestamps[len(timestamps)-1] >= tEnd {
		timestamps = timestamps[:len(timestamps)-1]
	}
	var tStart int64
	var vStart float64
	if len(timestamps) == 0 {
		if math.IsNaN(rfa.prevValue) {
			return 0
		}
		tStart = rfa.prevTimestamp
		vStart = rfa.prevValue
	} else {
		tStart = timestamps[len(timestamps)-1]
		// Index values by len(timestamps): values was not trimmed in the
		// duplicate-skipping loop, so this picks the value paired with tStart.
		vStart = values[len(timestamps)-1]
	}
	dv := vEnd - vStart
	dt := tEnd - tStart
	return dv / (float64(dt) / 1e3)
}
|
|
|
|
|
2019-08-16 10:57:56 +02:00
|
|
|
func rollupLifetime(rfa *rollupFuncArg) float64 {
|
|
|
|
// Calculate the duration between the first and the last data points.
|
|
|
|
timestamps := rfa.timestamps
|
|
|
|
if math.IsNaN(rfa.prevValue) {
|
|
|
|
if len(timestamps) < 2 {
|
|
|
|
return nan
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return float64(timestamps[len(timestamps)-1]-timestamps[0]) / 1e3
|
2019-08-16 10:57:56 +02:00
|
|
|
}
|
|
|
|
if len(timestamps) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return float64(timestamps[len(timestamps)-1]-rfa.prevTimestamp) / 1e3
|
2019-11-01 11:21:12 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
func rollupLag(rfa *rollupFuncArg) float64 {
|
|
|
|
// Calculate the duration between the current timestamp and the last data point.
|
|
|
|
timestamps := rfa.timestamps
|
|
|
|
if len(timestamps) == 0 {
|
|
|
|
if math.IsNaN(rfa.prevValue) {
|
|
|
|
return nan
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return float64(rfa.currTimestamp-rfa.prevTimestamp) / 1e3
|
2019-11-01 11:21:12 +01:00
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return float64(rfa.currTimestamp-timestamps[len(timestamps)-1]) / 1e3
|
2019-08-16 10:57:56 +02:00
|
|
|
}
|
|
|
|
|
2019-08-18 20:07:59 +02:00
|
|
|
func rollupScrapeInterval(rfa *rollupFuncArg) float64 {
|
|
|
|
// Calculate the average interval between data points.
|
|
|
|
timestamps := rfa.timestamps
|
|
|
|
if math.IsNaN(rfa.prevValue) {
|
|
|
|
if len(timestamps) < 2 {
|
|
|
|
return nan
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return (float64(timestamps[len(timestamps)-1]-timestamps[0]) / 1e3) / float64(len(timestamps)-1)
|
2019-08-18 20:07:59 +02:00
|
|
|
}
|
|
|
|
if len(timestamps) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
2020-09-08 13:00:47 +02:00
|
|
|
return (float64(timestamps[len(timestamps)-1]-rfa.prevTimestamp) / 1e3) / float64(len(timestamps))
|
2019-08-18 20:07:59 +02:00
|
|
|
}
|
|
|
|
|
2021-12-20 11:32:43 +01:00
|
|
|
// rollupChangesPrometheus returns the number of value changes among the raw
// samples on the window, ignoring the value before the window like Prometheus does.
func rollupChangesPrometheus(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	// Do not take into account rfa.prevValue like Prometheus does.
	// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1962
	if len(values) < 1 {
		return nan
	}
	prevValue := values[0]
	n := 0
	for _, v := range values[1:] {
		if v != prevValue {
			if math.Abs(v-prevValue) < 1e-12*math.Abs(v) {
				// This may be precision error. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/767#issuecomment-1650932203
				continue
			}
			n++
			prevValue = v
		}
	}
	return float64(n)
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
// rollupChanges returns the number of value changes on the window, taking into
// account the value before the window when it is available.
func rollupChanges(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	prevValue := rfa.prevValue
	n := 0
	if math.IsNaN(prevValue) {
		if len(values) == 0 {
			return nan
		}
		// No point before the window - the first sample itself counts as a change.
		prevValue = values[0]
		values = values[1:]
		n++
	}
	for _, v := range values {
		if v != prevValue {
			if math.Abs(v-prevValue) < 1e-12*math.Abs(v) {
				// This may be precision error. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/767#issuecomment-1650932203
				continue
			}
			n++
			prevValue = v
		}
	}
	return float64(n)
}
|
|
|
|
|
2019-09-25 19:36:51 +02:00
|
|
|
// rollupIncreases returns the number of value increases on the window.
func rollupIncreases(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	if len(values) == 0 {
		if math.IsNaN(rfa.prevValue) {
			return nan
		}
		return 0
	}
	prevValue := rfa.prevValue
	if math.IsNaN(prevValue) {
		// No point before the window - anchor on the first sample.
		prevValue = values[0]
		values = values[1:]
	}
	if len(values) == 0 {
		return 0
	}
	n := 0
	for _, v := range values {
		if v > prevValue {
			if math.Abs(v-prevValue) < 1e-12*math.Abs(v) {
				// This may be precision error. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/767#issuecomment-1650932203
				continue
			}
			n++
		}
		prevValue = v
	}
	return float64(n)
}
|
|
|
|
|
|
|
|
// `decreases_over_time` logic is the same as `resets` logic,
// so reuse rollupResets directly.
var rollupDecreases = rollupResets
|
2019-05-22 23:16:55 +02:00
|
|
|
// rollupResets returns the number of value decreases (counter resets) on the window.
func rollupResets(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	if len(values) == 0 {
		if math.IsNaN(rfa.prevValue) {
			return nan
		}
		return 0
	}
	prevValue := rfa.prevValue
	if math.IsNaN(prevValue) {
		// No point before the window - anchor on the first sample.
		prevValue = values[0]
		values = values[1:]
	}
	if len(values) == 0 {
		return 0
	}
	n := 0
	for _, v := range values {
		if v < prevValue {
			if math.Abs(v-prevValue) < 1e-12*math.Abs(v) {
				// This may be precision error. See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/767#issuecomment-1650932203
				continue
			}
			n++
		}
		prevValue = v
	}
	return float64(n)
}
|
|
|
|
|
2020-02-04 21:42:10 +01:00
|
|
|
// getCandlestickValues returns a subset of rfa.values suitable for rollup_candlestick
//
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/309 for details.
func getCandlestickValues(rfa *rollupFuncArg) []float64 {
	currTimestamp := rfa.currTimestamp
	timestamps := rfa.timestamps
	// Drop trailing samples whose timestamps reach the current window end,
	// so each candlestick interval is half-open: [start, end).
	for len(timestamps) > 0 && timestamps[len(timestamps)-1] >= currTimestamp {
		timestamps = timestamps[:len(timestamps)-1]
	}
	if len(timestamps) == 0 {
		return nil
	}
	// values and timestamps are parallel slices, so the kept prefix has the same length.
	return rfa.values[:len(timestamps)]
}
|
|
|
|
|
2020-02-24 12:24:18 +01:00
|
|
|
func getFirstValueForCandlestick(rfa *rollupFuncArg) float64 {
|
2020-07-29 11:36:10 +02:00
|
|
|
if rfa.prevTimestamp+rfa.window >= rfa.currTimestamp {
|
2020-02-23 17:03:52 +01:00
|
|
|
return rfa.prevValue
|
|
|
|
}
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
|
2020-02-04 21:42:10 +01:00
|
|
|
func rollupOpen(rfa *rollupFuncArg) float64 {
|
2020-02-24 12:24:18 +01:00
|
|
|
v := getFirstValueForCandlestick(rfa)
|
|
|
|
if !math.IsNaN(v) {
|
|
|
|
return v
|
|
|
|
}
|
2020-02-04 21:42:10 +01:00
|
|
|
values := getCandlestickValues(rfa)
|
|
|
|
if len(values) == 0 {
|
2020-02-24 12:24:18 +01:00
|
|
|
return nan
|
2020-02-04 21:42:10 +01:00
|
|
|
}
|
|
|
|
return values[0]
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupClose(rfa *rollupFuncArg) float64 {
|
|
|
|
values := getCandlestickValues(rfa)
|
|
|
|
if len(values) == 0 {
|
2020-02-24 12:24:18 +01:00
|
|
|
return getFirstValueForCandlestick(rfa)
|
2020-02-04 21:42:10 +01:00
|
|
|
}
|
|
|
|
return values[len(values)-1]
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupHigh(rfa *rollupFuncArg) float64 {
|
|
|
|
values := getCandlestickValues(rfa)
|
2020-02-24 12:24:18 +01:00
|
|
|
max := getFirstValueForCandlestick(rfa)
|
|
|
|
if math.IsNaN(max) {
|
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
max = values[0]
|
|
|
|
values = values[1:]
|
2020-02-04 21:42:10 +01:00
|
|
|
}
|
2020-02-24 12:24:18 +01:00
|
|
|
for _, v := range values {
|
2020-02-04 21:42:10 +01:00
|
|
|
if v > max {
|
|
|
|
max = v
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return max
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupLow(rfa *rollupFuncArg) float64 {
|
|
|
|
values := getCandlestickValues(rfa)
|
2020-02-24 12:24:18 +01:00
|
|
|
min := getFirstValueForCandlestick(rfa)
|
|
|
|
if math.IsNaN(min) {
|
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
min = values[0]
|
|
|
|
values = values[1:]
|
2020-02-04 21:42:10 +01:00
|
|
|
}
|
2020-02-24 12:24:18 +01:00
|
|
|
for _, v := range values {
|
2020-02-04 21:42:10 +01:00
|
|
|
if v < min {
|
|
|
|
min = v
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return min
|
|
|
|
}
|
|
|
|
|
2020-07-17 17:28:19 +02:00
|
|
|
// rollupModeOverTime returns the most frequent value among the raw samples on the window.
func rollupModeOverTime(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.

	// Copy rfa.values to a.A, since modeNoNaNs modifies a.A contents.
	a := getFloat64s()
	a.A = append(a.A[:0], rfa.values...)
	result := modeNoNaNs(rfa.prevValue, a.A)
	putFloat64s(a)
	return result
}
|
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
func getFloat64s() *float64s {
|
|
|
|
v := float64sPool.Get()
|
|
|
|
if v == nil {
|
|
|
|
v = &float64s{}
|
|
|
|
}
|
|
|
|
return v.(*float64s)
|
|
|
|
}
|
|
|
|
|
|
|
|
func putFloat64s(a *float64s) {
|
|
|
|
a.A = a.A[:0]
|
|
|
|
float64sPool.Put(a)
|
2020-10-13 10:56:53 +02:00
|
|
|
}
|
|
|
|
|
2021-09-27 17:55:35 +02:00
|
|
|
// float64sPool holds reusable float64s buffers in order to reduce allocations.
var float64sPool sync.Pool
|
|
|
|
|
2020-10-13 10:56:53 +02:00
|
|
|
// float64s is a reusable float64 slice wrapper pooled via float64sPool.
type float64s struct {
	// A is the underlying scratch slice.
	A []float64
}
|
|
|
|
|
2020-05-21 11:05:08 +02:00
|
|
|
func rollupAscentOverTime(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
prevValue := rfa.prevValue
|
|
|
|
if math.IsNaN(prevValue) {
|
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
prevValue = values[0]
|
|
|
|
values = values[1:]
|
|
|
|
}
|
|
|
|
s := float64(0)
|
|
|
|
for _, v := range values {
|
|
|
|
d := v - prevValue
|
|
|
|
if d > 0 {
|
|
|
|
s += d
|
|
|
|
}
|
|
|
|
prevValue = v
|
|
|
|
}
|
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
|
|
|
func rollupDescentOverTime(rfa *rollupFuncArg) float64 {
|
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
prevValue := rfa.prevValue
|
|
|
|
if math.IsNaN(prevValue) {
|
|
|
|
if len(values) == 0 {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
prevValue = values[0]
|
|
|
|
values = values[1:]
|
|
|
|
}
|
|
|
|
s := float64(0)
|
|
|
|
for _, v := range values {
|
|
|
|
d := prevValue - v
|
|
|
|
if d > 0 {
|
|
|
|
s += d
|
|
|
|
}
|
|
|
|
prevValue = v
|
|
|
|
}
|
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
2020-08-03 20:51:15 +02:00
|
|
|
func rollupZScoreOverTime(rfa *rollupFuncArg) float64 {
|
|
|
|
// See https://about.gitlab.com/blog/2019/07/23/anomaly-detection-using-prometheus/#using-z-score-for-anomaly-detection
|
|
|
|
scrapeInterval := rollupScrapeInterval(rfa)
|
|
|
|
lag := rollupLag(rfa)
|
|
|
|
if math.IsNaN(scrapeInterval) || math.IsNaN(lag) || lag > scrapeInterval {
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
d := rollupLast(rfa) - rollupAvg(rfa)
|
|
|
|
if d == 0 {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
return d / rollupStddev(rfa)
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func rollupFirst(rfa *rollupFuncArg) float64 {
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2020-01-10 23:24:31 +01:00
|
|
|
// Do not take into account rfa.prevValue, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
2019-05-22 23:16:55 +02:00
|
|
|
return nan
|
|
|
|
}
|
|
|
|
return values[0]
|
|
|
|
}
|
|
|
|
|
2023-12-21 10:22:19 +01:00
|
|
|
// rollupLast returns the last value on the given window; it is an alias for rollupDefault.
var rollupLast = rollupDefault
|
|
|
|
|
2021-08-13 11:10:00 +02:00
|
|
|
func rollupDefault(rfa *rollupFuncArg) float64 {
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
|
|
|
// Do not take into account rfa.prevValue, since it may lead
|
|
|
|
// to inconsistent results comparing to Prometheus on broken time series
|
|
|
|
// with irregular data points.
|
|
|
|
return nan
|
|
|
|
}
|
|
|
|
// Intentionally do not skip the possible last Prometheus staleness mark.
|
|
|
|
// See https://github.com/VictoriaMetrics/VictoriaMetrics/issues/1526 .
|
|
|
|
return values[len(values)-1]
|
|
|
|
}
|
2019-05-22 23:16:55 +02:00
|
|
|
|
|
|
|
func rollupDistinct(rfa *rollupFuncArg) float64 {
|
2019-06-21 22:22:35 +02:00
|
|
|
// There is no need in handling NaNs here, since they must be cleaned up
|
2019-05-22 23:16:55 +02:00
|
|
|
// before calling rollup funcs.
|
|
|
|
values := rfa.values
|
|
|
|
if len(values) == 0 {
|
2024-02-21 17:47:49 +01:00
|
|
|
return nan
|
2019-05-22 23:16:55 +02:00
|
|
|
}
|
|
|
|
m := make(map[float64]struct{})
|
|
|
|
for _, v := range values {
|
|
|
|
m[v] = struct{}{}
|
|
|
|
}
|
|
|
|
return float64(len(m))
|
|
|
|
}
|
|
|
|
|
|
|
|
// rollupIntegrate returns the integral of the series over the given window
// via the left rectangle rule, i.e. the sum of value*dt with dt in seconds.
func rollupIntegrate(rfa *rollupFuncArg) float64 {
	// There is no need in handling NaNs here, since they must be cleaned up
	// before calling rollup funcs.
	values := rfa.values
	timestamps := rfa.timestamps
	prevValue := rfa.prevValue
	// Start integrating from the window start by default.
	prevTimestamp := rfa.currTimestamp - rfa.window
	if math.IsNaN(prevValue) {
		// There is no sample just before the window -
		// start from the first sample inside the window instead.
		if len(values) == 0 {
			return nan
		}
		prevValue = values[0]
		prevTimestamp = timestamps[0]
		values = values[1:]
		timestamps = timestamps[1:]
	}

	var sum float64
	for i, v := range values {
		timestamp := timestamps[i]
		// Timestamps are in milliseconds; divide by 1e3 to obtain seconds.
		dt := float64(timestamp-prevTimestamp) / 1e3
		// Left rectangle rule: the previous value holds until the next sample.
		sum += prevValue * dt
		prevTimestamp = timestamp
		prevValue = v
	}
	// Account for the interval between the last sample and the window end.
	dt := float64(rfa.currTimestamp-prevTimestamp) / 1e3
	sum += prevValue * dt
	return sum
}
|
|
|
|
|
2023-09-01 09:34:16 +02:00
|
|
|
// rollupFake is a stub for rollup funcs which are evaluated by dedicated code paths;
// it must never be invoked, so it panics to surface the bug immediately.
func rollupFake(_ *rollupFuncArg) float64 {
	logger.Panicf("BUG: rollupFake shouldn't be called")
	return 0
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
func getScalar(arg any, argNum int) ([]float64, error) {
|
2019-05-22 23:16:55 +02:00
|
|
|
ts, ok := arg.([]*timeseries)
|
|
|
|
if !ok {
|
|
|
|
return nil, fmt.Errorf(`unexpected type for arg #%d; got %T; want %T`, argNum+1, arg, ts)
|
|
|
|
}
|
|
|
|
if len(ts) != 1 {
|
|
|
|
return nil, fmt.Errorf(`arg #%d must contain a single timeseries; got %d timeseries`, argNum+1, len(ts))
|
|
|
|
}
|
|
|
|
return ts[0].Values, nil
|
|
|
|
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
func getIntNumber(arg any, argNum int) (int, error) {
|
2021-11-03 15:02:20 +01:00
|
|
|
v, err := getScalar(arg, argNum)
|
|
|
|
if err != nil {
|
|
|
|
return 0, err
|
|
|
|
}
|
|
|
|
n := 0
|
|
|
|
if len(v) > 0 {
|
2022-12-11 07:47:30 +01:00
|
|
|
n = floatToIntBounded(v[0])
|
2021-11-03 15:02:20 +01:00
|
|
|
}
|
|
|
|
return n, nil
|
|
|
|
}
|
|
|
|
|
2019-05-22 23:16:55 +02:00
|
|
|
func getString(tss []*timeseries, argNum int) (string, error) {
|
|
|
|
if len(tss) != 1 {
|
|
|
|
return "", fmt.Errorf(`arg #%d must contain a single timeseries; got %d timeseries`, argNum+1, len(tss))
|
|
|
|
}
|
|
|
|
ts := tss[0]
|
|
|
|
for _, v := range ts.Values {
|
|
|
|
if !math.IsNaN(v) {
|
|
|
|
return "", fmt.Errorf(`arg #%d contains non-string timeseries`, argNum+1)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return string(ts.MetricName.MetricGroup), nil
|
|
|
|
}
|
|
|
|
|
2024-07-10 00:14:15 +02:00
|
|
|
// expectRollupArgsNum returns an error if the number of args differs from expectedNum.
func expectRollupArgsNum(args []any, expectedNum int) error {
	if len(args) != expectedNum {
		return fmt.Errorf(`unexpected number of args; got %d; want %d`, len(args), expectedNum)
	}
	return nil
}
|
|
|
|
|
|
|
|
func getRollupFuncArg() *rollupFuncArg {
|
|
|
|
v := rfaPool.Get()
|
|
|
|
if v == nil {
|
|
|
|
return &rollupFuncArg{}
|
|
|
|
}
|
|
|
|
return v.(*rollupFuncArg)
|
|
|
|
}
|
|
|
|
|
|
|
|
// putRollupFuncArg returns rfa to the pool, so it can be reused via getRollupFuncArg.
func putRollupFuncArg(rfa *rollupFuncArg) {
	// Clear the state before pooling, so the instance doesn't retain
	// references to the previously processed data.
	rfa.reset()
	rfaPool.Put(rfa)
}
|
|
|
|
|
|
|
|
// rfaPool is a pool of rollupFuncArg instances for reducing memory allocations.
var rfaPool sync.Pool
|