package main

import (
	"context"
	"math/rand"
	"os"
	"strings"
	"sync"
	"testing"
	"time"

	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/config"
	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/datasource"
	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/notifier"
	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/remotewrite"
	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/rule"
	"github.com/VictoriaMetrics/VictoriaMetrics/app/vmalert/templates"
)

func TestMain(m *testing.M) {
	if err := templates.Load([]string{"testdata/templates/*good.tmpl"}, true); err != nil {
		os.Exit(1)
	}
	os.Exit(m.Run())
}
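
// Note: templates.Load above parses Go text/template definitions used by the
// alerting rules under test. A file matching the *good.tmpl glob is assumed
// to contain blocks such as the following (illustrative sketch only, not the
// actual testdata contents):
//
//	{{ define "severity" }}warning{{ end }}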

// TestManagerEmptyRulesDir tests the successful case of
// starting with an empty rules folder.
func TestManagerEmptyRulesDir(t *testing.T) {
	m := &manager{groups: make(map[uint64]*rule.Group)}
	cfg := loadCfg(t, []string{"foo/bar"}, true, true)
	if err := m.update(context.Background(), cfg, false); err != nil {
		t.Fatalf("expected to load successfully with empty rules dir; got err instead: %v", err)
	}
}

// TestManagerUpdateConcurrent tests concurrent
// execution of configuration updates.
// The test should be executed with the -race flag.
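//
// A typical invocation (assuming the command is run from this package's
// directory) would be:
//
//	go test -race -run TestManagerUpdateConcurrent .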
func TestManagerUpdateConcurrent(t *testing.T) {
	m := &manager{
		groups:         make(map[uint64]*rule.Group),
		querierBuilder: &datasource.FakeQuerier{},
		notifiers:      func() []notifier.Notifier { return []notifier.Notifier{&notifier.FakeNotifier{}} },
	}
	paths := []string{
		"config/testdata/dir/rules0-good.rules",
		"config/testdata/dir/rules0-bad.rules",
		"config/testdata/dir/rules1-good.rules",
		"config/testdata/dir/rules1-bad.rules",
		"config/testdata/rules/rules0-good.rules",
		"config/testdata/rules/rules1-good.rules",
		"config/testdata/rules/rules2-good.rules",
	}
	// speed up evaluation for the duration of the test and restore
	// the original interval when done
	evalInterval := *evaluationInterval
	defer func() { *evaluationInterval = evalInterval }()
	*evaluationInterval = time.Millisecond
	cfg := loadCfg(t, []string{paths[0]}, true, true)
	if err := m.start(context.Background(), cfg); err != nil {
		t.Fatalf("failed to start: %s", err)
	}

	const workers = 500
	const iterations = 10
	wg := sync.WaitGroup{}
	wg.Add(workers)
	for i := 0; i < workers; i++ {
		go func(n int) {
			defer wg.Done()
			// each worker uses its own rand source seeded with the worker index
			r := rand.New(rand.NewSource(int64(n)))
			for i := 0; i < iterations; i++ {
				rnd := r.Intn(len(paths))
				cfg, err := config.Parse([]string{paths[rnd]}, notifier.ValidateTemplates, true)
				if err != nil { // update can fail and this is expected
					continue
				}
				_ = m.update(context.Background(), cfg, false)
			}
		}(i)
	}
	wg.Wait()
}

// TestManagerUpdate tests sequential configuration updates.
func TestManagerUpdate(t *testing.T) {
	const defaultEvalInterval = time.Second * 30
	currentEvalInterval := *evaluationInterval
	*evaluationInterval = defaultEvalInterval
	defer func() {
		*evaluationInterval = currentEvalInterval
	}()

	var (
		VMRows = &rule.AlertingRule{
			Name: "VMRows",
			Expr: "vm_rows > 0",
			For:  10 * time.Second,
			Labels: map[string]string{
				"label": "bar",
				"host":  "{{ $labels.instance }}",
			},
			Annotations: map[string]string{
				"summary":     "{{ $value|humanize }}",
				"description": "{{$labels}}",
			},
		}
		Conns = &rule.AlertingRule{
			Name: "Conns",
			Expr: "sum(vm_tcplistener_conns) by(instance) > 1",
			Annotations: map[string]string{
				"summary":     "Too high connection number for {{$labels.instance}}",
				"description": "It is {{ $value }} connections for {{$labels.instance}}",
			},
		}
		ExampleAlertAlwaysFiring = &rule.AlertingRule{
			Name: "ExampleAlertAlwaysFiring",
			Expr: "sum by(job) (up == 1)",
		}
	)

	testCases := []struct {
		name       string
		initPath   string
		updatePath string
		want       []*rule.Group
	}{
		{
			name:       "update good rules",
			initPath:   "config/testdata/rules/rules0-good.rules",
			updatePath: "config/testdata/dir/rules1-good.rules",
			want: []*rule.Group{
				{
					File:     "config/testdata/dir/rules1-good.rules",
					Name:     "duplicatedGroupDiffFiles",
					Type:     config.NewPrometheusType(),
					Interval: defaultEvalInterval,
					Rules: []rule.Rule{
						&rule.AlertingRule{
							Name:   "VMRows",
							Expr:   "vm_rows > 0",
							For:    5 * time.Minute,
							Labels: map[string]string{"dc": "gcp", "label": "bar"},
							Annotations: map[string]string{
								"summary":     "{{ $value }}",
								"description": "{{$labels}}",
							},
						},
					},
				},
			},
		},
		{
			name:       "update good rules from 1 to 2 groups",
			initPath:   "config/testdata/dir/rules/rules1-good.rules",
			updatePath: "config/testdata/rules/rules0-good.rules",
			want: []*rule.Group{
				{
					File:     "config/testdata/rules/rules0-good.rules",
					Name:     "groupGorSingleAlert",
					Type:     config.NewPrometheusType(),
					Interval: defaultEvalInterval,
					Rules:    []rule.Rule{VMRows},
				},
				{
					File:     "config/testdata/rules/rules0-good.rules",
					Interval: defaultEvalInterval,
					Type:     config.NewPrometheusType(),
					Name:     "TestGroup",
					Rules: []rule.Rule{
						Conns,
						ExampleAlertAlwaysFiring,
					},
				},
			},
		},
		{
			name:       "update with one bad rule file",
			initPath:   "config/testdata/rules/rules0-good.rules",
			updatePath: "config/testdata/dir/rules2-bad.rules",
			want: []*rule.Group{
				{
					File:     "config/testdata/rules/rules0-good.rules",
					Name:     "groupGorSingleAlert",
					Type:     config.NewPrometheusType(),
					Interval: defaultEvalInterval,
					Rules:    []rule.Rule{VMRows},
				},
				{
					File:     "config/testdata/rules/rules0-good.rules",
					Interval: defaultEvalInterval,
					Name:     "TestGroup",
					Type:     config.NewPrometheusType(),
					Rules: []rule.Rule{
						Conns,
						ExampleAlertAlwaysFiring,
					},
				},
			},
		},
		{
			name:       "update empty dir rules from 0 to 2 groups",
			initPath:   "config/testdata/empty/*",
			updatePath: "config/testdata/rules/rules0-good.rules",
			want: []*rule.Group{
				{
					File:     "config/testdata/rules/rules0-good.rules",
					Name:     "groupGorSingleAlert",
					Type:     config.NewPrometheusType(),
					Interval: defaultEvalInterval,
					Rules:    []rule.Rule{VMRows},
				},
				{
					File:     "config/testdata/rules/rules0-good.rules",
					Interval: defaultEvalInterval,
					Type:     config.NewPrometheusType(),
					Name:     "TestGroup",
					Rules: []rule.Rule{
						Conns,
						ExampleAlertAlwaysFiring,
					},
				},
			},
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			ctx, cancel := context.WithCancel(context.TODO())
			m := &manager{
				groups:         make(map[uint64]*rule.Group),
				querierBuilder: &datasource.FakeQuerier{},
				notifiers:      func() []notifier.Notifier { return []notifier.Notifier{&notifier.FakeNotifier{}} },
			}

			cfgInit := loadCfg(t, []string{tc.initPath}, true, true)
			if err := m.update(ctx, cfgInit, false); err != nil {
				t.Fatalf("failed to complete initial rules update: %s", err)
			}

			cfgUpdate, err := config.Parse([]string{tc.updatePath}, notifier.ValidateTemplates, true)
			if err == nil { // update can fail and that's expected
				_ = m.update(ctx, cfgUpdate, false)
			}
			if len(tc.want) != len(m.groups) {
				t.Fatalf("\nwant number of groups: %d;\ngot: %d", len(tc.want), len(m.groups))
			}

			for _, wantG := range tc.want {
				gotG, ok := m.groups[wantG.ID()]
				if !ok {
					t.Fatalf("expected to have group %q", wantG.Name)
				}
				compareGroups(t, wantG, gotG)
			}

			cancel()
			m.close()
		})
	}
}

func compareGroups(t *testing.T, a, b *rule.Group) {
	t.Helper()
	if a.Name != b.Name {
		t.Fatalf("expected group name %q; got %q", a.Name, b.Name)
	}
	if a.File != b.File {
		t.Fatalf("expected group %q file name %q; got %q", a.Name, a.File, b.File)
	}
	if a.Interval != b.Interval {
		t.Fatalf("expected group %q interval %v; got %v", a.Name, a.Interval, b.Interval)
	}
	if len(a.Rules) != len(b.Rules) {
		t.Fatalf("expected group %s to have %d rules; got: %d",
			a.Name, len(a.Rules), len(b.Rules))
	}
	for i, r := range a.Rules {
		got, want := r, b.Rules[i]
		if a.ID() != b.ID() {
			t.Fatalf("expected to have rule %q; got %q", want.ID(), got.ID())
		}
		if err := rule.CompareRules(t, want, got); err != nil {
			t.Fatalf("comparison error: %s", err)
		}
	}
}

func TestManagerUpdateNegative(t *testing.T) {
	testCases := []struct {
		notifiers []notifier.Notifier
		rw        remotewrite.RWClient
		cfg       config.Group
		expErr    string
	}{
		{
			nil,
			nil,
			config.Group{
				Name: "Recording rule only",
				Rules: []config.Rule{
					{Record: "record", Expr: "max(up)"},
				},
			},
			"contains recording rules",
		},
		{
			nil,
			nil,
			config.Group{
				Name: "Alerting rule only",
				Rules: []config.Rule{
					{Alert: "alert", Expr: "up > 0"},
				},
			},
			"contains alerting rules",
		},
		{
			[]notifier.Notifier{&notifier.FakeNotifier{}},
			nil,
			config.Group{
				Name: "Recording and alerting rules",
				Rules: []config.Rule{
					{Alert: "alert1", Expr: "up > 0"},
					{Alert: "alert2", Expr: "up > 0"},
					{Record: "record", Expr: "max(up)"},
				},
			},
			"contains recording rules",
		},
		{
			nil,
			&remotewrite.Client{},
			config.Group{
				Name: "Recording and alerting rules",
				Rules: []config.Rule{
					{Record: "record1", Expr: "max(up)"},
					{Record: "record2", Expr: "max(up)"},
					{Alert: "alert", Expr: "up > 0"},
				},
			},
			"contains alerting rules",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.cfg.Name, func(t *testing.T) {
			m := &manager{
				groups:         make(map[uint64]*rule.Group),
				querierBuilder: &datasource.FakeQuerier{},
				rw:             tc.rw,
			}
			if tc.notifiers != nil {
				m.notifiers = func() []notifier.Notifier { return tc.notifiers }
			}
			err := m.update(context.Background(), []config.Group{tc.cfg}, false)
			if err == nil {
				t.Fatalf("expected to get error; got nil")
			}
			if !strings.Contains(err.Error(), tc.expErr) {
				t.Fatalf("expected err to contain %q; got %q", tc.expErr, err)
			}
		})
	}
}

func loadCfg(t *testing.T, path []string, validateAnnotations, validateExpressions bool) []config.Group {
	t.Helper()
	var validateTplFn config.ValidateTplFn
	if validateAnnotations {
		validateTplFn = notifier.ValidateTemplates
	}
	cfg, err := config.Parse(path, validateTplFn, validateExpressions)
	if err != nil {
		t.Fatal(err)
	}
	return cfg
}