test(metrics): add metrics labels test #1971

Merged 1 commit on Dec 16, 2021
2 changes: 1 addition & 1 deletion tests/integration/Makefile
@@ -20,7 +20,7 @@ delete-cluster:
.PHONY: test
test:
KIND_NODE_IMAGE=$(KIND_NODE_IMAGE) \
go test -timeout 20m -v -count 1 -run=$(TEST_NAME) .
go test -timeout 20m -v -count 1 -run=$(TEST_NAME) . -args $(TEST_ARGS)

.PHONY: gomod-tidy
gomod-tidy:
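Since `-args` tells `go test` to pass every following argument straight to the compiled test binary, anything placed in `TEST_ARGS` reaches the e2e framework's flag parser untouched. For illustration (the `KIND_NODE_IMAGE` value below is just a placeholder), a call like `make test TEST_NAME="Test_Helm_Default" TEST_ARGS="--assess '(metrics)'"` expands to roughly:

```shell
KIND_NODE_IMAGE=kindest/node:v1.21.1 \
go test -timeout 20m -v -count 1 -run=Test_Helm_Default . -args --assess '(metrics)'
```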
22 changes: 20 additions & 2 deletions tests/integration/README.md
@@ -99,7 +99,7 @@ The test framework has the following runtime options that can be utilized:
HELM_NO_DEPENDENCY_UPDATE=1 make test
```

## Running specified tests
## Filtering tests

The [testing framework][sig_e2e_testing_harness] used in these tests allows filtering
tests by feature names, labels, and step names.
@@ -108,7 +108,9 @@
This might be handy if you'd like to use those abstractions, but the consequence is
that all the setup and/or teardown code still runs, so e.g. all the kind clusters
that were supposed to be created for tests that got filtered out will be created anyway.

Because of that reason we suggest to use `go` related test filtering like so:
### Running specific tests

In order to run specific tests, you can use `go test`'s built-in filtering like so:

```shell
make test TEST_NAME="Test_Helm_Default"
@@ -120,7 +122,23 @@
or
go test -v -count 1 -run=Test_Helm_Default .
```

### Running specific features/assessments

In order to run specific features or assessments, you can use the `TEST_ARGS` Makefile
variable to pass the [e2e framework's flags][sig_e2e_testing_harness_filtering_tests]:
```shell
make test TEST_NAME="Test_Helm_Default_OT_Metadata" TEST_ARGS="--assess '(metrics)'"
```
or
```shell
make test TEST_NAME="Test_Helm_Default_OT_Metadata" TEST_ARGS="--feature '(installation)'"
```
[sig_e2e_testing_harness]: https://github.com/kubernetes-sigs/e2e-framework/blob/main/docs/design/test-harness-framework.md
[sig_e2e_testing_harness_filtering_tests]: https://github.com/kubernetes-sigs/e2e-framework/blob/fee1391aeccdc260069bd5e0b25c6b187c2293c4/docs/design/test-harness-framework.md#filtering-feature-tests
## K8s node images matrix
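For context on how the flags passed via `TEST_ARGS` are consumed: the e2e framework parses them from the test binary's arguments, typically in `TestMain` via `envconf.NewFromFlags()`. Below is a minimal sketch of that wiring, assuming the framework's standard setup pattern (this repository's actual `TestMain` may differ):

```go
package integration

import (
	"os"
	"testing"

	"sigs.k8s.io/e2e-framework/pkg/env"
	"sigs.k8s.io/e2e-framework/pkg/envconf"
)

var testenv env.Environment

func TestMain(m *testing.M) {
	// NewFromFlags registers and parses the framework's CLI flags
	// (--assess, --feature, ...) from os.Args, i.e. everything that
	// `go test ... -args $(TEST_ARGS)` forwarded to this binary.
	cfg, err := envconf.NewFromFlags()
	if err != nil {
		os.Exit(1)
	}
	testenv = env.NewWithConfig(cfg)
	os.Exit(testenv.Run(m))
}
```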
2 changes: 1 addition & 1 deletion tests/integration/go.mod
@@ -7,6 +7,7 @@ require (
github.com/stretchr/testify v1.7.0
k8s.io/api v0.22.4
k8s.io/apimachinery v0.22.4
k8s.io/klog/v2 v2.9.0
sigs.k8s.io/e2e-framework v0.0.5
)

@@ -58,7 +59,6 @@ require (
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
k8s.io/client-go v0.22.4 // indirect
k8s.io/klog/v2 v2.9.0 // indirect
k8s.io/kube-openapi v0.0.0-20211109043538-20434351676c // indirect
k8s.io/utils v0.0.0-20210819203725-bdf08cb9a70a // indirect
sigs.k8s.io/controller-runtime v0.9.0 // indirect
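Note: `k8s.io/klog/v2` moves from the indirect block into the direct requirements because the test file below now imports it directly (`log "k8s.io/klog/v2"`).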
101 changes: 95 additions & 6 deletions tests/integration/helm_default_installation_test.go
@@ -3,11 +3,17 @@ package integration
import (
"context"
"fmt"
"sort"
"testing"
"time"

appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
log "k8s.io/klog/v2"
"sigs.k8s.io/e2e-framework/klient/k8s/resources"
"sigs.k8s.io/e2e-framework/klient/wait"
"sigs.k8s.io/e2e-framework/klient/wait/conditions"
"sigs.k8s.io/e2e-framework/pkg/envconf"
"sigs.k8s.io/e2e-framework/pkg/features"

@@ -17,14 +23,15 @@ import (

"github.com/SumoLogic/sumologic-kubernetes-collection/tests/integration/internal"
"github.com/SumoLogic/sumologic-kubernetes-collection/tests/integration/internal/ctxopts"
"github.com/SumoLogic/sumologic-kubernetes-collection/tests/integration/internal/receivermock"
"github.com/SumoLogic/sumologic-kubernetes-collection/tests/integration/internal/stepfuncs"
"github.com/SumoLogic/sumologic-kubernetes-collection/tests/integration/internal/strings"
)

func Test_Helm_Default_FluentD_Metadata(t *testing.T) {
const (
tickDuration = time.Second
waitDuration = time.Minute * 2
tickDuration = 3 * time.Second
waitDuration = 3 * time.Minute
)
var (
expectedMetrics = internal.DefaultExpectedMetrics
@@ -36,7 +43,7 @@ func Test_Helm_Default_FluentD_Metadata(t *testing.T) {
// detail.
releaseName := strings.ReleaseNameFromT(t)

feat := features.New("installation").
featInstall := features.New("installation").
Assess("sumologic secret is created",
func(ctx context.Context, t *testing.T, envConf *envconf.Config) context.Context {
k8s.WaitUntilSecretAvailable(t, ctxopts.KubectlOptions(ctx), "sumologic", 60, tickDuration)
@@ -166,8 +173,11 @@ func Test_Helm_Default_FluentD_Metadata(t *testing.T) {

require.EqualValues(t, 0, daemonsets[0].Status.NumberUnavailable)
return ctx
}).
Assess("metrics are present", // TODO: extract this out to a separate feature
}).
Feature()

featMetrics := features.New("metrics").
Assess("expected metrics are present",
stepfuncs.WaitUntilExpectedMetricsPresent(
expectedMetrics,
"receiver-mock",
@@ -177,7 +187,86 @@ func Test_Helm_Default_FluentD_Metadata(t *testing.T) {
tickDuration,
),
).
Assess("expected labels are present",
// TODO: refactor into a step func?
func(ctx context.Context, t *testing.T, envConf *envconf.Config) context.Context {
// Get the receiver mock pod as metrics source
res := envConf.Client().Resources(internal.ReceiverMockNamespace)
podList := corev1.PodList{}
require.NoError(t,
wait.For(
conditions.New(res).
ResourceListN(
&podList,
1,
resources.WithLabelSelector("app=receiver-mock"),
),
wait.WithTimeout(waitDuration),
wait.WithInterval(tickDuration),
),
)
rClient, tunnelCloseFunc := receivermock.NewClientWithK8sTunnel(ctx, t)
defer tunnelCloseFunc()

assert.Eventually(t, func() bool {
filters := receivermock.MetadataFilters{
"__name__": "container_memory_working_set_bytes",
"pod": podList.Items[0].Name,
}
metricsSamples, err := rClient.GetMetricsSamples(filters)
if err != nil {
log.ErrorS(err, "failed getting samples from receiver-mock")
return false
}

if len(metricsSamples) == 0 {
log.InfoS("got 0 metrics samples", "filters", filters)
return false
}

sort.Sort(receivermock.MetricsSamplesByTime(metricsSamples))
// For now let's take the newest metric sample only because it will have the most
// accurate labels and the most labels attached (for instance service/deployment
// labels might not be attached at the very first record).
sample := metricsSamples[0]
labels := sample.Labels
expectedLabels := receivermock.Labels{
"_origin": "kubernetes",
"container": "receiver-mock",
// TODO: figure out why this is flaky and the label is sometimes missing
// https://github.com/SumoLogic/sumologic-kubernetes-collection/runs/4508796836?check_suite_focus=true
// "deployment": "receiver-mock",
"endpoint": "https-metrics",
"image": "",
"instance": "",
"job": "kubelet",
"metrics_path": "/metrics/cadvisor",
"namespace": "receiver-mock",
"node": "",
"pod_labels_app": "receiver-mock",
"pod_labels_pod-template-hash": "",
"pod_labels_service": "receiver-mock",
"pod": podList.Items[0].Name,
"prometheus_replica": "",
"prometheus_service": "",
"prometheus": "",
// TODO: figure out why this is flaky and the label is sometimes missing
// https://github.com/SumoLogic/sumologic-kubernetes-collection/runs/4508796836?check_suite_focus=true
// "replicaset": "",
"service": "receiver-mock",
}

log.V(0).InfoS("sample's labels", "labels", labels)
if !labels.MatchAll(expectedLabels) {
return false
}

return true
}, waitDuration, tickDuration)
return ctx
},
).
Feature()

testenv.Test(t, feat)
testenv.Test(t, featInstall, featMetrics)
}
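The empty values in `expectedLabels` (e.g. `"instance": ""`) suggest that `MatchAll` treats an empty expected value as a presence-only check, which makes sense for labels whose values vary between runs. A hypothetical sketch of a matcher with those semantics (the real `receivermock.Labels.MatchAll` may be implemented differently):

```go
package receivermock

// Labels maps metric label names to label values.
type Labels map[string]string

// MatchAll reports whether every expected label is present and, when the
// expected value is non-empty, equal to the actual value. An empty expected
// value asserts presence only, which suits run-specific labels such as
// "instance" or "node".
func (l Labels) MatchAll(expected Labels) bool {
	for name, want := range expected {
		got, ok := l[name]
		if !ok {
			return false
		}
		if want != "" && got != want {
			return false
		}
	}
	return true
}
```

With the metrics assertions split into their own `metrics` feature, they can now be run in isolation using the filtering added to the README in this PR:

```shell
make test TEST_NAME="Test_Helm_Default_FluentD_Metadata" TEST_ARGS="--feature '(metrics)'"
```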