Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions central/metrics/custom/image_vulnerabilities/tracker_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -149,21 +149,21 @@ func TestQueryDeploymentsAndImages(t *testing.T) {
_ = result.Body.Close()
assert.NoError(t, err)
assert.Equal(t,
`# HELP rox_central_image_vuln_Cluster_Namespace_Severity_count The total number of image vulnerabilities aggregated by Cluster,Namespace,Severity and gathered every 2h1m0s
`# HELP rox_central_image_vuln_Cluster_Namespace_Severity_count The total number of image vulnerabilities aggregated by Cluster, Namespace, Severity, and gathered every 2h1m0s
# TYPE rox_central_image_vuln_Cluster_Namespace_Severity_count gauge
rox_central_image_vuln_Cluster_Namespace_Severity_count{Cluster="cluster-1",Namespace="namespace-1",Severity="CRITICAL_VULNERABILITY_SEVERITY"} 1
rox_central_image_vuln_Cluster_Namespace_Severity_count{Cluster="cluster-1",Namespace="namespace-2",Severity="CRITICAL_VULNERABILITY_SEVERITY"} 2
rox_central_image_vuln_Cluster_Namespace_Severity_count{Cluster="cluster-1",Namespace="namespace-2",Severity="MODERATE_VULNERABILITY_SEVERITY"} 2
rox_central_image_vuln_Cluster_Namespace_Severity_count{Cluster="cluster-2",Namespace="namespace-2",Severity="LOW_VULNERABILITY_SEVERITY"} 2
rox_central_image_vuln_Cluster_Namespace_Severity_count{Cluster="cluster-2",Namespace="namespace-2",Severity="MODERATE_VULNERABILITY_SEVERITY"} 2
# HELP rox_central_image_vuln_Deployment_ImageTag_count The total number of image vulnerabilities aggregated by Deployment,ImageTag and gathered every 2h1m0s
# HELP rox_central_image_vuln_Deployment_ImageTag_count The total number of image vulnerabilities aggregated by Deployment, ImageTag, and gathered every 2h1m0s
# TYPE rox_central_image_vuln_Deployment_ImageTag_count gauge
rox_central_image_vuln_Deployment_ImageTag_count{Deployment="D0",ImageTag="tag"} 1
rox_central_image_vuln_Deployment_ImageTag_count{Deployment="D1",ImageTag="tag"} 3
rox_central_image_vuln_Deployment_ImageTag_count{Deployment="D2",ImageTag="tag"} 1
rox_central_image_vuln_Deployment_ImageTag_count{Deployment="D3",ImageTag="latest"} 2
rox_central_image_vuln_Deployment_ImageTag_count{Deployment="D3",ImageTag="tag"} 2
# HELP rox_central_image_vuln_Severity_count The total number of image vulnerabilities aggregated by Severity and gathered every 2h1m0s
# HELP rox_central_image_vuln_Severity_count The total number of image vulnerabilities aggregated by Severity, and gathered every 2h1m0s
# TYPE rox_central_image_vuln_Severity_count gauge
rox_central_image_vuln_Severity_count{Severity="CRITICAL_VULNERABILITY_SEVERITY"} 3
rox_central_image_vuln_Severity_count{Severity="LOW_VULNERABILITY_SEVERITY"} 2
Expand Down
6 changes: 3 additions & 3 deletions central/metrics/custom/runner_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ func TestRunner_ServeHTTP(t *testing.T) {

// expectedBody renders the expected Prometheus exposition text for a single
// gauge metric with one labeled sample.
expectedBody := func(metricName, description, labels, vector string) string {
	metricName = "rox_central_" + metricName
	return fmt.Sprintf("# HELP %s The total number of %s aggregated by %s, and gathered every 10m0s\n"+
		"# TYPE %s gauge\n%s{%s} 1\n", metricName, description, labels, metricName, metricName, vector)
}

Expand All @@ -165,11 +165,11 @@ func TestRunner_ServeHTTP(t *testing.T) {
assert.NoError(t, err)
assert.Contains(t, string(body),
expectedBody("image_vuln_metric1", "image vulnerabilities",
"Cluster,Severity",
"Cluster, Severity",
`Cluster="cluster1",Severity="IMPORTANT_VULNERABILITY_SEVERITY"`))
assert.Contains(t, string(body),
expectedBody("policy_violation_metric2", "policy violations",
"Categories,Cluster,Policy",
"Categories, Cluster, Policy",
`Categories="catA,catB",Cluster="cluster1",Policy="Test Policy"`))
})
}
Expand Down
36 changes: 25 additions & 11 deletions central/metrics/custom/tracker/aggregator.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,18 +36,19 @@ type aggregatedRecord struct {
// {"Z": {labels: {L2="Z"}, total: 2}}
// }
type aggregator[F Finding] struct {
	// result maps every metric name to its aggregated records, keyed by
	// the aggregation key built from the metric's label values.
	result map[MetricName]map[aggregationKey]*aggregatedRecord
	// md describes the labels tracked for each metric.
	md MetricDescriptors
	// includeFilters keep a finding only if all its filtered label values
	// match the patterns.
	includeFilters LabelFilters
	// excludeFilters drop a finding if any of its filtered label values
	// matches a pattern.
	excludeFilters LabelFilters
	// getters lazily extract label values from a finding.
	getters LazyLabelGetters[F]
}

func makeAggregator[F Finding](md MetricDescriptors, lf LabelFilters, getters LazyLabelGetters[F]) *aggregator[F] {
func makeAggregator[F Finding](md MetricDescriptors, includeFilters, excludeFilters LabelFilters, getters LazyLabelGetters[F]) *aggregator[F] {
result := make(map[MetricName]map[aggregationKey]*aggregatedRecord)
for metric := range md {
result[metric] = make(map[aggregationKey]*aggregatedRecord)
}
return &aggregator[F]{result, md, lf, getters}
return &aggregator[F]{result, md, includeFilters, excludeFilters, getters}
}

// count the finding in the aggregation result.
Expand All @@ -58,9 +59,12 @@ func (a *aggregator[F]) count(finding F) {
}

for metric, labels := range a.md {
// Apply label filters. It could, e.g., keep only "ACTIVE" alerts.
if !a.pass(finding, a.lf[metric]) {
// Ignore this finding for this metric.
// Apply include and exclude filters.
// It could, e.g., keep only "ACTIVE" alerts or drop "LOW_SEVERITY"
// alerts.
if !a.matchAll(finding, a.includeFilters[metric]) ||
a.matchAny(finding, a.excludeFilters[metric]) {
// Drop this finding for this metric.
continue
}

Expand All @@ -73,8 +77,8 @@ func (a *aggregator[F]) count(finding F) {
}
}

// pass checks if the finding labels pass the filters.
func (a *aggregator[F]) pass(finding F, filters map[Label]*regexp.Regexp) bool {
// matchAll returns true if all label values match the according filters.
func (a *aggregator[F]) matchAll(finding F, filters map[Label]*regexp.Regexp) bool {
for label, pattern := range filters {
if !pattern.MatchString(a.getters[label](finding)) {
return false
Expand All @@ -83,6 +87,16 @@ func (a *aggregator[F]) pass(finding F, filters map[Label]*regexp.Regexp) bool {
return true
}

// matchAny reports whether at least one of the finding's label values
// matches its corresponding filter pattern. An empty filter map yields false.
func (a *aggregator[F]) matchAny(finding F, filters map[Label]*regexp.Regexp) bool {
	for label, re := range filters {
		value := a.getters[label](finding)
		if re.MatchString(value) {
			return true
		}
	}
	return false
}

// reset clears the aggregation result without reallocating the maps.
func (a *aggregator[F]) reset() {
for _, records := range a.result {
Expand Down
111 changes: 103 additions & 8 deletions central/metrics/custom/tracker/aggregator_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ func Test_aggregator(t *testing.T) {
"Cluster": func(tf testFinding) string { return testData[tf]["Cluster"] },
"Namespace": func(tf testFinding) string { return testData[tf]["Namespace"] },
}
a := makeAggregator(makeTestMetricDescriptors(t), nil, getters)
a := makeAggregator(makeTestMetricDescriptors(t), nil, nil, getters)
assert.NotNil(t, a)
assert.Equal(t, map[MetricName]map[aggregationKey]*aggregatedRecord{
"test_Test_aggregator_metric1": {},
Expand Down Expand Up @@ -115,12 +115,12 @@ func Test_filter(t *testing.T) {
severityFilter := make(map[Label]*regexp.Regexp)
severityFilter[Label("Severity")] = regexp.MustCompile("^CRITICAL|HIGH$")

lf := make(LabelFilters)
lf[MetricName("test_Test_filter_metric1")] = severityFilter
lf[MetricName("test_Test_filter_metric2")] = clusterFilter
incFilters := make(LabelFilters)
incFilters[MetricName("test_Test_filter_metric1")] = severityFilter
incFilters[MetricName("test_Test_filter_metric2")] = clusterFilter

md := makeTestMetricDescriptors(t)
a := makeAggregator(md, lf, testLabelGetters)
a := makeAggregator(md, incFilters, nil, testLabelGetters)

// Count all test data:
for i := range testData {
Expand Down Expand Up @@ -156,9 +156,104 @@ func Test_filter(t *testing.T) {
}, a.result)
}

func Test_excludeFilter(t *testing.T) {
// Exclude filter to drop LOW severity findings.
severityExclude := make(map[Label]*regexp.Regexp)
severityExclude[Label("Severity")] = regexp.MustCompile("^LOW$")

// Exclude filter to drop cluster 1 findings.
clusterExclude := make(map[Label]*regexp.Regexp)
clusterExclude[Label("Cluster")] = regexp.MustCompile("^cluster 1$")

excFilters := make(LabelFilters)
excFilters[MetricName("test_Test_excludeFilter_metric1")] = severityExclude
excFilters[MetricName("test_Test_excludeFilter_metric2")] = clusterExclude

md := makeTestMetricDescriptors(t)
a := makeAggregator(md, nil, excFilters, testLabelGetters)

// Count all test data.
for i := range testData {
a.count(testFinding(i))
}
assert.Equal(t, map[MetricName]map[aggregationKey]*aggregatedRecord{
// LOW severity findings (indices 2, 4) are excluded.
"test_Test_excludeFilter_metric1": {
"cluster 1|CRITICAL": &aggregatedRecord{
labels: prometheus.Labels{"Cluster": "cluster 1", "Severity": "CRITICAL"},
total: 2,
},
"cluster 2|HIGH": &aggregatedRecord{
labels: prometheus.Labels{"Cluster": "cluster 2", "Severity": "HIGH"},
total: 1,
},
},
// cluster 1 findings (indices 0, 3) are excluded.
"test_Test_excludeFilter_metric2": {
"ns 2": &aggregatedRecord{
labels: prometheus.Labels{"Namespace": "ns 2"},
total: 1,
},
"ns 3": &aggregatedRecord{
labels: prometheus.Labels{"Namespace": "ns 3"},
total: 2,
},
},
}, a.result)
}

func Test_includeAndExcludeFilter(t *testing.T) {
	// Include filter to keep only CRITICAL and HIGH severity. The
	// alternation must be grouped: "^CRITICAL|HIGH$" would match any value
	// starting with "CRITICAL" or ending with "HIGH" because "|" binds
	// looser than the anchors.
	severityInclude := make(map[Label]*regexp.Regexp)
	severityInclude[Label("Severity")] = regexp.MustCompile("^(?:CRITICAL|HIGH)$")

	// Exclude filter to drop cluster 1 findings.
	clusterExclude := make(map[Label]*regexp.Regexp)
	clusterExclude[Label("Cluster")] = regexp.MustCompile("^cluster 1$")

	incFilters := make(LabelFilters)
	incFilters[MetricName("test_Test_includeAndExcludeFilter_metric1")] = severityInclude

	excFilters := make(LabelFilters)
	excFilters[MetricName("test_Test_includeAndExcludeFilter_metric1")] = clusterExclude

	md := makeTestMetricDescriptors(t)
	a := makeAggregator(md, incFilters, excFilters, testLabelGetters)

	// Count all test data.
	for i := range testData {
		a.count(testFinding(i))
	}
	assert.Equal(t, map[MetricName]map[aggregationKey]*aggregatedRecord{
		// Only CRITICAL/HIGH kept (include), then cluster 1 dropped (exclude).
		// This leaves only index 1 (cluster 2, HIGH).
		"test_Test_includeAndExcludeFilter_metric1": {
			"cluster 2|HIGH": &aggregatedRecord{
				labels: prometheus.Labels{"Cluster": "cluster 2", "Severity": "HIGH"},
				total:  1,
			},
		},
		// No filters on metric2.
		"test_Test_includeAndExcludeFilter_metric2": {
			"ns 1": &aggregatedRecord{
				labels: prometheus.Labels{"Namespace": "ns 1"},
				total:  1,
			},
			"ns 2": &aggregatedRecord{
				labels: prometheus.Labels{"Namespace": "ns 2"},
				total:  1,
			},
			"ns 3": &aggregatedRecord{
				labels: prometheus.Labels{"Namespace": "ns 3"},
				total:  3,
			},
		},
	}, a.result)
}

func Test_makeAggregationKey(t *testing.T) {
md := makeTestMetricDescriptors(t)
a := makeAggregator(md, nil, testLabelGetters)
a := makeAggregator(md, nil, nil, testLabelGetters)

var metric = MetricName("test_" + t.Name() + "_metric1")
key, labels := a.makeAggregationKey(
Expand Down Expand Up @@ -211,7 +306,7 @@ func TestFinding_GetIncrement(t *testing.T) {
"l1": func(tf *withIncrement) string { return "v1" },
}
a := makeAggregator(
MetricDescriptors{"m1": []Label{"l1"}}, nil,
MetricDescriptors{"m1": []Label{"l1"}}, nil, nil,
getters)
a.count(&f)
f.n = 7
Expand All @@ -222,7 +317,7 @@ func TestFinding_GetIncrement(t *testing.T) {

func Test_aggregator_reset(t *testing.T) {
md := makeTestMetricDescriptors(t)
a := makeAggregator(md, nil, testLabelGetters)
a := makeAggregator(md, nil, nil, testLabelGetters)

for i := range testData {
a.count(testFinding(i))
Expand Down
11 changes: 6 additions & 5 deletions central/metrics/custom/tracker/configuration.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,10 @@ func (md MetricDescriptors) diff(another MetricDescriptors) (toAdd []MetricName,
}

// Configuration captures the custom metrics setup: the tracked metric
// descriptors, the per-metric include and exclude label filters, the metrics
// to add or delete after a reconfiguration diff, and the gathering period.
type Configuration struct {
	metrics        MetricDescriptors
	includeFilters LabelFilters
	excludeFilters LabelFilters
	toAdd          []MetricName
	toDelete       []MetricName
	period         time.Duration
}
2 changes: 1 addition & 1 deletion central/metrics/custom/tracker/testing_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ func makeTestMetricLabels(t *testing.T) map[string]*storage.PrometheusMetrics_Gr
return map[string]*storage.PrometheusMetrics_Group_Labels{
pfx + "_metric1": {Labels: []string{"Cluster", "Severity"}},
pfx + "_metric2": {Labels: []string{"Namespace"},
Filters: map[string]string{"Namespace": "ns.*"}},
IncludeFilters: map[string]string{"Namespace": "ns.*"}},
}
}

Expand Down
Loading
Loading