
Commit 13fb69e

Revert "Update tests for datastream collector (prometheus-community#791)"
This reverts commit dd710cf.
1 parent ad69829 commit 13fb69e

File tree

3 files changed: +59 -74 lines changed


collector/data_stream.go

Lines changed: 30 additions & 0 deletions
@@ -46,6 +46,9 @@ type DataStream struct {
     client *http.Client
     url    *url.URL
 
+    up                              prometheus.Gauge
+    totalScrapes, jsonParseFailures prometheus.Counter
+
     dataStreamMetrics []*dataStreamMetric
 }
 
@@ -56,6 +59,18 @@ func NewDataStream(logger log.Logger, client *http.Client, url *url.URL) *DataStream {
         client: client,
         url:    url,
 
+        up: prometheus.NewGauge(prometheus.GaugeOpts{
+            Name: prometheus.BuildFQName(namespace, "data_stream_stats", "up"),
+            Help: "Was the last scrape of the ElasticSearch Data Stream stats endpoint successful.",
+        }),
+        totalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
+            Name: prometheus.BuildFQName(namespace, "data_stream_stats", "total_scrapes"),
+            Help: "Current total ElasticSearch Data STream scrapes.",
+        }),
+        jsonParseFailures: prometheus.NewCounter(prometheus.CounterOpts{
+            Name: prometheus.BuildFQName(namespace, "data_stream_stats", "json_parse_failures"),
+            Help: "Number of errors while parsing JSON.",
+        }),
         dataStreamMetrics: []*dataStreamMetric{
             {
                 Type: prometheus.CounterValue,
@@ -90,6 +105,10 @@ func (ds *DataStream) Describe(ch chan<- *prometheus.Desc) {
     for _, metric := range ds.dataStreamMetrics {
         ch <- metric.Desc
     }
+
+    ch <- ds.up.Desc()
+    ch <- ds.totalScrapes.Desc()
+    ch <- ds.jsonParseFailures.Desc()
 }
 
 func (ds *DataStream) fetchAndDecodeDataStreamStats() (DataStreamStatsResponse, error) {
@@ -119,10 +138,12 @@ func (ds *DataStream) fetchAndDecodeDataStreamStats() (DataStreamStatsResponse, error) {
 
     bts, err := io.ReadAll(res.Body)
     if err != nil {
+        ds.jsonParseFailures.Inc()
         return dsr, err
     }
 
     if err := json.Unmarshal(bts, &dsr); err != nil {
+        ds.jsonParseFailures.Inc()
         return dsr, err
     }
 
@@ -131,16 +152,25 @@ func (ds *DataStream) fetchAndDecodeDataStreamStats() (DataStreamStatsResponse, error) {
 
 // Collect gets DataStream metric values
 func (ds *DataStream) Collect(ch chan<- prometheus.Metric) {
+    ds.totalScrapes.Inc()
+    defer func() {
+        ch <- ds.up
+        ch <- ds.totalScrapes
+        ch <- ds.jsonParseFailures
+    }()
 
     dataStreamStatsResp, err := ds.fetchAndDecodeDataStreamStats()
     if err != nil {
+        ds.up.Set(0)
        level.Warn(ds.logger).Log(
            "msg", "failed to fetch and decode data stream stats",
            "err", err,
        )
        return
     }
 
+    ds.up.Set(1)
+
     for _, metric := range ds.dataStreamMetrics {
         for _, dataStream := range dataStreamStatsResp.DataStreamStats {
             fmt.Printf("Metric: %+v", dataStream)
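
For context, the hunks above restore the collector's self-monitoring series (up, total_scrapes, json_parse_failures) and the deferred flush in Collect. Below is a minimal, self-contained sketch of that pattern under stated assumptions: the type name exampleCollector, the fetch stub, and the "elasticsearch" namespace literal are illustrative stand-ins, not the exporter's actual code.

package main

import (
    "errors"
    "fmt"

    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/testutil"
)

type exampleCollector struct {
    up                              prometheus.Gauge
    totalScrapes, jsonParseFailures prometheus.Counter
}

func newExampleCollector() *exampleCollector {
    // "elasticsearch" stands in for the exporter's namespace constant (assumption).
    return &exampleCollector{
        up: prometheus.NewGauge(prometheus.GaugeOpts{
            Name: prometheus.BuildFQName("elasticsearch", "data_stream_stats", "up"),
            Help: "Was the last scrape of the data stream stats endpoint successful.",
        }),
        totalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
            Name: prometheus.BuildFQName("elasticsearch", "data_stream_stats", "total_scrapes"),
            Help: "Total data stream stats scrapes.",
        }),
        jsonParseFailures: prometheus.NewCounter(prometheus.CounterOpts{
            Name: prometheus.BuildFQName("elasticsearch", "data_stream_stats", "json_parse_failures"),
            Help: "Number of errors while parsing JSON.",
        }),
    }
}

// Describe advertises the self-monitoring series, mirroring the restored Describe hunk.
func (c *exampleCollector) Describe(ch chan<- *prometheus.Desc) {
    ch <- c.up.Desc()
    ch <- c.totalScrapes.Desc()
    ch <- c.jsonParseFailures.Desc()
}

// fetch is a hypothetical stand-in for fetchAndDecodeDataStreamStats; here it always fails.
func (c *exampleCollector) fetch() error { return errors.New("endpoint unreachable") }

// Collect counts the scrape, sets up to 0 or 1 depending on the fetch result,
// and flushes the three self-monitoring series via defer, as in the diff above.
func (c *exampleCollector) Collect(ch chan<- prometheus.Metric) {
    c.totalScrapes.Inc()
    defer func() {
        ch <- c.up
        ch <- c.totalScrapes
        ch <- c.jsonParseFailures
    }()

    if err := c.fetch(); err != nil {
        c.up.Set(0)
        return
    }
    c.up.Set(1)
}

func main() {
    c := newExampleCollector()
    // CollectAndCount registers the collector on a fresh registry and returns the sample count.
    fmt.Println(testutil.CollectAndCount(c)) // 3: up, total_scrapes, json_parse_failures
}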

collector/data_stream_test.go

Lines changed: 29 additions & 50 deletions
@@ -14,65 +14,44 @@
 package collector
 
 import (
-    "io"
+    "fmt"
     "net/http"
     "net/http/httptest"
     "net/url"
-    "os"
-    "strings"
     "testing"
 
     "github.com/go-kit/log"
-    "github.com/prometheus/client_golang/prometheus/testutil"
 )
 
 func TestDataStream(t *testing.T) {
-
-    tests := []struct {
-        name string
-        file string
-        want string
-    }{
-        {
-            name: "7.15.0",
-            file: "../fixtures/datastream/7.15.0.json",
-            want: `# HELP elasticsearch_data_stream_backing_indices_total Number of backing indices
-# TYPE elasticsearch_data_stream_backing_indices_total counter
-elasticsearch_data_stream_backing_indices_total{data_stream="bar"} 2
-elasticsearch_data_stream_backing_indices_total{data_stream="foo"} 5
-# HELP elasticsearch_data_stream_store_size_bytes Store size of data stream
-# TYPE elasticsearch_data_stream_store_size_bytes counter
-elasticsearch_data_stream_store_size_bytes{data_stream="bar"} 6.7382272e+08
-elasticsearch_data_stream_store_size_bytes{data_stream="foo"} 4.29205396e+08
-`,
-        },
+    tcs := map[string]string{
+        "7.15.0": `{"_shards":{"total":30,"successful":30,"failed":0},"data_stream_count":2,"backing_indices":7,"total_store_size_bytes":1103028116,"data_streams":[{"data_stream":"foo","backing_indices":5,"store_size_bytes":429205396,"maximum_timestamp":1656079894000},{"data_stream":"bar","backing_indices":2,"store_size_bytes":673822720,"maximum_timestamp":1656028796000}]}`,
     }
-    for _, tt := range tests {
-        t.Run(tt.name, func(t *testing.T) {
-            f, err := os.Open(tt.file)
-            if err != nil {
-                t.Fatal(err)
-            }
-            defer f.Close()
-
-            ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-                io.Copy(w, f)
-            }))
-            defer ts.Close()
-
-            u, err := url.Parse(ts.URL)
-            if err != nil {
-                t.Fatal(err)
-            }
-
-            c := NewDataStream(log.NewNopLogger(), http.DefaultClient, u)
-            if err != nil {
-                t.Fatal(err)
-            }
-
-            if err := testutil.CollectAndCompare(c, strings.NewReader(tt.want)); err != nil {
-                t.Fatal(err)
-            }
-        })
+    for ver, out := range tcs {
+        ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+            fmt.Fprintln(w, out)
+        }))
+        defer ts.Close()
+
+        u, err := url.Parse(ts.URL)
+        if err != nil {
+            t.Fatalf("Failed to parse URL: %s", err)
+        }
+        s := NewDataStream(log.NewNopLogger(), http.DefaultClient, u)
+        stats, err := s.fetchAndDecodeDataStreamStats()
+        if err != nil {
+            t.Fatalf("Failed to fetch or decode data stream stats: %s", err)
+        }
+        t.Logf("[%s] Data Stream Response: %+v", ver, stats)
+        dataStreamStats := stats.DataStreamStats[0]
+
+        if dataStreamStats.BackingIndices != 5 {
+            t.Errorf("Bad number of backing indices")
+        }
+
+        if dataStreamStats.StoreSizeBytes != 429205396 {
+            t.Errorf("Bad store size bytes valuee")
+        }
     }
+
 }
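
The restored test asserts against the first data_streams entry of the inline payload. As a rough illustration of that JSON-to-struct mapping, the standalone sketch below decodes the same shape using hypothetical struct tags mirroring the Elasticsearch data stream stats payload; the exporter's real DataStreamStatsResponse type is not reproduced here and may differ.

package main

import (
    "encoding/json"
    "fmt"
)

// Hypothetical types for illustration only; field tags follow the payload keys
// used in the test fixture above.
type dataStreamStats struct {
    DataStream     string `json:"data_stream"`
    BackingIndices int64  `json:"backing_indices"`
    StoreSizeBytes int64  `json:"store_size_bytes"`
}

type dataStreamStatsResponse struct {
    DataStreamCount int64             `json:"data_stream_count"`
    DataStreamStats []dataStreamStats `json:"data_streams"`
}

func main() {
    payload := `{"data_stream_count":2,"data_streams":[{"data_stream":"foo","backing_indices":5,"store_size_bytes":429205396},{"data_stream":"bar","backing_indices":2,"store_size_bytes":673822720}]}`

    var resp dataStreamStatsResponse
    if err := json.Unmarshal([]byte(payload), &resp); err != nil {
        panic(err)
    }
    // The test checks the first entry ("foo"): 5 backing indices, 429205396 bytes.
    fmt.Println(resp.DataStreamStats[0].BackingIndices, resp.DataStreamStats[0].StoreSizeBytes)
}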

fixtures/datastream/7.15.0.json

Lines changed: 0 additions & 24 deletions
This file was deleted.
