From e31f701c6831655f6b96c8a7dcccd32cfab6c098 Mon Sep 17 00:00:00 2001 From: Arve Knudsen Date: Wed, 17 Jan 2024 18:28:06 +0100 Subject: [PATCH] OTLP: Categorize target_info labels Signed-off-by: Arve Knudsen --- cmd/prometheus/main.go | 4 + cmd/promtool/tsdb.go | 15 + model/labels/labels_test.go | 38 +- prompb/types.pb.go | 314 +++++--- prompb/types.proto | 10 +- promql/engine.go | 28 +- promql/functions.go | 2 + promql/value.go | 170 ++-- scrape/helpers_test.go | 62 +- scrape/target.go | 2 + storage/buffer.go | 143 +++- storage/buffer_test.go | 8 + storage/fanout.go | 14 + storage/interface.go | 3 + storage/memoized_iterator.go | 27 +- storage/memoized_iterator_test.go | 58 +- storage/merge.go | 7 + storage/merge_test.go | 4 + storage/remote/codec.go | 83 +- .../prometheusremotewrite/helper.go | 15 +- storage/remote/queue_manager.go | 90 ++- storage/remote/write.go | 17 + storage/remote/write_handler.go | 12 +- storage/remote/write_handler_test.go | 17 + storage/series.go | 36 +- tsdb/agent/db.go | 49 ++ tsdb/block_test.go | 14 +- tsdb/blockwriter.go | 2 +- tsdb/chunkenc/bstream.go | 6 + tsdb/chunkenc/chunk.go | 95 ++- tsdb/chunkenc/float_histogram.go | 14 + tsdb/chunkenc/histogram.go | 14 + tsdb/chunkenc/infometric.go | 324 ++++++++ tsdb/chunkenc/varbit.go | 4 +- tsdb/chunkenc/xor.go | 13 +- tsdb/chunks/chunks.go | 11 +- tsdb/chunks/head_chunks.go | 16 +- tsdb/chunks/samples.go | 16 +- tsdb/compact.go | 12 +- tsdb/db.go | 4 +- tsdb/db_test.go | 58 +- tsdb/docs/format/head_chunks.md | 8 +- tsdb/docs/format/index.md | 2 +- tsdb/docs/usage.md | 4 +- tsdb/exemplar.go | 2 +- tsdb/head.go | 19 +- tsdb/head_append.go | 272 ++++++- tsdb/head_read_test.go | 60 +- tsdb/head_test.go | 28 +- tsdb/head_wal.go | 128 ++- tsdb/index/index.go | 4 +- tsdb/ooo_head.go | 6 +- tsdb/ooo_head_test.go | 2 +- tsdb/querier.go | 32 +- tsdb/querier_test.go | 750 +++++++++--------- tsdb/record/record.go | 107 ++- tsdb/tsdbblockutil.go | 3 + tsdb/wlog/checkpoint.go | 23 +- 
tsdb/wlog/watcher.go | 36 + tsdb/wlog/watcher_test.go | 7 + web/api/v1/json_codec.go | 2 + web/federate.go | 18 +- 62 files changed, 2537 insertions(+), 807 deletions(-) create mode 100644 tsdb/chunkenc/infometric.go diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go index f64c00e824..982d25f14b 100644 --- a/cmd/prometheus/main.go +++ b/cmd/prometheus/main.go @@ -1527,6 +1527,10 @@ func (n notReadyAppender) Append(ref storage.SeriesRef, l labels.Labels, t int64 return 0, tsdb.ErrNotReady } +func (n notReadyAppender) AppendInfoSample(storage.SeriesRef, labels.Labels, int64, []int) (storage.SeriesRef, error) { + return 0, tsdb.ErrNotReady +} + func (n notReadyAppender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exemplar.Exemplar) (storage.SeriesRef, error) { return 0, tsdb.ErrNotReady } diff --git a/cmd/promtool/tsdb.go b/cmd/promtool/tsdb.go index 73258754e2..358287dd88 100644 --- a/cmd/promtool/tsdb.go +++ b/cmd/promtool/tsdb.go @@ -636,6 +636,8 @@ func analyzeCompaction(ctx context.Context, block tsdb.BlockReader, indexr tsdb. histogramChunkSamplesCount := make([]int, 0) histogramChunkSize := make([]int, 0) histogramChunkBucketsCount := make([]int, 0) + infoChunkSamplesCount := make([]int, 0) + infoChunkSize := make([]int, 0) var builder labels.ScratchBuilder for postingsr.Next() { var chks []chunks.Meta @@ -688,6 +690,11 @@ func analyzeCompaction(ctx context.Context, block tsdb.BlockReader, indexr tsdb. bucketCount += len(f.NegativeBuckets) } histogramChunkBucketsCount = append(histogramChunkBucketsCount, bucketCount) + case chunkenc.EncInfoMetric: + infoChunkSamplesCount = append(infoChunkSamplesCount, chk.NumSamples()) + infoChunkSize = append(infoChunkSize, len(chk.Bytes())) + default: + return fmt.Errorf("unrecognized encoding: %s", chk.Encoding()) } totalChunks++ } @@ -704,6 +711,10 @@ func analyzeCompaction(ctx context.Context, block tsdb.BlockReader, indexr tsdb. 
displayHistogram("bytes per histogram chunk", histogramChunkSize, totalChunks) displayHistogram("buckets per histogram chunk", histogramChunkBucketsCount, totalChunks) + + displayHistogram("samples per info metric chunk", infoChunkSamplesCount, totalChunks) + displayHistogram("bytes per info metric chunk", infoChunkSize, totalChunks) + return nil } @@ -771,6 +782,10 @@ func formatSeriesSet(ss storage.SeriesSet) error { ts, h := it.AtHistogram(nil) fmt.Printf("%s %s %d\n", lbs, h.String(), ts) } + for it.Next() == chunkenc.ValInfoSample { + ts, ils := it.AtInfoSample() + fmt.Printf("%s %v %d\n", lbs, ils, ts) + } if it.Err() != nil { return ss.Err() } diff --git a/model/labels/labels_test.go b/model/labels/labels_test.go index 49b4b4e67b..f32099f42d 100644 --- a/model/labels/labels_test.go +++ b/model/labels/labels_test.go @@ -604,24 +604,24 @@ func TestBuilder(t *testing.T) { want: FromStrings("aaa", "111", "ccc", "333"), }, { - set: []Label{{"aaa", "111"}, {"bbb", "222"}, {"ccc", "333"}}, + set: []Label{{Name: "aaa", Value: "111"}, {Name: "bbb", Value: "222"}, {Name: "ccc", Value: "333"}}, del: []string{"bbb"}, want: FromStrings("aaa", "111", "ccc", "333"), }, { base: FromStrings("aaa", "111"), - set: []Label{{"bbb", "222"}}, + set: []Label{{Name: "bbb", Value: "222"}}, want: FromStrings("aaa", "111", "bbb", "222"), }, { base: FromStrings("aaa", "111"), - set: []Label{{"bbb", "222"}, {"bbb", "333"}}, + set: []Label{{Name: "bbb", Value: "222"}, {Name: "bbb", Value: "333"}}, want: FromStrings("aaa", "111", "bbb", "333"), }, { base: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"), del: []string{"bbb"}, - set: []Label{{"ddd", "444"}}, + set: []Label{{Name: "ddd", Value: "444"}}, want: FromStrings("aaa", "111", "ccc", "333", "ddd", "444"), }, { // Blank value is interpreted as delete. 
@@ -630,7 +630,7 @@ func TestBuilder(t *testing.T) { }, { base: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"), - set: []Label{{"bbb", ""}}, + set: []Label{{Name: "bbb", Value: ""}}, want: FromStrings("aaa", "111", "ccc", "333"), }, { @@ -646,7 +646,7 @@ func TestBuilder(t *testing.T) { { base: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"), del: []string{"bbb"}, - set: []Label{{"ddd", "444"}}, + set: []Label{{Name: "ddd", Value: "444"}}, keep: []string{"aaa", "ddd"}, want: FromStrings("aaa", "111", "ddd", "444"), }, @@ -701,19 +701,19 @@ func TestScratchBuilder(t *testing.T) { want: EmptyLabels(), }, { - add: []Label{{"aaa", "111"}}, + add: []Label{{Name: "aaa", Value: "111"}}, want: FromStrings("aaa", "111"), }, { - add: []Label{{"aaa", "111"}, {"bbb", "222"}, {"ccc", "333"}}, + add: []Label{{Name: "aaa", Value: "111"}, {Name: "bbb", Value: "222"}, {Name: "ccc", Value: "333"}}, want: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"), }, { - add: []Label{{"bbb", "222"}, {"aaa", "111"}, {"ccc", "333"}}, + add: []Label{{Name: "bbb", Value: "222"}, {Name: "aaa", Value: "111"}, {Name: "ccc", Value: "333"}}, want: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"), }, { - add: []Label{{"ddd", "444"}}, + add: []Label{{Name: "ddd", Value: "444"}}, want: FromStrings("ddd", "444"), }, } { @@ -793,15 +793,15 @@ func BenchmarkLabels_Hash(b *testing.B) { } var benchmarkLabels = []Label{ - {"job", "node"}, - {"instance", "123.123.1.211:9090"}, - {"path", "/api/v1/namespaces//deployments/"}, - {"method", "GET"}, - {"namespace", "system"}, - {"status", "500"}, - {"prometheus", "prometheus-core-1"}, - {"datacenter", "eu-west-1"}, - {"pod_name", "abcdef-99999-defee"}, + {Name: "job", Value: "node"}, + {Name: "instance", Value: "123.123.1.211:9090"}, + {Name: "path", Value: "/api/v1/namespaces//deployments/"}, + {Name: "method", Value: "GET"}, + {Name: "namespace", Value: "system"}, + {Name: "status", Value: "500"}, + {Name: "prometheus", Value: 
"prometheus-core-1"}, + {Name: "datacenter", Value: "eu-west-1"}, + {Name: "pod_name", Value: "abcdef-99999-defee"}, } func BenchmarkBuilder(b *testing.B) { diff --git a/prompb/types.pb.go b/prompb/types.pb.go index 93883daa13..d7b70ec67f 100644 --- a/prompb/types.pb.go +++ b/prompb/types.pb.go @@ -239,7 +239,9 @@ type Sample struct { Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` // timestamp is in ms format, see model/timestamp/timestamp.go for // conversion from time.Time to Prometheus timestamp. - Timestamp int64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Timestamp int64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // The set of identifying labels for info metrics, as array indices. + IdentifyingLabels []int32 `protobuf:"varint,3,rep,packed,name=identifyingLabels,proto3" json:"identifyingLabels,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -292,6 +294,13 @@ func (m *Sample) GetTimestamp() int64 { return 0 } +func (m *Sample) GetIdentifyingLabels() []int32 { + if m != nil { + return m.IdentifyingLabels + } + return nil +} + type Exemplar struct { // Optional, can be empty. 
Labels []Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels"` @@ -1146,76 +1155,77 @@ func init() { func init() { proto.RegisterFile("types.proto", fileDescriptor_d938547f84707355) } var fileDescriptor_d938547f84707355 = []byte{ - // 1092 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdb, 0x6e, 0xdb, 0x46, - 0x13, 0x36, 0x49, 0x89, 0x12, 0x47, 0x87, 0xd0, 0xfb, 0x3b, 0xf9, 0x59, 0xa3, 0x71, 0x54, 0x02, - 0x69, 0x85, 0xa2, 0x90, 0x11, 0xb7, 0x17, 0x0d, 0x1a, 0x14, 0xb0, 0x1d, 0xf9, 0x80, 0x5a, 0x12, - 0xb2, 0x92, 0xd1, 0xa6, 0x37, 0xc2, 0x5a, 0x5a, 0x4b, 0x44, 0xc4, 0x43, 0xb9, 0xab, 0xc0, 0xea, - 0x7b, 0xf4, 0xae, 0x2f, 0xd1, 0xb7, 0x08, 0xd0, 0x9b, 0xf6, 0x05, 0x8a, 0xc2, 0x57, 0x7d, 0x8c, - 0x62, 0x87, 0xa4, 0x48, 0xc5, 0x29, 0xd0, 0xf4, 0x6e, 0xe7, 0x9b, 0x6f, 0x76, 0x3e, 0xee, 0xce, - 0xcc, 0x12, 0x6a, 0x72, 0x15, 0x71, 0xd1, 0x89, 0xe2, 0x50, 0x86, 0x04, 0xa2, 0x38, 0xf4, 0xb9, - 0x9c, 0xf3, 0xa5, 0xd8, 0xdd, 0x99, 0x85, 0xb3, 0x10, 0xe1, 0x7d, 0xb5, 0x4a, 0x18, 0xee, 0xcf, - 0x3a, 0x34, 0x7b, 0x5c, 0xc6, 0xde, 0xa4, 0xc7, 0x25, 0x9b, 0x32, 0xc9, 0xc8, 0x53, 0x28, 0xa9, - 0x3d, 0x1c, 0xad, 0xa5, 0xb5, 0x9b, 0x07, 0x8f, 0x3b, 0xf9, 0x1e, 0x9d, 0x4d, 0x66, 0x6a, 0x8e, - 0x56, 0x11, 0xa7, 0x18, 0x42, 0x3e, 0x03, 0xe2, 0x23, 0x36, 0xbe, 0x66, 0xbe, 0xb7, 0x58, 0x8d, - 0x03, 0xe6, 0x73, 0x47, 0x6f, 0x69, 0x6d, 0x8b, 0xda, 0x89, 0xe7, 0x04, 0x1d, 0x7d, 0xe6, 0x73, - 0x42, 0xa0, 0x34, 0xe7, 0x8b, 0xc8, 0x29, 0xa1, 0x1f, 0xd7, 0x0a, 0x5b, 0x06, 0x9e, 0x74, 0xca, - 0x09, 0xa6, 0xd6, 0xee, 0x0a, 0x20, 0xcf, 0x44, 0x6a, 0x50, 0xb9, 0xec, 0x7f, 0xd3, 0x1f, 0x7c, - 0xdb, 0xb7, 0xb7, 0x94, 0x71, 0x3c, 0xb8, 0xec, 0x8f, 0xba, 0xd4, 0xd6, 0x88, 0x05, 0xe5, 0xd3, - 0xc3, 0xcb, 0xd3, 0xae, 0xad, 0x93, 0x06, 0x58, 0x67, 0xe7, 0xc3, 0xd1, 0xe0, 0x94, 0x1e, 0xf6, - 0x6c, 0x83, 0x10, 0x68, 0xa2, 0x27, 0xc7, 0x4a, 0x2a, 0x74, 0x78, 0xd9, 0xeb, 0x1d, 0xd2, 0x97, - 0x76, 0x99, 0x54, 0xa1, 0x74, 0xde, 0x3f, 
0x19, 0xd8, 0x26, 0xa9, 0x43, 0x75, 0x38, 0x3a, 0x1c, - 0x75, 0x87, 0xdd, 0x91, 0x5d, 0x71, 0x9f, 0x81, 0x39, 0x64, 0x7e, 0xb4, 0xe0, 0x64, 0x07, 0xca, - 0xaf, 0xd9, 0x62, 0x99, 0x1c, 0x8b, 0x46, 0x13, 0x83, 0x7c, 0x08, 0x96, 0xf4, 0x7c, 0x2e, 0x24, - 0xf3, 0x23, 0xfc, 0x4e, 0x83, 0xe6, 0x80, 0x1b, 0x42, 0xb5, 0x7b, 0xc3, 0xfd, 0x68, 0xc1, 0x62, - 0xb2, 0x0f, 0xe6, 0x82, 0x5d, 0xf1, 0x85, 0x70, 0xb4, 0x96, 0xd1, 0xae, 0x1d, 0x6c, 0x17, 0xcf, - 0xf5, 0x42, 0x79, 0x8e, 0x4a, 0x6f, 0xfe, 0x78, 0xb4, 0x45, 0x53, 0x5a, 0x9e, 0x50, 0xff, 0xc7, - 0x84, 0xc6, 0xdb, 0x09, 0x7f, 0x2d, 0x83, 0x75, 0xe6, 0x09, 0x19, 0xce, 0x62, 0xe6, 0x93, 0x87, - 0x60, 0x4d, 0xc2, 0x65, 0x20, 0xc7, 0x5e, 0x20, 0x51, 0x76, 0xe9, 0x6c, 0x8b, 0x56, 0x11, 0x3a, - 0x0f, 0x24, 0xf9, 0x08, 0x6a, 0x89, 0xfb, 0x7a, 0x11, 0x32, 0x99, 0xa4, 0x39, 0xdb, 0xa2, 0x80, - 0xe0, 0x89, 0xc2, 0x88, 0x0d, 0x86, 0x58, 0xfa, 0x98, 0x47, 0xa3, 0x6a, 0x49, 0x1e, 0x80, 0x29, - 0x26, 0x73, 0xee, 0x33, 0xbc, 0xb5, 0x6d, 0x9a, 0x5a, 0xe4, 0x31, 0x34, 0x7f, 0xe4, 0x71, 0x38, - 0x96, 0xf3, 0x98, 0x8b, 0x79, 0xb8, 0x98, 0xe2, 0x0d, 0x6a, 0xb4, 0xa1, 0xd0, 0x51, 0x06, 0x92, - 0x8f, 0x53, 0x5a, 0xae, 0xcb, 0x44, 0x5d, 0x1a, 0xad, 0x2b, 0xfc, 0x38, 0xd3, 0xf6, 0x29, 0xd8, - 0x05, 0x5e, 0x22, 0xb0, 0x82, 0x02, 0x35, 0xda, 0x5c, 0x33, 0x13, 0x91, 0xc7, 0xd0, 0x0c, 0xf8, - 0x8c, 0x49, 0xef, 0x35, 0x1f, 0x8b, 0x88, 0x05, 0xc2, 0xa9, 0xe2, 0x09, 0x3f, 0x28, 0x9e, 0xf0, - 0xd1, 0x72, 0xf2, 0x8a, 0xcb, 0x61, 0xc4, 0x82, 0xf4, 0x98, 0x1b, 0x59, 0x8c, 0xc2, 0x04, 0xf9, - 0x04, 0xee, 0xad, 0x37, 0x99, 0xf2, 0x85, 0x64, 0xc2, 0xb1, 0x5a, 0x46, 0x9b, 0xd0, 0xf5, 0xde, - 0xcf, 0x11, 0xdd, 0x20, 0xa2, 0x3a, 0xe1, 0x40, 0xcb, 0x68, 0x6b, 0x39, 0x11, 0xa5, 0x09, 0x25, - 0x2b, 0x0a, 0x85, 0x57, 0x90, 0x55, 0xfb, 0x37, 0xb2, 0xb2, 0x98, 0xb5, 0xac, 0xf5, 0x26, 0xa9, - 0xac, 0x7a, 0x22, 0x2b, 0x83, 0x73, 0x59, 0x6b, 0x62, 0x2a, 0xab, 0x91, 0xc8, 0xca, 0xe0, 0x54, - 0xd6, 0xd7, 0x00, 0x31, 0x17, 0x5c, 0x8e, 0xe7, 0xea, 0xf4, 0x9b, 0xd8, 0xe3, 
0x8f, 0x8a, 0x92, - 0xd6, 0xf5, 0xd3, 0xa1, 0x8a, 0x77, 0xe6, 0x05, 0x92, 0x5a, 0x71, 0xb6, 0xdc, 0x2c, 0xc0, 0x7b, - 0x6f, 0x17, 0xe0, 0x17, 0x60, 0xad, 0xa3, 0x36, 0x3b, 0xb5, 0x02, 0xc6, 0xcb, 0xee, 0xd0, 0xd6, - 0x88, 0x09, 0x7a, 0x7f, 0x60, 0xeb, 0x79, 0xb7, 0x1a, 0x47, 0x15, 0x28, 0xa3, 0xe6, 0xa3, 0x3a, - 0x40, 0x7e, 0xed, 0xee, 0x33, 0x80, 0xfc, 0x7c, 0x54, 0xe5, 0x85, 0xd7, 0xd7, 0x82, 0x27, 0xa5, - 0xbc, 0x4d, 0x53, 0x4b, 0xe1, 0x0b, 0x1e, 0xcc, 0xe4, 0x1c, 0x2b, 0xb8, 0x41, 0x53, 0xcb, 0xfd, - 0x4b, 0x03, 0x18, 0x79, 0x3e, 0x1f, 0xf2, 0xd8, 0xe3, 0xe2, 0xfd, 0xfb, 0xef, 0x00, 0x2a, 0x02, - 0x5b, 0x5f, 0x38, 0x3a, 0x46, 0x90, 0x62, 0x44, 0x32, 0x15, 0xd2, 0x90, 0x8c, 0x48, 0xbe, 0x04, - 0x8b, 0xa7, 0x0d, 0x2f, 0x1c, 0x03, 0xa3, 0x76, 0x8a, 0x51, 0xd9, 0x34, 0x48, 0xe3, 0x72, 0x32, - 0xf9, 0x0a, 0x60, 0x9e, 0x1d, 0xbc, 0x70, 0x4a, 0x18, 0x7a, 0xff, 0x9d, 0xd7, 0x92, 0xc6, 0x16, - 0xe8, 0xee, 0x13, 0x28, 0xe3, 0x17, 0xa8, 0xe9, 0x89, 0x13, 0x57, 0x4b, 0xa6, 0xa7, 0x5a, 0x6f, - 0xce, 0x11, 0x2b, 0x9d, 0x23, 0xee, 0x53, 0x30, 0x2f, 0x92, 0xef, 0x7c, 0xdf, 0x83, 0x71, 0x7f, - 0xd2, 0xa0, 0x8e, 0x78, 0x8f, 0xc9, 0xc9, 0x9c, 0xc7, 0xe4, 0xc9, 0xc6, 0x83, 0xf1, 0xf0, 0x4e, - 0x7c, 0xca, 0xeb, 0x14, 0x1e, 0x8a, 0x4c, 0xa8, 0xfe, 0x2e, 0xa1, 0x46, 0x51, 0x68, 0x1b, 0x4a, - 0x38, 0xf6, 0x4d, 0xd0, 0xbb, 0x2f, 0x92, 0x3a, 0xea, 0x77, 0x5f, 0x24, 0x75, 0x44, 0xd5, 0xa8, - 0x57, 0x00, 0xed, 0xda, 0x86, 0xfb, 0x8b, 0xa6, 0x8a, 0x8f, 0x4d, 0x55, 0xed, 0x09, 0xf2, 0x7f, - 0xa8, 0x08, 0xc9, 0xa3, 0xb1, 0x2f, 0x50, 0x97, 0x41, 0x4d, 0x65, 0xf6, 0x84, 0x4a, 0x7d, 0xbd, - 0x0c, 0x26, 0x59, 0x6a, 0xb5, 0x26, 0x1f, 0x40, 0x55, 0x48, 0x16, 0x4b, 0xc5, 0x4e, 0x86, 0x6a, - 0x05, 0xed, 0x9e, 0x20, 0xf7, 0xc1, 0xe4, 0xc1, 0x74, 0x8c, 0x97, 0xa2, 0x1c, 0x65, 0x1e, 0x4c, - 0x7b, 0x82, 0xec, 0x42, 0x75, 0x16, 0x87, 0xcb, 0xc8, 0x0b, 0x66, 0x4e, 0xb9, 0x65, 0xb4, 0x2d, - 0xba, 0xb6, 0x49, 0x13, 0xf4, 0xab, 0x15, 0x0e, 0xb6, 0x2a, 0xd5, 0xaf, 0x56, 0x6a, 0xf7, 0x98, - 0x05, 0x33, 0xae, 
0x36, 0xa9, 0x24, 0xbb, 0xa3, 0xdd, 0x13, 0xee, 0xef, 0x1a, 0x94, 0x8f, 0xe7, - 0xcb, 0xe0, 0x15, 0xd9, 0x83, 0x9a, 0xef, 0x05, 0x63, 0xd5, 0x4a, 0xb9, 0x66, 0xcb, 0xf7, 0x02, - 0x55, 0xc3, 0x3d, 0x81, 0x7e, 0x76, 0xb3, 0xf6, 0xa7, 0x6f, 0x8d, 0xcf, 0x6e, 0x52, 0x7f, 0x27, - 0xbd, 0x04, 0x03, 0x2f, 0x61, 0xb7, 0x78, 0x09, 0x98, 0xa0, 0xd3, 0x0d, 0x26, 0xe1, 0xd4, 0x0b, - 0x66, 0xf9, 0x0d, 0xa8, 0x37, 0x1c, 0xbf, 0xaa, 0x4e, 0x71, 0xed, 0x3e, 0x87, 0x6a, 0xc6, 0xba, - 0xd3, 0xbc, 0xdf, 0x0d, 0xd4, 0x13, 0xbb, 0xf1, 0xae, 0xea, 0xe4, 0x7f, 0x70, 0xef, 0xe4, 0x62, - 0x70, 0x38, 0x1a, 0x17, 0x1e, 0x5b, 0xf7, 0x07, 0x68, 0x60, 0x46, 0x3e, 0xfd, 0xaf, 0xad, 0xb7, - 0x0f, 0xe6, 0x44, 0xed, 0x90, 0x75, 0xde, 0xf6, 0x9d, 0xaf, 0xc9, 0x02, 0x12, 0xda, 0xd1, 0xce, - 0x9b, 0xdb, 0x3d, 0xed, 0xb7, 0xdb, 0x3d, 0xed, 0xcf, 0xdb, 0x3d, 0xed, 0x7b, 0x53, 0xb1, 0xa3, - 0xab, 0x2b, 0x13, 0x7f, 0x71, 0x3e, 0xff, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xfb, 0x5f, 0xf2, 0x4d, - 0x13, 0x09, 0x00, 0x00, + // 1112 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdd, 0x8e, 0xdb, 0x44, + 0x14, 0x5e, 0xdb, 0x89, 0x13, 0x9f, 0xfc, 0xd4, 0x3b, 0x6c, 0x4b, 0xa8, 0xe8, 0x36, 0x58, 0x2a, + 0x44, 0xa8, 0x4a, 0xd5, 0x85, 0x0b, 0x2a, 0x10, 0xd2, 0xee, 0x36, 0xfb, 0x23, 0x9a, 0x44, 0x9d, + 0x64, 0x05, 0xe5, 0x26, 0x9a, 0x4d, 0x26, 0x89, 0xa9, 0x3d, 0x36, 0x9e, 0x49, 0xb5, 0xe1, 0x3d, + 0xb8, 0xe3, 0x25, 0x78, 0x8b, 0x4a, 0xdc, 0xc0, 0x0b, 0x20, 0xb4, 0x57, 0x3c, 0x06, 0x9a, 0x19, + 0x3b, 0x76, 0xba, 0x45, 0xa2, 0xdc, 0xcd, 0xf9, 0xce, 0x77, 0x66, 0x3e, 0xcf, 0x39, 0x73, 0x8e, + 0xa1, 0x26, 0xd6, 0x31, 0xe5, 0xdd, 0x38, 0x89, 0x44, 0x84, 0x20, 0x4e, 0xa2, 0x90, 0x8a, 0x25, + 0x5d, 0xf1, 0xbb, 0x7b, 0x8b, 0x68, 0x11, 0x29, 0xf8, 0x91, 0x5c, 0x69, 0x86, 0xf7, 0x8b, 0x09, + 0xcd, 0x3e, 0x15, 0x89, 0x3f, 0xed, 0x53, 0x41, 0x66, 0x44, 0x10, 0xf4, 0x04, 0x4a, 0x72, 0x8f, + 0x96, 0xd1, 0x36, 0x3a, 0xcd, 0x83, 0x07, 0xdd, 0x7c, 0x8f, 0xee, 0x36, 0x33, 
0x35, 0xc7, 0xeb, + 0x98, 0x62, 0x15, 0x82, 0x1e, 0x02, 0x0a, 0x15, 0x36, 0x99, 0x93, 0xd0, 0x0f, 0xd6, 0x13, 0x46, + 0x42, 0xda, 0x32, 0xdb, 0x46, 0xc7, 0xc1, 0xae, 0xf6, 0x9c, 0x28, 0xc7, 0x80, 0x84, 0x14, 0x21, + 0x28, 0x2d, 0x69, 0x10, 0xb7, 0x4a, 0xca, 0xaf, 0xd6, 0x12, 0x5b, 0x31, 0x5f, 0xb4, 0xca, 0x1a, + 0x93, 0x6b, 0x6f, 0x0d, 0x90, 0x9f, 0x84, 0x6a, 0x50, 0xb9, 0x18, 0x7c, 0x33, 0x18, 0x7e, 0x3b, + 0x70, 0x77, 0xa4, 0x71, 0x3c, 0xbc, 0x18, 0x8c, 0x7b, 0xd8, 0x35, 0x90, 0x03, 0xe5, 0xd3, 0xc3, + 0x8b, 0xd3, 0x9e, 0x6b, 0xa2, 0x06, 0x38, 0x67, 0xe7, 0xa3, 0xf1, 0xf0, 0x14, 0x1f, 0xf6, 0x5d, + 0x0b, 0x21, 0x68, 0x2a, 0x4f, 0x8e, 0x95, 0x64, 0xe8, 0xe8, 0xa2, 0xdf, 0x3f, 0xc4, 0x2f, 0xdc, + 0x32, 0xaa, 0x42, 0xe9, 0x7c, 0x70, 0x32, 0x74, 0x6d, 0x54, 0x87, 0xea, 0x68, 0x7c, 0x38, 0xee, + 0x8d, 0x7a, 0x63, 0xb7, 0xe2, 0xfd, 0x00, 0xf6, 0x88, 0x84, 0x71, 0x40, 0xd1, 0x1e, 0x94, 0x5f, + 0x91, 0x60, 0xa5, 0xaf, 0xc5, 0xc0, 0xda, 0x40, 0x1f, 0x82, 0x23, 0xfc, 0x90, 0x72, 0x41, 0xc2, + 0x58, 0x7d, 0xa7, 0x85, 0x73, 0x00, 0x3d, 0x84, 0x5d, 0x7f, 0x46, 0x99, 0xf0, 0xe7, 0x6b, 0x9f, + 0x2d, 0x9e, 0x91, 0x4b, 0x1a, 0xf0, 0x96, 0xd5, 0xb6, 0x3a, 0x65, 0x7c, 0xd3, 0xe1, 0x45, 0x50, + 0xed, 0x5d, 0xd1, 0x30, 0x0e, 0x48, 0x82, 0x1e, 0x81, 0x1d, 0x68, 0xba, 0xd1, 0xb6, 0x3a, 0xb5, + 0x83, 0xdd, 0x62, 0x16, 0x14, 0xff, 0xa8, 0xf4, 0xfa, 0xcf, 0xfb, 0x3b, 0x38, 0xa5, 0xe5, 0xf2, + 0xcc, 0x7f, 0x95, 0x67, 0xbd, 0x21, 0xcf, 0xfb, 0xad, 0x0c, 0xce, 0x99, 0xcf, 0x45, 0xb4, 0x48, + 0x48, 0x88, 0xee, 0x81, 0x33, 0x8d, 0x56, 0x4c, 0x4c, 0x7c, 0x26, 0xd4, 0x47, 0x96, 0xce, 0x76, + 0x70, 0x55, 0x41, 0xe7, 0x4c, 0xa0, 0x8f, 0xa0, 0xa6, 0xdd, 0xf3, 0x20, 0x22, 0x42, 0x1f, 0x73, + 0xb6, 0x83, 0x41, 0x81, 0x27, 0x12, 0x43, 0x2e, 0x58, 0x7c, 0x15, 0xaa, 0x73, 0x0c, 0x2c, 0x97, + 0xe8, 0x0e, 0xd8, 0x7c, 0xba, 0xa4, 0x21, 0x51, 0x39, 0xde, 0xc5, 0xa9, 0x85, 0x1e, 0x40, 0xf3, + 0x27, 0x9a, 0x44, 0x13, 0xb1, 0x4c, 0x28, 0x5f, 0x46, 0xc1, 0x4c, 0xe5, 0xdb, 0xc0, 0x0d, 0x89, + 0x8e, 0x33, 0x10, 
0x7d, 0x9c, 0xd2, 0x72, 0x5d, 0xb6, 0xd2, 0x65, 0xe0, 0xba, 0xc4, 0x8f, 0x33, + 0x6d, 0x9f, 0x82, 0x5b, 0xe0, 0x69, 0x81, 0x15, 0x25, 0xd0, 0xc0, 0xcd, 0x0d, 0x53, 0x8b, 0x3c, + 0x86, 0x26, 0xa3, 0x0b, 0x22, 0xfc, 0x57, 0x74, 0xc2, 0x63, 0xc2, 0x78, 0xab, 0xaa, 0x6e, 0xf8, + 0x4e, 0xf1, 0x86, 0x8f, 0x56, 0xd3, 0x97, 0x54, 0x8c, 0x62, 0xc2, 0xd2, 0x6b, 0x6e, 0x64, 0x31, + 0x12, 0xe3, 0xe8, 0x13, 0xb8, 0xb5, 0xd9, 0x64, 0x46, 0x03, 0x41, 0x78, 0xcb, 0x69, 0x5b, 0x1d, + 0x84, 0x37, 0x7b, 0x3f, 0x55, 0xe8, 0x16, 0x51, 0xa9, 0xe3, 0x2d, 0x68, 0x5b, 0x1d, 0x23, 0x27, + 0x2a, 0x69, 0x5c, 0xca, 0x8a, 0x23, 0xee, 0x17, 0x64, 0xd5, 0xfe, 0x8b, 0xac, 0x2c, 0x66, 0x23, + 0x6b, 0xb3, 0x49, 0x2a, 0xab, 0xae, 0x65, 0x65, 0x70, 0x2e, 0x6b, 0x43, 0x4c, 0x65, 0x35, 0xb4, + 0xac, 0x0c, 0x4e, 0x65, 0x7d, 0x0d, 0x90, 0x50, 0x4e, 0xc5, 0x64, 0x29, 0x6f, 0xbf, 0xa9, 0x3a, + 0xc2, 0xfd, 0xa2, 0xa4, 0x4d, 0xfd, 0x74, 0xb1, 0xe4, 0x9d, 0xf9, 0x4c, 0x60, 0x27, 0xc9, 0x96, + 0xdb, 0x05, 0x78, 0xeb, 0xcd, 0x02, 0xfc, 0x1c, 0x9c, 0x4d, 0xd4, 0xf6, 0xbb, 0xae, 0x80, 0xf5, + 0xa2, 0x37, 0x72, 0x0d, 0x64, 0x83, 0x39, 0x18, 0xba, 0x66, 0xfe, 0xb6, 0xad, 0xa3, 0x0a, 0x94, + 0x95, 0xe6, 0xa3, 0x3a, 0x40, 0x9e, 0x76, 0xef, 0x2b, 0x80, 0xfc, 0x7e, 0x64, 0xe5, 0x45, 0xf3, + 0x39, 0xa7, 0xba, 0x94, 0x77, 0x71, 0x6a, 0x49, 0x3c, 0xa0, 0x6c, 0x21, 0x96, 0xaa, 0x82, 0x1b, + 0x38, 0xb5, 0xbc, 0xbf, 0x0d, 0x80, 0xb1, 0x1f, 0xd2, 0x11, 0x4d, 0x7c, 0xca, 0xdf, 0xfd, 0xfd, + 0x1d, 0x40, 0x85, 0xab, 0x46, 0xc1, 0x5b, 0xa6, 0x8a, 0x40, 0xc5, 0x08, 0xdd, 0x43, 0xd2, 0x90, + 0x8c, 0x88, 0xbe, 0x00, 0x87, 0xa6, 0x0f, 0x5e, 0xb7, 0x85, 0xda, 0xc1, 0x5e, 0x31, 0x2a, 0xeb, + 0x06, 0x69, 0x5c, 0x4e, 0x46, 0x5f, 0x02, 0x2c, 0xb3, 0x8b, 0xe7, 0xad, 0x92, 0x0a, 0xbd, 0xfd, + 0xd6, 0xb4, 0xa4, 0xb1, 0x05, 0xba, 0xf7, 0x18, 0xca, 0xea, 0x0b, 0x64, 0xaf, 0x55, 0xfd, 0xd9, + 0xd0, 0xbd, 0x56, 0xae, 0xb7, 0xfb, 0x88, 0x93, 0xf6, 0x11, 0xef, 0x09, 0xd8, 0xba, 0x49, 0xbd, + 0xf3, 0xc5, 0x78, 0x3f, 0x1b, 0x50, 0x57, 0x78, 0x9f, 
0x88, 0xe9, 0x92, 0x26, 0xe8, 0xf1, 0xd6, + 0x78, 0xb9, 0x77, 0x23, 0x3e, 0xe5, 0x75, 0x0b, 0x63, 0x25, 0x13, 0x6a, 0xbe, 0x4d, 0xa8, 0x55, + 0x14, 0xda, 0x81, 0x92, 0x1a, 0x12, 0x36, 0x98, 0xbd, 0xe7, 0xba, 0x8e, 0x06, 0xbd, 0xe7, 0xba, + 0x8e, 0xb0, 0x1c, 0x0c, 0x12, 0xc0, 0x3d, 0xd7, 0xf2, 0x7e, 0x35, 0x64, 0xf1, 0x91, 0x99, 0xac, + 0x3d, 0x8e, 0xde, 0x87, 0x0a, 0x17, 0x34, 0x9e, 0x84, 0x5c, 0xe9, 0xb2, 0xb0, 0x2d, 0xcd, 0x3e, + 0x97, 0x47, 0xcf, 0x57, 0x6c, 0x9a, 0x1d, 0x2d, 0xd7, 0xe8, 0x03, 0xa8, 0x72, 0x41, 0x12, 0x21, + 0xd9, 0xba, 0xa9, 0x56, 0x94, 0xdd, 0xe7, 0xe8, 0x36, 0xd8, 0x94, 0xcd, 0x26, 0x2a, 0x29, 0xd2, + 0x51, 0xa6, 0x6c, 0xd6, 0xe7, 0xe8, 0x2e, 0x54, 0x17, 0x49, 0xb4, 0x8a, 0x7d, 0xb6, 0x68, 0x95, + 0xdb, 0x56, 0xc7, 0xc1, 0x1b, 0x1b, 0x35, 0xc1, 0xbc, 0x5c, 0xab, 0xc6, 0x56, 0xc5, 0xe6, 0xe5, + 0x5a, 0xee, 0x9e, 0x10, 0xb6, 0xa0, 0x72, 0x93, 0x8a, 0xde, 0x5d, 0xd9, 0x7d, 0xee, 0xfd, 0x61, + 0x40, 0xf9, 0x78, 0xb9, 0x62, 0x2f, 0xd1, 0x3e, 0xd4, 0x42, 0x9f, 0x4d, 0xe4, 0x53, 0xca, 0x35, + 0x3b, 0xa1, 0xcf, 0x64, 0x0d, 0xf7, 0xb9, 0xf2, 0x93, 0xab, 0x8d, 0x3f, 0x9d, 0x4c, 0x21, 0xb9, + 0x4a, 0xfd, 0xdd, 0x34, 0x09, 0x96, 0x4a, 0xc2, 0xdd, 0x62, 0x12, 0xd4, 0x01, 0xdd, 0x1e, 0x9b, + 0x46, 0x33, 0x9f, 0x2d, 0xf2, 0x0c, 0xc8, 0x89, 0xaf, 0xbe, 0xaa, 0x8e, 0xd5, 0xda, 0x7b, 0x0a, + 0xd5, 0x8c, 0x75, 0xe3, 0xf1, 0x7e, 0x37, 0x94, 0x03, 0x79, 0x6b, 0x0a, 0x9b, 0xe8, 0x3d, 0xb8, + 0x75, 0xf2, 0x6c, 0x78, 0x38, 0x9e, 0x14, 0x46, 0xb3, 0xf7, 0x23, 0x34, 0xd4, 0x89, 0x74, 0xf6, + 0x7f, 0x9f, 0xde, 0x23, 0xb0, 0xa7, 0x72, 0x87, 0xec, 0xe5, 0xed, 0xde, 0xf8, 0x9a, 0x2c, 0x40, + 0xd3, 0x8e, 0xf6, 0x5e, 0x5f, 0xef, 0x1b, 0xbf, 0x5f, 0xef, 0x1b, 0x7f, 0x5d, 0xef, 0x1b, 0xdf, + 0xdb, 0x92, 0x1d, 0x5f, 0x5e, 0xda, 0xea, 0x87, 0xe8, 0xb3, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x2d, 0x89, 0xc9, 0x88, 0x41, 0x09, 0x00, 0x00, } func (m *MetricMetadata) Marshal() (dAtA []byte, err error) { @@ -1295,6 +1305,25 @@ func (m *Sample) MarshalToSizedBuffer(dAtA []byte) (int, 
error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.IdentifyingLabels) > 0 { + dAtA2 := make([]byte, len(m.IdentifyingLabels)*10) + var j1 int + for _, num1 := range m.IdentifyingLabels { + num := uint64(num1) + for num >= 1<<7 { + dAtA2[j1] = uint8(uint64(num)&0x7f | 0x80) + num >>= 7 + j1++ + } + dAtA2[j1] = uint8(num) + j1++ + } + i -= j1 + copy(dAtA[i:], dAtA2[:j1]) + i = encodeVarintTypes(dAtA, i, uint64(j1)) + i-- + dAtA[i] = 0x1a + } if m.Timestamp != 0 { i = encodeVarintTypes(dAtA, i, uint64(m.Timestamp)) i-- @@ -1397,30 +1426,30 @@ func (m *Histogram) MarshalToSizedBuffer(dAtA []byte) (int, error) { } if len(m.PositiveCounts) > 0 { for iNdEx := len(m.PositiveCounts) - 1; iNdEx >= 0; iNdEx-- { - f1 := math.Float64bits(float64(m.PositiveCounts[iNdEx])) + f3 := math.Float64bits(float64(m.PositiveCounts[iNdEx])) i -= 8 - encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f1)) + encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f3)) } i = encodeVarintTypes(dAtA, i, uint64(len(m.PositiveCounts)*8)) i-- dAtA[i] = 0x6a } if len(m.PositiveDeltas) > 0 { - var j2 int - dAtA4 := make([]byte, len(m.PositiveDeltas)*10) + var j4 int + dAtA6 := make([]byte, len(m.PositiveDeltas)*10) for _, num := range m.PositiveDeltas { - x3 := (uint64(num) << 1) ^ uint64((num >> 63)) - for x3 >= 1<<7 { - dAtA4[j2] = uint8(uint64(x3)&0x7f | 0x80) - j2++ - x3 >>= 7 - } - dAtA4[j2] = uint8(x3) - j2++ + x5 := (uint64(num) << 1) ^ uint64((num >> 63)) + for x5 >= 1<<7 { + dAtA6[j4] = uint8(uint64(x5)&0x7f | 0x80) + j4++ + x5 >>= 7 + } + dAtA6[j4] = uint8(x5) + j4++ } - i -= j2 - copy(dAtA[i:], dAtA4[:j2]) - i = encodeVarintTypes(dAtA, i, uint64(j2)) + i -= j4 + copy(dAtA[i:], dAtA6[:j4]) + i = encodeVarintTypes(dAtA, i, uint64(j4)) i-- dAtA[i] = 0x62 } @@ -1440,30 +1469,30 @@ func (m *Histogram) MarshalToSizedBuffer(dAtA []byte) (int, error) { } if len(m.NegativeCounts) > 0 { for iNdEx := len(m.NegativeCounts) - 1; iNdEx >= 0; iNdEx-- { - f5 := 
math.Float64bits(float64(m.NegativeCounts[iNdEx])) + f7 := math.Float64bits(float64(m.NegativeCounts[iNdEx])) i -= 8 - encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f5)) + encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f7)) } i = encodeVarintTypes(dAtA, i, uint64(len(m.NegativeCounts)*8)) i-- dAtA[i] = 0x52 } if len(m.NegativeDeltas) > 0 { - var j6 int - dAtA8 := make([]byte, len(m.NegativeDeltas)*10) + var j8 int + dAtA10 := make([]byte, len(m.NegativeDeltas)*10) for _, num := range m.NegativeDeltas { - x7 := (uint64(num) << 1) ^ uint64((num >> 63)) - for x7 >= 1<<7 { - dAtA8[j6] = uint8(uint64(x7)&0x7f | 0x80) - j6++ - x7 >>= 7 - } - dAtA8[j6] = uint8(x7) - j6++ + x9 := (uint64(num) << 1) ^ uint64((num >> 63)) + for x9 >= 1<<7 { + dAtA10[j8] = uint8(uint64(x9)&0x7f | 0x80) + j8++ + x9 >>= 7 + } + dAtA10[j8] = uint8(x9) + j8++ } - i -= j6 - copy(dAtA[i:], dAtA8[:j6]) - i = encodeVarintTypes(dAtA, i, uint64(j6)) + i -= j8 + copy(dAtA[i:], dAtA10[:j8]) + i = encodeVarintTypes(dAtA, i, uint64(j8)) i-- dAtA[i] = 0x4a } @@ -2044,6 +2073,13 @@ func (m *Sample) Size() (n int) { if m.Timestamp != 0 { n += 1 + sovTypes(uint64(m.Timestamp)) } + if len(m.IdentifyingLabels) > 0 { + l = 0 + for _, e := range m.IdentifyingLabels { + l += sovTypes(uint64(e)) + } + n += 1 + sovTypes(uint64(l)) + l + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2607,6 +2643,82 @@ func (m *Sample) Unmarshal(dAtA []byte) error { break } } + case 3: + if wireType == 0 { + var v int32 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTypes + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.IdentifyingLabels = append(m.IdentifyingLabels, v) + } else if wireType == 2 { + var packedLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTypes + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := 
dAtA[iNdEx] + iNdEx++ + packedLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if packedLen < 0 { + return ErrInvalidLengthTypes + } + postIndex := iNdEx + packedLen + if postIndex < 0 { + return ErrInvalidLengthTypes + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var elementCount int + var count int + for _, integer := range dAtA[iNdEx:postIndex] { + if integer < 128 { + count++ + } + } + elementCount = count + if elementCount != 0 && len(m.IdentifyingLabels) == 0 { + m.IdentifyingLabels = make([]int32, 0, elementCount) + } + for iNdEx < postIndex { + var v int32 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTypes + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.IdentifyingLabels = append(m.IdentifyingLabels, v) + } + } else { + return fmt.Errorf("proto: wrong wireType = %d for field IdentifyingLabels", wireType) + } default: iNdEx = preIndex skippy, err := skipTypes(dAtA[iNdEx:]) diff --git a/prompb/types.proto b/prompb/types.proto index 61fc1e0143..08182d61a0 100644 --- a/prompb/types.proto +++ b/prompb/types.proto @@ -43,6 +43,8 @@ message Sample { // timestamp is in ms format, see model/timestamp/timestamp.go for // conversion from time.Time to Prometheus timestamp. int64 timestamp = 2; + // The set of identifying labels for info metrics, as array indices. + repeated int32 identifyingLabels = 3; } message Exemplar { @@ -123,10 +125,10 @@ message BucketSpan { message TimeSeries { // For a timeseries to be valid, and for the samples and exemplars // to be ingested by the remote system properly, the labels field is required. 
- repeated Label labels = 1 [(gogoproto.nullable) = false]; - repeated Sample samples = 2 [(gogoproto.nullable) = false]; - repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false]; - repeated Histogram histograms = 4 [(gogoproto.nullable) = false]; + repeated Label labels = 1 [(gogoproto.nullable) = false]; + repeated Sample samples = 2 [(gogoproto.nullable) = false]; + repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false]; + repeated Histogram histograms = 4 [(gogoproto.nullable) = false]; } message Label { diff --git a/promql/engine.go b/promql/engine.go index 5acff14422..5d902da53b 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -228,6 +228,7 @@ func (q *query) Cancel() { // Close implements the Query interface. func (q *query) Close() { + // TODO: Handle info metric samples for _, s := range q.matrix { putFPointSlice(s.Floats) putHPointSlice(s.Histograms) @@ -742,6 +743,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.Eval for i, s := range mat { // Point might have a different timestamp, force it to the evaluation // timestamp as that is when we ran the evaluation. 
+ // TODO: Handle info metric samples if len(s.Histograms) > 0 { vector[i] = Sample{Metric: s.Metric, H: s.Histograms[0].H, T: start} } else { @@ -1188,6 +1190,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper) } for si, series := range matrixes[i] { + // TODO: Handle info metric samples switch { case len(series.Floats) > 0 && series.Floats[0].T == ts: vectors[i] = append(vectors[i], Sample{Metric: series.Metric, F: series.Floats[0].F, T: ts}) @@ -1239,6 +1242,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper) } mat := make(Matrix, len(result)) for i, s := range result { + // TODO: Handle info metric samples if s.H == nil { mat[i] = Series{Metric: s.Metric, Floats: []FPoint{{T: ts, F: s.F}}} } else { @@ -1262,6 +1266,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper) } else { ss = seriesAndTimestamp{Series{Metric: sample.Metric}, ts} } + // TODO: Handle info metric samples if sample.H == nil { if ss.Floats == nil { ss.Floats = getFPointSlice(numSteps) @@ -1280,6 +1285,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper) // Reuse the original point slices. 
for _, m := range origMatrixes { for _, s := range m { + // TODO: Handle info metric samples putFPointSlice(s.Floats) putHPointSlice(s.Histograms) } @@ -1479,6 +1485,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio ev.error(err) } ev.currentSamples -= len(floats) + totalHPointSize(histograms) + // TODO: Handle info metric samples if floats != nil { floats = floats[:0] } @@ -1520,6 +1527,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio if len(floats)+len(histograms) == 0 { continue } + // TODO: Handle info metric samples inMatrix[0].Floats = floats inMatrix[0].Histograms = histograms enh.Ts = ts @@ -1572,6 +1580,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio } ev.samplesStats.UpdatePeak(ev.currentSamples) + // TODO: Handle info metric samples ev.currentSamples -= len(floats) + totalHPointSize(histograms) putFPointSlice(floats) putMatrixSelectorHPointSlice(histograms) @@ -1583,6 +1592,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio steps := int(1 + (ev.endTimestamp-ev.startTimestamp)/ev.interval) // Iterate once to look for a complete series. 
for _, s := range mat { + // TODO: Handle info metric samples if len(s.Floats)+len(s.Histograms) == steps { return Matrix{}, warnings } @@ -1591,6 +1601,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio found := map[int64]struct{}{} for i, s := range mat { + // TODO: Handle info metric samples for _, p := range s.Floats { found[p.T] = struct{}{} } @@ -1717,6 +1728,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio step++ _, f, h, ok := ev.vectorSelectorSingle(it, e, ts) if ok { + // TODO: Handle info metric samples if h == nil { ev.currentSamples++ ev.samplesStats.IncrementSamplesAtStep(step, 1) @@ -1743,6 +1755,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio } } + // TODO: Handle info metric samples if len(ss.Floats)+len(ss.Histograms) > 0 { mat = append(mat, ss) prevSS = &mat[len(mat)-1] @@ -1836,6 +1849,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio panic(fmt.Errorf("unexpected result in StepInvariantExpr evaluation: %T", expr)) } for i := range mat { + // TODO: Handle info metric samples if len(mat[i].Floats)+len(mat[i].Histograms) != 1 { panic(fmt.Errorf("unexpected number of samples")) } @@ -1950,6 +1964,7 @@ func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, no var h *histogram.FloatHistogram valueType := it.Seek(refTime) + // TODO: Handle info metric samples switch valueType { case chunkenc.ValNone: if it.Err() != nil { @@ -1964,7 +1979,7 @@ func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, no } if valueType == chunkenc.ValNone || t > refTime { var ok bool - t, v, h, ok = it.PeekPrev() + t, v, h, _, ok = it.PeekPrev() if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) { return 0, 0, nil, false } @@ -2070,6 +2085,7 @@ func (ev *evaluator) matrixSelector(node *parser.MatrixSelector) (Matrix, annota Metric: series[i].Labels(), } + // TODO: Handle info 
metric samples ss.Floats, ss.Histograms = ev.matrixIterSlice(it, mint, maxt, nil, nil) totalSize := int64(len(ss.Floats)) + int64(totalHPointSize(ss.Histograms)) ev.samplesStats.IncrementSamplesAtTimestamp(ev.startTimestamp, totalSize) @@ -2098,6 +2114,7 @@ func (ev *evaluator) matrixIterSlice( ) ([]FPoint, []HPoint) { mintFloats, mintHistograms := mint, mint + // TODO: Handle info metric samples // First floats... if len(floats) > 0 && floats[len(floats)-1].T >= mint { // There is an overlap between previous and current ranges, retain common @@ -2157,6 +2174,7 @@ func (ev *evaluator) matrixIterSlice( buf := it.Buffer() loop: for { + // TODO: Handle info metric samples switch buf.Next() { case chunkenc.ValNone: break loop @@ -2392,6 +2410,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * fl, fr = fr, fl hl, hr = hr, hl } + // TODO: Handle info metric samples floatValue, histogramValue, keep := vectorElemBinop(op, fl, fr, hl, hr) switch { case returnBool: @@ -2428,6 +2447,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching * insertedSigs[insertSig] = struct{}{} } + // TODO: Handle info metric samples enh.Out = append(enh.Out, Sample{ Metric: metric, F: floatValue, @@ -2508,6 +2528,7 @@ func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scala lf, rf = rf, lf lh, rh = rh, lh } + // TODO: Handle info metric samples float, histogram, keep := vectorElemBinop(op, lf, rf, lh, rh) // Catch cases where the scalar is the LHS in a scalar-vector comparison operation. // We want to always keep the vector element value as the output value, even if it's on the RHS. @@ -2569,6 +2590,7 @@ func scalarBinop(op parser.ItemType, lhs, rhs float64) float64 { } // vectorElemBinop evaluates a binary operation between two Vector elements. +// TODO: Handle info metric samples. 
func vectorElemBinop(op parser.ItemType, lhs, rhs float64, hlhs, hrhs *histogram.FloatHistogram) (float64, *histogram.FloatHistogram, bool) { switch op { case parser.ADD: @@ -2617,6 +2639,7 @@ func vectorElemBinop(op parser.ItemType, lhs, rhs float64, hlhs, hrhs *histogram } type groupedAggregation struct { + // TODO: Handle info metric samples hasFloat bool // Has at least 1 float64 sample aggregated. hasHistogram bool // Has at least 1 histogram sample aggregated. labels labels.Labels @@ -2713,6 +2736,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, par floatMean: s.F, groupCount: 1, } + // TODO: Handle info metric samples switch { case s.H == nil: newAgg.hasFloat = true @@ -2760,6 +2784,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, par switch op { case parser.SUM: + // TODO: Handle info metric samples if s.H != nil { group.hasHistogram = true if group.histogramValue != nil { @@ -2776,6 +2801,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, par case parser.AVG: group.groupCount++ if s.H != nil { + // TODO: Handle info metric samples group.hasHistogram = true if group.histogramMean != nil { left := s.H.Copy().Div(float64(group.groupCount)) diff --git a/promql/functions.go b/promql/functions.go index da66af2f02..96e093d623 100644 --- a/promql/functions.go +++ b/promql/functions.go @@ -85,6 +85,7 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod // Floats and no Histograms to calculate a rate. Otherwise, drop this // Vector element. 
metricName := samples.Metric.Get(labels.MetricName) + // TODO: Handle info metric samples if len(samples.Histograms) > 0 && len(samples.Floats) > 0 { return enh.Out, annos.Add(annotations.NewMixedFloatsHistogramsWarning(metricName, args[0].PositionRange())) } @@ -116,6 +117,7 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod } prevValue = currPoint.F } + // TODO: Handle info metric samples default: // TODO: add RangeTooShortWarning return enh.Out, annos diff --git a/promql/value.go b/promql/value.go index f129137d80..83d2bd4a4e 100644 --- a/promql/value.go +++ b/promql/value.go @@ -65,9 +65,10 @@ func (s Scalar) MarshalJSON() ([]byte, error) { // Series is a stream of data points belonging to a metric. type Series struct { - Metric labels.Labels `json:"metric"` - Floats []FPoint `json:"values,omitempty"` - Histograms []HPoint `json:"histograms,omitempty"` + Metric labels.Labels `json:"metric"` + Floats []FPoint `json:"values,omitempty"` + Histograms []HPoint `json:"histograms,omitempty"` + InfoSamples []InfoPoint `json:"infoSamples,omitempty"` } func (s Series) String() string { @@ -75,13 +76,16 @@ func (s Series) String() string { // histograms, each sorted by timestamp. Maybe, in mixed series, that's // fine. Maybe, however, primary sorting by timestamp is preferred, in // which case this has to be changed. - vals := make([]string, 0, len(s.Floats)+len(s.Histograms)) + vals := make([]string, 0, len(s.Floats)+len(s.Histograms)+len(s.InfoSamples)) for _, f := range s.Floats { vals = append(vals, f.String()) } for _, h := range s.Histograms { vals = append(vals, h.String()) } + for _, s := range s.InfoSamples { + vals = append(vals, s.String()) + } return fmt.Sprintf("%s =>\n%s", s.Metric, strings.Join(vals, "\n")) } @@ -185,33 +189,81 @@ func totalHPointSize(histograms []HPoint) int { return total } +// InfoPoint represents a single info metric data point for a given timestamp. 
+type InfoPoint struct {
+	T                 int64
+	IdentifyingLabels []int
+}
+
+func (p InfoPoint) String() string {
+	var b strings.Builder
+	for i, il := range p.IdentifyingLabels {
+		// Fix: separator before every element except the first; the patch
+		// tested `i < 0`, which is never true, so labels ran together.
+		if i > 0 {
+			b.WriteRune(',')
+		}
+		b.WriteString(strconv.Itoa(il))
+	}
+	return fmt.Sprintf("%s @[%v]", b.String(), p.T)
+}
+
+// MarshalJSON implements json.Marshaler.
+//
+// JSON marshaling is only needed for the HTTP API.
+func (p InfoPoint) MarshalJSON() ([]byte, error) {
+	return json.Marshal([...]interface{}{float64(p.T) / 1000, p.IdentifyingLabels})
+}
+
 // Sample is a single sample belonging to a metric. It represents either a float
-// sample or a histogram sample. If H is nil, it is a float sample. Otherwise,
-// it is a histogram sample.
+// sample, a histogram sample or an info metric sample.
 type Sample struct {
-	T      int64
-	F      float64
-	H      *histogram.FloatHistogram
+	T                 int64
+	F                 float64
+	H                 *histogram.FloatHistogram
+	IdentifyingLabels []int
 
 	Metric labels.Labels
 }
 
 func (s Sample) String() string {
 	var str string
-	if s.H == nil {
-		p := FPoint{T: s.T, F: s.F}
-		str = p.String()
-	} else {
+	switch {
+	case s.H != nil:
 		p := HPoint{T: s.T, H: s.H}
 		str = p.String()
+	case s.IdentifyingLabels != nil:
+		p := InfoPoint{T: s.T, IdentifyingLabels: s.IdentifyingLabels}
+		str = p.String()
+	default:
+		p := FPoint{T: s.T, F: s.F}
+		str = p.String()
 	}
+
 	return fmt.Sprintf("%s => %s", s.Metric, str)
 }
 
 // MarshalJSON is mirrored in web/api/v1/api.go with jsoniter because FPoint and
 // HPoint wouldn't be marshaled with jsoniter otherwise.
func (s Sample) MarshalJSON() ([]byte, error) { - if s.H == nil { + switch { + case s.H != nil: + h := struct { + M labels.Labels `json:"metric"` + H HPoint `json:"histogram"` + }{ + M: s.Metric, + H: HPoint{T: s.T, H: s.H}, + } + return json.Marshal(h) + case s.IdentifyingLabels != nil: + i := struct { + M labels.Labels `json:"metric"` + Info InfoPoint `json:"identifyingLabels"` + }{ + M: s.Metric, + Info: InfoPoint{T: s.T, IdentifyingLabels: s.IdentifyingLabels}, + } + return json.Marshal(i) + default: f := struct { M labels.Labels `json:"metric"` F FPoint `json:"value"` @@ -221,14 +273,6 @@ func (s Sample) MarshalJSON() ([]byte, error) { } return json.Marshal(f) } - h := struct { - M labels.Labels `json:"metric"` - H HPoint `json:"histogram"` - }{ - M: s.Metric, - H: HPoint{T: s.T, H: s.H}, - } - return json.Marshal(h) } // Vector is basically only an alias for []Sample, but the contract is that @@ -302,7 +346,7 @@ func (m Matrix) String() string { func (m Matrix) TotalSamples() int { numSamples := 0 for _, series := range m { - numSamples += len(series.Floats) + totalHPointSize(series.Histograms) + numSamples += len(series.Floats) + totalHPointSize(series.Histograms) + len(series.InfoSamples) } return numSamples } @@ -417,21 +461,24 @@ func (ss *StorageSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator { } type storageSeriesIterator struct { - floats []FPoint - histograms []HPoint - iFloats, iHistograms int - currT int64 - currF float64 - currH *histogram.FloatHistogram + floats []FPoint + histograms []HPoint + infoSamples []InfoPoint + iFloats, iHistograms, iInfoSamples int + currT int64 + currF float64 + currILs []int + currH *histogram.FloatHistogram } func newStorageSeriesIterator(series Series) *storageSeriesIterator { return &storageSeriesIterator{ - floats: series.Floats, - histograms: series.Histograms, - iFloats: -1, - iHistograms: 0, - currT: math.MinInt64, + floats: series.Floats, + histograms: series.Histograms, + iFloats: -1, + iHistograms: 
0, + iInfoSamples: -1, + currT: math.MinInt64, } } @@ -440,13 +487,15 @@ func (ssi *storageSeriesIterator) reset(series Series) { ssi.histograms = series.Histograms ssi.iFloats = -1 ssi.iHistograms = 0 + ssi.iInfoSamples = -1 ssi.currT = math.MinInt64 ssi.currF = 0 ssi.currH = nil + ssi.currILs = nil } func (ssi *storageSeriesIterator) Seek(t int64) chunkenc.ValueType { - if ssi.iFloats >= len(ssi.floats) && ssi.iHistograms >= len(ssi.histograms) { + if ssi.iFloats >= len(ssi.floats) && ssi.iHistograms >= len(ssi.histograms) && ssi.iInfoSamples >= len(ssi.infoSamples) { return chunkenc.ValNone } for ssi.currT < t { @@ -457,6 +506,9 @@ func (ssi *storageSeriesIterator) Seek(t int64) chunkenc.ValueType { if ssi.currH != nil { return chunkenc.ValFloatHistogram } + if ssi.currILs != nil { + return chunkenc.ValInfoSample + } return chunkenc.ValFloat } @@ -472,34 +524,49 @@ func (ssi *storageSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (i return ssi.currT, ssi.currH } +func (ssi *storageSeriesIterator) AtInfoSample() (int64, []int) { + return ssi.currT, ssi.currILs +} + func (ssi *storageSeriesIterator) AtT() int64 { return ssi.currT } func (ssi *storageSeriesIterator) Next() chunkenc.ValueType { - if ssi.currH != nil { + switch { + case ssi.currH != nil: ssi.iHistograms++ - } else { + case ssi.currILs != nil: + ssi.iInfoSamples++ + default: ssi.iFloats++ } var ( - pickH, pickF = false, false - floatsExhausted = ssi.iFloats >= len(ssi.floats) - histogramsExhausted = ssi.iHistograms >= len(ssi.histograms) + pickH, pickF, pickI = false, false, false + floatsExhausted = ssi.iFloats >= len(ssi.floats) + histogramsExhausted = ssi.iHistograms >= len(ssi.histograms) + infoSamplesExhausted = ssi.iInfoSamples >= len(ssi.infoSamples) + floatT = ssi.floats[ssi.iFloats].T + histogramT = ssi.histograms[ssi.iHistograms].T + infoSampleT = ssi.infoSamples[ssi.iInfoSamples].T ) switch { - case floatsExhausted: - if histogramsExhausted { // Both exhausted! 
- return chunkenc.ValNone - } - pickH = true - case histogramsExhausted: // and floats not exhausted. + case floatsExhausted && histogramsExhausted && infoSamplesExhausted: + return chunkenc.ValNone + case !floatsExhausted && histogramsExhausted && infoSamplesExhausted: pickF = true + case floatsExhausted && !histogramsExhausted && infoSamplesExhausted: + pickH = true + case floatsExhausted && histogramsExhausted && !infoSamplesExhausted: + pickI = true // From here on, we have to look at timestamps. - case ssi.histograms[ssi.iHistograms].T < ssi.floats[ssi.iFloats].T: - // Next histogram comes before next float. + case histogramT < floatT && histogramT < infoSampleT: + // Next histogram sample comes before other sample types. pickH = true + case infoSampleT < histogramT && infoSampleT < floatT: + // Next info metric sample comes before other sample types. + pickI = true default: // In all other cases, we pick float so that we first iterate // through floats if the timestamp is the same. 
@@ -519,6 +586,13 @@ func (ssi *storageSeriesIterator) Next() chunkenc.ValueType {
 		ssi.currF = 0
 		ssi.currH = p.H
 		return chunkenc.ValFloatHistogram
+	case pickI:
+		p := ssi.infoSamples[ssi.iInfoSamples]
+		ssi.currT = p.T
+		ssi.currILs = p.IdentifyingLabels
+		ssi.currF = 0
+		ssi.currH = nil
+		// Fix: report ValInfoSample — the patch returned ValFloat here,
+		// which makes every consumer treat the info sample as a float
+		// sample and AtInfoSample would never be consulted.
+		// NOTE(review): the pickF/pickH branches (outside this hunk) should
+		// also reset ssi.currILs = nil, or a stale value survives past an
+		// info sample — confirm against the full file.
+		return chunkenc.ValInfoSample
 	default:
 		panic("storageSeriesIterater.Next failed to pick value type")
 	}
 }
diff --git a/scrape/helpers_test.go b/scrape/helpers_test.go
index 116fa5c94b..4066c26790 100644
--- a/scrape/helpers_test.go
+++ b/scrape/helpers_test.go
@@ -47,6 +47,10 @@ func (a nopAppender) Append(storage.SeriesRef, labels.Labels, int64, float64) (storage.SeriesRef, error) {
 	return 0, nil
 }
 
+func (a nopAppender) AppendInfoSample(storage.SeriesRef, labels.Labels, int64, []int) (storage.SeriesRef, error) {
+	return 0, nil
+}
+
 func (a nopAppender) AppendExemplar(storage.SeriesRef, labels.Labels, exemplar.Exemplar) (storage.SeriesRef, error) {
 	return 0, nil
 }
@@ -77,6 +81,12 @@ func equalFloatSamples(a, b floatSample) bool {
 	return labels.Equal(a.metric, b.metric) && a.t == b.t && math.Float64bits(a.f) == math.Float64bits(b.f)
 }
 
+type infoSample struct {
+	metric            labels.Labels
+	t                 int64
+	identifyingLabels []int
+}
+
 type histogramSample struct {
 	t int64
 	h *histogram.Histogram
@@ -96,17 +106,20 @@ func (a *collectResultAppendable) Appender(_ context.Context) storage.Appender {
 
 type collectResultAppender struct {
 	mtx sync.Mutex
 
-	next                 storage.Appender
-	resultFloats         []floatSample
-	pendingFloats        []floatSample
-	rolledbackFloats     []floatSample
-	resultHistograms     []histogramSample
-	pendingHistograms    []histogramSample
-	rolledbackHistograms []histogramSample
-	resultExemplars      []exemplar.Exemplar
-	pendingExemplars     []exemplar.Exemplar
-	resultMetadata       []metadata.Metadata
-	pendingMetadata      []metadata.Metadata
+	next                  storage.Appender
+	resultFloats          []floatSample
+	pendingFloats         []floatSample
+	rolledbackFloats      []floatSample
+	resultInfoSamples     []infoSample
+	pendingInfoSamples    []infoSample
+	rolledbackInfoSamples
[]infoSample + resultHistograms []histogramSample + pendingHistograms []histogramSample + rolledbackHistograms []histogramSample + resultExemplars []exemplar.Exemplar + pendingExemplars []exemplar.Exemplar + resultMetadata []metadata.Metadata + pendingMetadata []metadata.Metadata } func (a *collectResultAppender) Append(ref storage.SeriesRef, lset labels.Labels, t int64, v float64) (storage.SeriesRef, error) { @@ -132,6 +145,29 @@ func (a *collectResultAppender) Append(ref storage.SeriesRef, lset labels.Labels return ref, err } +func (a *collectResultAppender) AppendInfoSample(ref storage.SeriesRef, lset labels.Labels, t int64, identifyingLabels []int) (storage.SeriesRef, error) { + a.mtx.Lock() + defer a.mtx.Unlock() + a.pendingInfoSamples = append(a.pendingInfoSamples, infoSample{ + metric: lset, + t: t, + identifyingLabels: identifyingLabels, + }) + + if ref == 0 { + ref = storage.SeriesRef(rand.Uint64()) + } + if a.next == nil { + return ref, nil + } + + ref, err := a.next.AppendInfoSample(ref, lset, t, identifyingLabels) + if err != nil { + return 0, err + } + return ref, err +} + func (a *collectResultAppender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exemplar.Exemplar) (storage.SeriesRef, error) { a.mtx.Lock() defer a.mtx.Unlock() @@ -176,10 +212,12 @@ func (a *collectResultAppender) Commit() error { a.mtx.Lock() defer a.mtx.Unlock() a.resultFloats = append(a.resultFloats, a.pendingFloats...) + a.resultInfoSamples = append(a.resultInfoSamples, a.pendingInfoSamples...) a.resultExemplars = append(a.resultExemplars, a.pendingExemplars...) a.resultHistograms = append(a.resultHistograms, a.pendingHistograms...) a.resultMetadata = append(a.resultMetadata, a.pendingMetadata...) 
a.pendingFloats = nil + a.pendingInfoSamples = nil a.pendingExemplars = nil a.pendingHistograms = nil a.pendingMetadata = nil @@ -193,8 +231,10 @@ func (a *collectResultAppender) Rollback() error { a.mtx.Lock() defer a.mtx.Unlock() a.rolledbackFloats = a.pendingFloats + a.rolledbackInfoSamples = a.pendingInfoSamples a.rolledbackHistograms = a.pendingHistograms a.pendingFloats = nil + a.pendingInfoSamples = nil a.pendingHistograms = nil if a.next == nil { return nil diff --git a/scrape/target.go b/scrape/target.go index ad4b4f6857..5e0eb15936 100644 --- a/scrape/target.go +++ b/scrape/target.go @@ -363,6 +363,8 @@ type bucketLimitAppender struct { limit int } +// TODO: Support appending of info metric samples. + func (app *bucketLimitAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels, t int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) { if h != nil { for len(h.PositiveBuckets)+len(h.NegativeBuckets) > app.limit { diff --git a/storage/buffer.go b/storage/buffer.go index 651e5c83e8..6ab8ccbec2 100644 --- a/storage/buffer.go +++ b/storage/buffer.go @@ -94,7 +94,7 @@ func (b *BufferedSeriesIterator) Seek(t int64) chunkenc.ValueType { switch b.valueType { case chunkenc.ValNone: return chunkenc.ValNone - case chunkenc.ValFloat, chunkenc.ValHistogram, chunkenc.ValFloatHistogram: + case chunkenc.ValFloat, chunkenc.ValHistogram, chunkenc.ValFloatHistogram, chunkenc.ValInfoSample: b.lastTime = b.AtT() default: panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) @@ -126,6 +126,9 @@ func (b *BufferedSeriesIterator) Next() chunkenc.ValueType { case chunkenc.ValFloatHistogram: t, fh := b.it.AtFloatHistogram(&b.fhReader) b.buf.addFH(fhSample{t: t, fh: fh}) + case chunkenc.ValInfoSample: + t, ils := b.it.AtInfoSample() + b.buf.addInfoSample(infoSample{t: t, ils: ils}) default: panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) } @@ -152,6 +155,11 @@ func (b 
*BufferedSeriesIterator) AtFloatHistogram(fh *histogram.FloatHistogram) return b.it.AtFloatHistogram(fh) } +// AtInfoSample returns the current info metric element of the iterator. +func (b *BufferedSeriesIterator) AtInfoSample() (int64, []int) { + return b.it.AtInfoSample() +} + // AtT returns the current timestamp of the iterator. func (b *BufferedSeriesIterator) AtT() int64 { return b.it.AtT() @@ -183,6 +191,10 @@ func (s fSample) FH() *histogram.FloatHistogram { panic("FH() called for fSample") } +func (s fSample) IdentifyingLabels() []int { + panic("IdentifyingLabels() called for fSample") +} + func (s fSample) Type() chunkenc.ValueType { return chunkenc.ValFloat } @@ -208,6 +220,10 @@ func (s hSample) FH() *histogram.FloatHistogram { return s.h.ToFloat(nil) } +func (s hSample) IdentifyingLabels() []int { + panic("IdentifyingLabels() called for hSample") +} + func (s hSample) Type() chunkenc.ValueType { return chunkenc.ValHistogram } @@ -233,10 +249,43 @@ func (s fhSample) FH() *histogram.FloatHistogram { return s.fh } +func (s fhSample) IdentifyingLabels() []int { + panic("IdentifyingLabels() called for fhSample") +} + func (s fhSample) Type() chunkenc.ValueType { return chunkenc.ValFloatHistogram } +type infoSample struct { + t int64 + ils []int +} + +func (s infoSample) T() int64 { + return s.t +} + +func (s infoSample) F() float64 { + panic("F() called for infoSample") +} + +func (s infoSample) H() *histogram.Histogram { + panic("H() called for infoSample") +} + +func (s infoSample) FH() *histogram.FloatHistogram { + panic("FH() called for infoSample") +} + +func (s infoSample) IdentifyingLabels() []int { + return s.ils +} + +func (s infoSample) Type() chunkenc.ValueType { + return chunkenc.ValInfoSample +} + type sampleRing struct { delta int64 @@ -249,6 +298,7 @@ type sampleRing struct { fBuf []fSample hBuf []hSample fhBuf []fhSample + infoBuf []infoSample bufInUse bufType i int // Position of most recent element in ring buffer. 
@@ -266,6 +316,7 @@ const ( fBuf hBuf fhBuf + infoBuf ) // newSampleRing creates a new sampleRing. If you do not know the prefereed @@ -286,6 +337,8 @@ func newSampleRing(delta int64, size int, typ chunkenc.ValueType) *sampleRing { r.hBuf = make([]hSample, size) case chunkenc.ValFloatHistogram: r.fhBuf = make([]fhSample, size) + case chunkenc.ValInfoSample: + r.infoBuf = make([]infoSample, size) default: // Do not initialize anything because the 1st sample will be // added to one of the other bufs anyway. @@ -315,12 +368,13 @@ func (r *sampleRing) iterator() *SampleRingIterator { // SampleRingIterator is returned by BufferedSeriesIterator.Buffer() and can be // used to iterate samples buffered in the lookback window. type SampleRingIterator struct { - r *sampleRing - i int - t int64 - f float64 - h *histogram.Histogram - fh *histogram.FloatHistogram + r *sampleRing + i int + t int64 + f float64 + h *histogram.Histogram + fh *histogram.FloatHistogram + identifyingLabels []int } func (it *SampleRingIterator) reset(r *sampleRing) { @@ -328,6 +382,7 @@ func (it *SampleRingIterator) reset(r *sampleRing) { it.i = -1 it.h = nil it.fh = nil + it.identifyingLabels = nil } func (it *SampleRingIterator) Next() chunkenc.ValueType { @@ -351,6 +406,11 @@ func (it *SampleRingIterator) Next() chunkenc.ValueType { it.t = s.t it.fh = s.fh return chunkenc.ValFloatHistogram + case infoBuf: + s := it.r.atInfoSample(it.i) + it.t = s.t + it.identifyingLabels = s.ils + return chunkenc.ValInfoSample } s := it.r.at(it.i) it.t = s.T() @@ -363,6 +423,9 @@ func (it *SampleRingIterator) Next() chunkenc.ValueType { it.fh = s.FH() it.h = nil return chunkenc.ValFloatHistogram + case chunkenc.ValInfoSample: + it.identifyingLabels = s.IdentifyingLabels() + return chunkenc.ValInfoSample default: it.f = s.F() return chunkenc.ValFloat @@ -394,6 +457,11 @@ func (it *SampleRingIterator) AtFloatHistogram(fh *histogram.FloatHistogram) (in return it.t, it.fh.Copy() } +// AtInfoSample returns the current 
info metric element of the iterator. +func (it *SampleRingIterator) AtInfoSample() (int64, []int) { + return it.t, it.identifyingLabels +} + func (it *SampleRingIterator) AtT() int64 { return it.t } @@ -418,6 +486,11 @@ func (r *sampleRing) atFH(i int) fhSample { return r.fhBuf[j] } +func (r *sampleRing) atInfoSample(i int) infoSample { + j := (r.f + i) % len(r.infoBuf) + return r.infoBuf[j] +} + // add adds a sample to the ring buffer and frees all samples that fall out of // the delta range. Note that this method works for any sample // implementation. If you know you are dealing with one of the implementations @@ -535,6 +608,23 @@ func (r *sampleRing) addFH(s fhSample) { } } +// addInfoSample is a version of the add method specialized for infoSample. +func (r *sampleRing) addInfoSample(s infoSample) { + switch r.bufInUse { + case infoBuf: // Add to existing infoSamples. + r.infoBuf = addInfoSample(s, r.infoBuf, r) + case noBuf: // Add first sample. + r.infoBuf = addInfoSample(s, r.infoBuf, r) + r.bufInUse = infoBuf + case iBuf: // Already have interface samples. Add to the interface buf. + r.iBuf = addSample(s, r.iBuf, r) + default: + // Already have specialized samples that are not infoSamples. + // Need to call the checked add method for conversion. + r.add(s) + } +} + // genericAdd is a generic implementation of adding a chunks.Sample // implementation to a buffer of a sample ring. However, the Go compiler // currently (go1.20) decides to not expand the code during compile time, but @@ -749,6 +839,45 @@ func addFH(s fhSample, buf []fhSample, r *sampleRing) []fhSample { return buf } +// addInfoSample is a handcoded specialization of genericAdd (see above). +func addInfoSample(s infoSample, buf []infoSample, r *sampleRing) []infoSample { + l := len(buf) + // Grow the ring buffer if it fits no more elements. 
+ if l == 0 { + buf = make([]infoSample, 16) + l = 16 + } + if l == r.l { + newBuf := make([]infoSample, 2*l) + copy(newBuf[l+r.f:], buf[r.f:]) + copy(newBuf, buf[:r.f]) + + buf = newBuf + r.i = r.f + r.f += l + l = 2 * l + } else { + r.i++ + if r.i >= l { + r.i -= l + } + } + + buf[r.i] = s + r.l++ + + // Free head of the buffer of samples that just fell out of the range. + tmin := s.T() - r.delta + for buf[r.f].T() < tmin { + r.f++ + if r.f >= l { + r.f -= l + } + r.l-- + } + return buf +} + // reduceDelta lowers the buffered time delta, dropping any samples that are // out of the new delta range. func (r *sampleRing) reduceDelta(delta int64) bool { diff --git a/storage/buffer_test.go b/storage/buffer_test.go index 1b24e5da22..582413dec4 100644 --- a/storage/buffer_test.go +++ b/storage/buffer_test.go @@ -348,6 +348,10 @@ func (m *mockSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, return 0, nil // Not really mocked. } +func (m *mockSeriesIterator) AtInfoSample() (int64, []int) { + return 0, nil // Not really mocked. +} + func (m *mockSeriesIterator) AtT() int64 { return 0 // Not really mocked. } @@ -374,6 +378,10 @@ func (it *fakeSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64 return it.idx * it.step, &histogram.FloatHistogram{} // Value doesn't matter. } +func (it *fakeSeriesIterator) AtInfoSample() (int64, []int) { + return it.idx * it.step, nil // Value doesn't matter. 
+} + func (it *fakeSeriesIterator) AtT() int64 { return it.idx * it.step } diff --git a/storage/fanout.go b/storage/fanout.go index e52342bc7e..77ed993e0e 100644 --- a/storage/fanout.go +++ b/storage/fanout.go @@ -162,6 +162,20 @@ func (f *fanoutAppender) Append(ref SeriesRef, l labels.Labels, t int64, v float return ref, nil } +func (f *fanoutAppender) AppendInfoSample(ref SeriesRef, l labels.Labels, t int64, identifyingLabels []int) (SeriesRef, error) { + ref, err := f.primary.AppendInfoSample(ref, l, t, identifyingLabels) + if err != nil { + return ref, err + } + + for _, appender := range f.secondaries { + if _, err := appender.AppendInfoSample(ref, l, t, identifyingLabels); err != nil { + return 0, err + } + } + return ref, nil +} + func (f *fanoutAppender) AppendExemplar(ref SeriesRef, l labels.Labels, e exemplar.Exemplar) (SeriesRef, error) { ref, err := f.primary.AppendExemplar(ref, l, e) if err != nil { diff --git a/storage/interface.go b/storage/interface.go index 892897e51e..4df2c362b0 100644 --- a/storage/interface.go +++ b/storage/interface.go @@ -245,6 +245,9 @@ type Appender interface { // If the reference is 0 it must not be used for caching. Append(ref SeriesRef, l labels.Labels, t int64, v float64) (SeriesRef, error) + // AppendInfoSample adds an info metric sample for the given series. + AppendInfoSample(ref SeriesRef, l labels.Labels, t int64, identifyingLabels []int) (SeriesRef, error) + // Commit submits the collected samples and purges the batch. If Commit // returns a non-nil error, it also rolls back all modifications made in // the appender so far, as Rollback would do. In any case, an Appender diff --git a/storage/memoized_iterator.go b/storage/memoized_iterator.go index 4ab2aa5d78..e5fd28343d 100644 --- a/storage/memoized_iterator.go +++ b/storage/memoized_iterator.go @@ -32,9 +32,10 @@ type MemoizedSeriesIterator struct { valueType chunkenc.ValueType // Keep track of the previously returned value. 
- prevTime int64 - prevValue float64 - prevFloatHistogram *histogram.FloatHistogram + prevTime int64 + prevValue float64 + prevFloatHistogram *histogram.FloatHistogram + prevIdentifyingLabels []int } // NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator. @@ -64,11 +65,11 @@ func (b *MemoizedSeriesIterator) Reset(it chunkenc.Iterator) { // PeekPrev returns the previous element of the iterator. If there is none buffered, // ok is false. -func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, fh *histogram.FloatHistogram, ok bool) { +func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, fh *histogram.FloatHistogram, ils []int, ok bool) { if b.prevTime == math.MinInt64 { - return 0, 0, nil, false + return 0, 0, nil, nil, false } - return b.prevTime, b.prevValue, b.prevFloatHistogram, true + return b.prevTime, b.prevValue, b.prevFloatHistogram, b.prevIdentifyingLabels, true } // Seek advances the iterator to the element at time t or greater. 
@@ -81,6 +82,7 @@ func (b *MemoizedSeriesIterator) Seek(t int64) chunkenc.ValueType { b.prevTime = math.MinInt64 b.valueType = b.it.Seek(t0) + // TODO: Handle info metric samples switch b.valueType { case chunkenc.ValNone: return chunkenc.ValNone @@ -111,9 +113,15 @@ func (b *MemoizedSeriesIterator) Next() chunkenc.ValueType { case chunkenc.ValFloat: b.prevTime, b.prevValue = b.it.At() b.prevFloatHistogram = nil + b.prevIdentifyingLabels = nil case chunkenc.ValHistogram, chunkenc.ValFloatHistogram: - b.prevValue = 0 b.prevTime, b.prevFloatHistogram = b.it.AtFloatHistogram(nil) + b.prevValue = 0 + b.prevIdentifyingLabels = nil + case chunkenc.ValInfoSample: + b.prevTime, b.prevIdentifyingLabels = b.it.AtInfoSample() + b.prevValue = 0 + b.prevFloatHistogram = nil } b.valueType = b.it.Next() @@ -136,6 +144,11 @@ func (b *MemoizedSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHist return b.it.AtFloatHistogram(nil) } +// AtInfoSample returns the current info sample element of the iterator. +func (b *MemoizedSeriesIterator) AtInfoSample() (int64, []int) { + return b.it.AtInfoSample() +} + // Err returns the last encountered error. 
func (b *MemoizedSeriesIterator) Err() error { return b.it.Err() diff --git a/storage/memoized_iterator_test.go b/storage/memoized_iterator_test.go index d1cd565170..9c37477c6b 100644 --- a/storage/memoized_iterator_test.go +++ b/storage/memoized_iterator_test.go @@ -26,25 +26,33 @@ import ( func TestMemoizedSeriesIterator(t *testing.T) { var it *MemoizedSeriesIterator - sampleEq := func(ets int64, ev float64, efh *histogram.FloatHistogram) { - if efh == nil { - ts, v := it.At() - require.Equal(t, ets, ts, "timestamp mismatch") - require.Equal(t, ev, v, "value mismatch") - } else { + sampleEq := func(ets int64, ev float64, efh *histogram.FloatHistogram, eils []int) { + switch { + case efh != nil: ts, fh := it.AtFloatHistogram() require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, efh, fh, "histogram mismatch") + case eils != nil: + ts, ils := it.AtInfoSample() + require.Equal(t, ets, ts, "timestamp mismatch") + require.Equal(t, eils, ils, "identifying labels mismatch") + default: + ts, v := it.At() + require.Equal(t, ets, ts, "timestamp mismatch") + require.Equal(t, ev, v, "value mismatch") } } - prevSampleEq := func(ets int64, ev float64, efh *histogram.FloatHistogram, eok bool) { - ts, v, fh, ok := it.PeekPrev() + prevSampleEq := func(ets int64, ev float64, efh *histogram.FloatHistogram, eils []int, eok bool) { + ts, v, fh, ils, ok := it.PeekPrev() require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, ets, ts, "timestamp mismatch") - if efh == nil { - require.Equal(t, ev, v, "value mismatch") - } else { + switch { + case efh != nil: require.Equal(t, efh, fh, "histogram mismatch") + case eils != nil: + require.Equal(t, eils, ils, "identifying labels mismatch") + default: + require.Equal(t, ev, v, "value mismatch") } } @@ -69,37 +77,37 @@ func TestMemoizedSeriesIterator(t *testing.T) { }), 2) require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed") - sampleEq(1, 2, nil) - prevSampleEq(0, 0, nil, false) + sampleEq(1, 2, nil, nil) + 
prevSampleEq(0, 0, nil, nil, false) require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed") - sampleEq(5, 6, nil) - prevSampleEq(4, 5, nil, true) + sampleEq(5, 6, nil, nil) + prevSampleEq(4, 5, nil, nil, true) // Seek to a histogram sample with a previous float sample. require.Equal(t, chunkenc.ValFloatHistogram, it.Seek(102), "seek failed") - sampleEq(102, 10, tsdbutil.GenerateTestFloatHistogram(0)) - prevSampleEq(101, 10, nil, true) + sampleEq(102, 10, tsdbutil.GenerateTestFloatHistogram(0), nil) + prevSampleEq(101, 10, nil, nil, true) // Attempt to seek backwards (no-op). require.Equal(t, chunkenc.ValFloatHistogram, it.Seek(50), "seek failed") - sampleEq(102, 10, tsdbutil.GenerateTestFloatHistogram(0)) - prevSampleEq(101, 10, nil, true) + sampleEq(102, 10, tsdbutil.GenerateTestFloatHistogram(0), nil) + prevSampleEq(101, 10, nil, nil, true) // Seek to a float histogram sample with a previous histogram sample. require.Equal(t, chunkenc.ValFloatHistogram, it.Seek(104), "seek failed") - sampleEq(104, 0, tsdbutil.GenerateTestFloatHistogram(2)) - prevSampleEq(103, 0, tsdbutil.GenerateTestFloatHistogram(1), true) + sampleEq(104, 0, tsdbutil.GenerateTestFloatHistogram(2), nil) + prevSampleEq(103, 0, tsdbutil.GenerateTestFloatHistogram(1), nil, true) // Seek to a float sample with a previous float histogram sample. require.Equal(t, chunkenc.ValFloat, it.Seek(300), "seek failed") - sampleEq(300, 11, nil) - prevSampleEq(299, 0, tsdbutil.GenerateTestFloatHistogram(5), true) + sampleEq(300, 11, nil, nil) + prevSampleEq(299, 0, tsdbutil.GenerateTestFloatHistogram(5), nil, true) // Seek to a float sample with a previous histogram sample. 
require.Equal(t, chunkenc.ValFloat, it.Seek(400), "seek failed") - sampleEq(400, 12, nil) - prevSampleEq(399, 0, tsdbutil.GenerateTestFloatHistogram(6), true) + sampleEq(400, 12, nil, nil) + prevSampleEq(399, 0, tsdbutil.GenerateTestFloatHistogram(6), nil, true) require.Equal(t, chunkenc.ValNone, it.Seek(1024), "seek succeeded unexpectedly") } diff --git a/storage/merge.go b/storage/merge.go index 0dcbae2a55..320550eb55 100644 --- a/storage/merge.go +++ b/storage/merge.go @@ -558,6 +558,13 @@ func (c *chainSampleIterator) AtFloatHistogram(fh *histogram.FloatHistogram) (in return t, fh } +func (c *chainSampleIterator) AtInfoSample() (int64, []int) { + if c.curr == nil { + panic("chainSampleIterator.AtInfoSample called before first .Next or after .Next returned false.") + } + return c.curr.AtInfoSample() +} + func (c *chainSampleIterator) AtT() int64 { if c.curr == nil { panic("chainSampleIterator.AtT called before first .Next or after .Next returned false.") diff --git a/storage/merge_test.go b/storage/merge_test.go index 4632b170da..947d49c778 100644 --- a/storage/merge_test.go +++ b/storage/merge_test.go @@ -1652,6 +1652,10 @@ func (e errIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histog return 0, nil } +func (e errIterator) AtInfoSample() (int64, []int) { + return 0, nil +} + func (e errIterator) AtT() int64 { return 0 } diff --git a/storage/remote/codec.go b/storage/remote/codec.go index 1228b23f5c..420e6d5c86 100644 --- a/storage/remote/codec.go +++ b/storage/remote/codec.go @@ -157,6 +157,17 @@ func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult, case chunkenc.ValFloatHistogram: ts, fh := iter.AtFloatHistogram(nil) histograms = append(histograms, FloatHistogramToHistogramProto(ts, fh)) + case chunkenc.ValInfoSample: + ts, ils := iter.AtInfoSample() + identifyingLabels := make([]int32, 0, len(ils)) + for _, il := range ils { + identifyingLabels = append(identifyingLabels, int32(il)) + } + samples = append(samples, 
prompb.Sample{ + Timestamp: ts, + Value: 1, + IdentifyingLabels: identifyingLabels, + }) default: return nil, ss.Warnings(), fmt.Errorf("unrecognized value type: %s", valType) } @@ -366,9 +377,10 @@ func (c *concreteSeriesSet) Warnings() annotations.Annotations { return nil } // concreteSeries implements storage.Series. type concreteSeries struct { - labels labels.Labels - floats []prompb.Sample - histograms []prompb.Histogram + labels labels.Labels + floats []prompb.Sample + histograms []prompb.Histogram + infoSamples []prompb.Sample } func (c *concreteSeries) Labels() labels.Labels { @@ -387,6 +399,7 @@ func (c *concreteSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator { type concreteSeriesIterator struct { floatsCur int histogramsCur int + infosCur int curValType chunkenc.ValueType series *concreteSeries } @@ -395,6 +408,7 @@ func newConcreteSeriesIterator(series *concreteSeries) chunkenc.Iterator { return &concreteSeriesIterator{ floatsCur: -1, histogramsCur: -1, + infosCur: -1, curValType: chunkenc.ValNone, series: series, } @@ -403,6 +417,7 @@ func newConcreteSeriesIterator(series *concreteSeries) chunkenc.Iterator { func (c *concreteSeriesIterator) reset(series *concreteSeries) { c.floatsCur = -1 c.histogramsCur = -1 + c.infosCur = -1 c.curValType = chunkenc.ValNone c.series = series } @@ -415,26 +430,43 @@ func (c *concreteSeriesIterator) Seek(t int64) chunkenc.ValueType { if c.histogramsCur == -1 { c.histogramsCur = 0 } - if c.floatsCur >= len(c.series.floats) && c.histogramsCur >= len(c.series.histograms) { + if c.infosCur == -1 { + c.infosCur = 0 + } + if c.floatsCur >= len(c.series.floats) && c.histogramsCur >= len(c.series.histograms) && c.infosCur >= len(c.series.infoSamples) { return chunkenc.ValNone } // No-op check. 
- if (c.curValType == chunkenc.ValFloat && c.series.floats[c.floatsCur].Timestamp >= t) || - ((c.curValType == chunkenc.ValHistogram || c.curValType == chunkenc.ValFloatHistogram) && c.series.histograms[c.histogramsCur].Timestamp >= t) { - return c.curValType + switch c.curValType { + case chunkenc.ValFloat: + if c.series.floats[c.floatsCur].Timestamp >= t { + return c.curValType + } + case chunkenc.ValHistogram, chunkenc.ValFloatHistogram: + if c.series.histograms[c.histogramsCur].Timestamp >= t { + return c.curValType + } + case chunkenc.ValInfoSample: + if c.series.infoSamples[c.infosCur].Timestamp >= t { + return c.curValType + } } c.curValType = chunkenc.ValNone - // Binary search between current position and end for both float and histograms samples. + // Binary search between current position and end for all sample types. c.floatsCur += sort.Search(len(c.series.floats)-c.floatsCur, func(n int) bool { return c.series.floats[n+c.floatsCur].Timestamp >= t }) c.histogramsCur += sort.Search(len(c.series.histograms)-c.histogramsCur, func(n int) bool { return c.series.histograms[n+c.histogramsCur].Timestamp >= t }) + c.infosCur += sort.Search(len(c.series.infoSamples)-c.infosCur, func(n int) bool { + return c.series.infoSamples[n+c.infosCur].Timestamp >= t + }) switch { + // TODO: Handle cases with overlapping info samples case c.floatsCur < len(c.series.floats) && c.histogramsCur < len(c.series.histograms): // If float samples and histogram samples have overlapping timestamps prefer the float samples. 
if c.series.floats[c.floatsCur].Timestamp <= c.series.histograms[c.histogramsCur].Timestamp { @@ -455,6 +487,8 @@ func (c *concreteSeriesIterator) Seek(t int64) chunkenc.ValueType { c.curValType = chunkenc.ValFloat case c.histogramsCur < len(c.series.histograms): c.curValType = getHistogramValType(&c.series.histograms[c.histogramsCur]) + case c.infosCur < len(c.series.infoSamples): + c.curValType = chunkenc.ValInfoSample } return c.curValType } @@ -498,11 +532,27 @@ func (c *concreteSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (in } } +// AtInfoSample implements chunkenc.Iterator. +func (c *concreteSeriesIterator) AtInfoSample() (int64, []int) { + if c.curValType != chunkenc.ValInfoSample { + panic("iterator is not on an info metric sample") + } + s := c.series.infoSamples[c.infosCur] + ils := make([]int, 0, len(s.IdentifyingLabels)) + for _, il := range s.IdentifyingLabels { + ils = append(ils, int(il)) + } + return s.Timestamp, ils +} + // AtT implements chunkenc.Iterator. 
func (c *concreteSeriesIterator) AtT() int64 { if c.curValType == chunkenc.ValHistogram || c.curValType == chunkenc.ValFloatHistogram { return c.series.histograms[c.histogramsCur].Timestamp } + if c.curValType == chunkenc.ValInfoSample { + return c.series.infoSamples[c.infosCur].Timestamp + } return c.series.floats[c.floatsCur].Timestamp } @@ -518,24 +568,33 @@ func (c *concreteSeriesIterator) Next() chunkenc.ValueType { if c.histogramsCur+1 < len(c.series.histograms) { peekHistTS = c.series.histograms[c.histogramsCur+1].Timestamp } + peekInfoTS := noTS + if c.infosCur+1 < len(c.series.infoSamples) { + peekInfoTS = c.series.infoSamples[c.infosCur+1].Timestamp + } c.curValType = chunkenc.ValNone switch { - case peekFloatTS < peekHistTS: + case peekFloatTS < peekHistTS && peekFloatTS < peekInfoTS: c.floatsCur++ c.curValType = chunkenc.ValFloat - case peekHistTS < peekFloatTS: + case peekHistTS < peekFloatTS && peekHistTS < peekInfoTS: c.histogramsCur++ c.curValType = chunkenc.ValHistogram - case peekFloatTS == noTS && peekHistTS == noTS: + case peekInfoTS < peekFloatTS && peekInfoTS < peekHistTS: + c.infosCur++ + c.curValType = chunkenc.ValInfoSample + case peekFloatTS == noTS && peekHistTS == noTS && peekInfoTS == noTS: // This only happens when the iterator is exhausted; we set the cursors off the end to prevent // Seek() from returning anything afterwards. c.floatsCur = len(c.series.floats) c.histogramsCur = len(c.series.histograms) + c.infosCur = len(c.series.infoSamples) default: - // Prefer float samples to histogram samples if there's a conflict. We advance the cursor for histograms + // Prefer float samples to other samples if there's a conflict. We advance the cursor for histograms // anyway otherwise the histogram sample will get selected on the next call to Next(). 
c.floatsCur++ c.histogramsCur++ + c.infosCur++ c.curValType = chunkenc.ValFloat } return c.curValType diff --git a/storage/remote/otlptranslator/prometheusremotewrite/helper.go b/storage/remote/otlptranslator/prometheusremotewrite/helper.go index 817cbaba7d..6ed90449d1 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/helper.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/helper.go @@ -565,10 +565,23 @@ func addResourceTargetInfo(resource pcommon.Resource, settings Settings, timesta name = settings.Namespace + "_" + name } labels := createAttributes(resource, attributes, settings.ExternalLabels, model.MetricNameLabel, name) + // Ensure consistent label ordering + sort.Sort(ByLabelName(labels)) + identifyingLabels := make([]int32, 0, 2) + for i, l := range labels { + if l.Name == model.InstanceLabel || l.Name == model.JobLabel { + identifyingLabels = append(identifyingLabels, int32(i)) + } + } + if len(identifyingLabels) != 2 { + // target_info has to be identified by the job/instance tuple, one of them isn't enough on its own. 
+ identifyingLabels = nil + } sample := &prompb.Sample{ Value: float64(1), // convert ns to ms - Timestamp: convertTimeStamp(timestamp), + Timestamp: convertTimeStamp(timestamp), + IdentifyingLabels: identifyingLabels, } addSample(tsMap, sample, labels, infoType) } diff --git a/storage/remote/queue_manager.go b/storage/remote/queue_manager.go index 01d2db06a5..c8bbb4830c 100644 --- a/storage/remote/queue_manager.go +++ b/storage/remote/queue_manager.go @@ -846,6 +846,66 @@ outer: return true } +func (t *QueueManager) AppendInfoSamples(infoSamples []record.RefInfoSample) bool { + /* + if !t.sendInfoSamples { + return true + } + */ + currentTime := time.Now() +outer: + for _, s := range infoSamples { + if isSampleOld(currentTime, time.Duration(t.cfg.SampleAgeLimit), s.T) { + t.metrics.droppedSamplesTotal.WithLabelValues(reasonTooOld).Inc() + continue + } + + t.seriesMtx.Lock() + lbls, ok := t.seriesLabels[s.Ref] + if !ok { + t.dataDropped.incr(1) + if _, ok := t.droppedSeries[s.Ref]; !ok { + level.Info(t.logger).Log("msg", "Dropped info sample for series that was not explicitly dropped via relabelling", "ref", s.Ref) + t.metrics.droppedSamplesTotal.WithLabelValues(reasonUnintentionalDroppedSeries).Inc() + } else { + t.metrics.droppedSamplesTotal.WithLabelValues(reasonDroppedSeries).Inc() + } + t.seriesMtx.Unlock() + continue + } + t.seriesMtx.Unlock() + + // Start with a very small backoff. This should not be t.cfg.MinBackoff + // as it can happen without errors, and we want to pickup work after + // filling a queue/resharding as quickly as possible. + // TODO: Consider using the average duration of a request as the backoff. 
+ backoff := model.Duration(5 * time.Millisecond) + for { + select { + case <-t.quit: + return false + default: + } + if t.shards.enqueue(s.Ref, timeSeries{ + seriesLabels: lbls, + timestamp: s.T, + identifyingLabels: s.IdentifyingLabels, + sType: tInfoMetric, + }) { + continue outer + } + + t.metrics.enqueueRetriesTotal.Inc() + time.Sleep(time.Duration(backoff)) + backoff *= 2 + if backoff > t.cfg.MaxBackoff { + backoff = t.cfg.MaxBackoff + } + } + } + return true +} + // Start the queue manager sending samples to the remote storage. // Does not block. func (t *QueueManager) Start() { @@ -1260,6 +1320,9 @@ func (s *shards) enqueue(ref chunks.HeadSeriesRef, data timeSeries) bool { case tHistogram, tFloatHistogram: s.qm.metrics.pendingHistograms.Inc() s.enqueuedHistograms.Inc() + case tInfoMetric: + s.qm.metrics.pendingSamples.Inc() + s.enqueuedSamples.Inc() } return true } @@ -1279,13 +1342,14 @@ type queue struct { } type timeSeries struct { - seriesLabels labels.Labels - value float64 - histogram *histogram.Histogram - floatHistogram *histogram.FloatHistogram - timestamp int64 - exemplarLabels labels.Labels - // The type of series: sample, exemplar, or histogram. + seriesLabels labels.Labels + value float64 + histogram *histogram.Histogram + floatHistogram *histogram.FloatHistogram + identifyingLabels []int + timestamp int64 + exemplarLabels labels.Labels + // The type of series: sample, exemplar, histogram, or info metric. 
sType seriesType } @@ -1296,6 +1360,7 @@ const ( tExemplar tHistogram tFloatHistogram + tInfoMetric ) func newQueue(batchSize, capacity int) *queue { @@ -1528,6 +1593,17 @@ func (s *shards) populateTimeSeries(batch []timeSeries, pendingData []prompb.Tim case tFloatHistogram: pendingData[nPending].Histograms = append(pendingData[nPending].Histograms, FloatHistogramToHistogramProto(d.timestamp, d.floatHistogram)) nPendingHistograms++ + case tInfoMetric: + identifyingLabels := make([]int32, 0, len(d.identifyingLabels)) + for _, idx := range d.identifyingLabels { + identifyingLabels = append(identifyingLabels, int32(idx)) + } + pendingData[nPending].Samples = append(pendingData[nPending].Samples, prompb.Sample{ + Value: 1, + Timestamp: d.timestamp, + IdentifyingLabels: identifyingLabels, + }) + nPendingSamples++ } } return nPendingSamples, nPendingExemplars, nPendingHistograms diff --git a/storage/remote/write.go b/storage/remote/write.go index 66455cb4dd..b62c4b70c1 100644 --- a/storage/remote/write.go +++ b/storage/remote/write.go @@ -52,6 +52,12 @@ var ( Name: "histograms_in_total", Help: "HistogramSamples in to remote storage, compare to histograms out for queue managers.", }) + infoSamplesIn = promauto.NewCounter(prometheus.CounterOpts{ + Namespace: namespace, + Subsystem: subsystem, + Name: "info_metric_samples_in_total", + Help: "Info metric samples in to remote storage, compare to info metric samples out for queue managers.", + }) ) // WriteStorage represents all the remote write storage. @@ -269,6 +275,7 @@ func (rws *WriteStorage) Close() error { type timestampTracker struct { writeStorage *WriteStorage samples int64 + infoSamples int64 exemplars int64 histograms int64 highestTimestamp int64 @@ -284,6 +291,15 @@ func (t *timestampTracker) Append(_ storage.SeriesRef, _ labels.Labels, ts int64 return 0, nil } +// AppendInfoSample implements storage.Appender. 
+func (t *timestampTracker) AppendInfoSample(_ storage.SeriesRef, _ labels.Labels, ts int64, _ []int) (storage.SeriesRef, error) { + t.infoSamples++ + if ts > t.highestTimestamp { + t.highestTimestamp = ts + } + return 0, nil +} + func (t *timestampTracker) AppendExemplar(_ storage.SeriesRef, _ labels.Labels, _ exemplar.Exemplar) (storage.SeriesRef, error) { t.exemplars++ return 0, nil @@ -315,6 +331,7 @@ func (t *timestampTracker) Commit() error { samplesIn.Add(float64(t.samples)) exemplarsIn.Add(float64(t.exemplars)) histogramsIn.Add(float64(t.histograms)) + infoSamplesIn.Add(float64(t.infoSamples)) t.highestRecvTimestamp.Set(float64(t.highestTimestamp / 1000)) return nil } diff --git a/storage/remote/write_handler.go b/storage/remote/write_handler.go index d0d96b09d5..7c5ecde8f6 100644 --- a/storage/remote/write_handler.go +++ b/storage/remote/write_handler.go @@ -124,7 +124,16 @@ func (h *writeHandler) write(ctx context.Context, req *prompb.WriteRequest) (err } var ref storage.SeriesRef for _, s := range ts.Samples { - ref, err = app.Append(ref, labels, s.Timestamp, s.Value) + if len(s.IdentifyingLabels) == 0 { + ref, err = app.Append(ref, labels, s.Timestamp, s.Value) + } else { + // This is an info metric sample + ils := make([]int, 0, len(s.IdentifyingLabels)) + for _, idx := range s.IdentifyingLabels { + ils = append(ils, int(idx)) + } + ref, err = app.AppendInfoSample(ref, labels, s.Timestamp, ils) + } if err != nil { unwrappedErr := errors.Unwrap(err) if unwrappedErr == nil { @@ -135,7 +144,6 @@ func (h *writeHandler) write(ctx context.Context, req *prompb.WriteRequest) (err } return err } - } for _, ep := range ts.Exemplars { diff --git a/storage/remote/write_handler_test.go b/storage/remote/write_handler_test.go index 5125290f7c..6cc45508d9 100644 --- a/storage/remote/write_handler_test.go +++ b/storage/remote/write_handler_test.go @@ -269,6 +269,7 @@ func genSeriesWithSample(numSeries int, ts int64) []prompb.TimeSeries { type mockAppendable struct { 
latestSample int64 samples []mockSample + infoSamples []mockInfoSample latestExemplar int64 exemplars []mockExemplar latestHistogram int64 @@ -282,6 +283,12 @@ type mockSample struct { v float64 } +type mockInfoSample struct { + l labels.Labels + t int64 + identifyingLabels []int +} + type mockExemplar struct { l labels.Labels el labels.Labels @@ -317,6 +324,16 @@ func (m *mockAppendable) Append(_ storage.SeriesRef, l labels.Labels, t int64, v return 0, nil } +func (m *mockAppendable) AppendInfoSample(_ storage.SeriesRef, l labels.Labels, t int64, identifyingLabels []int) (storage.SeriesRef, error) { + if t < m.latestSample { + return 0, storage.ErrOutOfOrderSample + } + + m.latestSample = t + m.infoSamples = append(m.infoSamples, mockInfoSample{l: l, t: t, identifyingLabels: identifyingLabels}) + return 0, nil +} + func (m *mockAppendable) Commit() error { return m.commitErr } diff --git a/storage/series.go b/storage/series.go index 175276469e..a3638394c3 100644 --- a/storage/series.go +++ b/storage/series.go @@ -57,8 +57,8 @@ func NewListSeries(lset labels.Labels, s []chunks.Sample) *SeriesEntry { } } -// NewListChunkSeriesFromSamples returns chunk series entry that allows to iterate over provided samples. -// NOTE: It uses inefficient chunks encoding implementation, not caring about chunk size. +// NewListChunkSeriesFromSamples returns a chunk series entry that allows to iterate over provided samples. +// NOTE: It uses an inefficient chunks encoding implementation, not caring about chunk size. // Use only for testing. 
func NewListChunkSeriesFromSamples(lset labels.Labels, samples ...[]chunks.Sample) *ChunkSeriesEntry { chksFromSamples := make([]chunks.Meta, 0, len(samples)) @@ -136,6 +136,11 @@ func (it *listSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64 return s.T(), s.FH() } +func (it *listSeriesIterator) AtInfoSample() (int64, []int) { + s := it.samples.Get(it.idx) + return s.T(), s.IdentifyingLabels() +} + func (it *listSeriesIterator) AtT() int64 { s := it.samples.Get(it.idx) return s.T() @@ -330,10 +335,11 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator { lastType = typ var ( - t int64 - v float64 - h *histogram.Histogram - fh *histogram.FloatHistogram + t int64 + v float64 + h *histogram.Histogram + fh *histogram.FloatHistogram + ils []int ) switch typ { case chunkenc.ValFloat: @@ -369,6 +375,9 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator { } chk = newChk } + case chunkenc.ValInfoSample: + t, ils = seriesIter.AtInfoSample() + app.AppendInfoSample(t, ils) default: return errChunksIterator{err: fmt.Errorf("unknown sample type %s", typ.String())} } @@ -428,14 +437,16 @@ func (e errChunksIterator) Err() error { return e.err } // ExpandSamples iterates over all samples in the iterator, buffering all in slice. // Optionally it takes samples constructor, useful when you want to compare sample slices with different // sample implementations. if nil, sample type from this package will be used. 
-func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample) ([]chunks.Sample, error) { +func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram, ils []int) chunks.Sample) ([]chunks.Sample, error) { if newSampleFn == nil { - newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { + newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram, ils []int) chunks.Sample { switch { case h != nil: return hSample{t, h} case fh != nil: return fhSample{t, fh} + case ils != nil: + return infoSample{t, ils} default: return fSample{t, f} } @@ -453,13 +464,16 @@ func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, if math.IsNaN(f) { f = -42 } - result = append(result, newSampleFn(t, f, nil, nil)) + result = append(result, newSampleFn(t, f, nil, nil, nil)) case chunkenc.ValHistogram: t, h := iter.AtHistogram(nil) - result = append(result, newSampleFn(t, 0, h, nil)) + result = append(result, newSampleFn(t, 0, h, nil, nil)) case chunkenc.ValFloatHistogram: t, fh := iter.AtFloatHistogram(nil) - result = append(result, newSampleFn(t, 0, nil, fh)) + result = append(result, newSampleFn(t, 0, nil, fh, nil)) + case chunkenc.ValInfoSample: + t, ils := iter.AtInfoSample() + result = append(result, newSampleFn(t, 0, nil, nil, ils)) } } } diff --git a/tsdb/agent/db.go b/tsdb/agent/db.go index 513c2ed5a3..a4ebeea64e 100644 --- a/tsdb/agent/db.go +++ b/tsdb/agent/db.go @@ -763,6 +763,7 @@ type appender struct { pendingSeries []record.RefSeries pendingSamples []record.RefSample + pendingInfoSamples []record.RefInfoSample pendingHistograms []record.RefHistogramSample pendingFloatHistograms []record.RefFloatHistogramSample pendingExamplars []record.RefExemplar @@ -771,6 +772,10 @@ type appender struct { // Series lock is 
not held on elements. sampleSeries []*memSeries + // Pointers to the series referenced by each element of pendingInfoSamples. + // Series lock is not held on elements. + infoSampleSeries []*memSeries + // Pointers to the series referenced by each element of pendingHistograms. // Series lock is not held on elements. histogramSeries []*memSeries @@ -824,6 +829,50 @@ func (a *appender) Append(ref storage.SeriesRef, l labels.Labels, t int64, v flo return storage.SeriesRef(series.ref), nil } +func (a *appender) AppendInfoSample(ref storage.SeriesRef, l labels.Labels, t int64, identifyingLabels []int) (storage.SeriesRef, error) { + // series references and chunk references are identical for agent mode. + headRef := chunks.HeadSeriesRef(ref) + + series := a.series.GetByID(headRef) + if series == nil { + // Ensure no empty or duplicate labels have gotten through. This mirrors the + // equivalent validation code in the TSDB's headAppender. + l = l.WithoutEmpty() + if l.IsEmpty() { + return 0, fmt.Errorf("empty labelset: %w", tsdb.ErrInvalidSample) + } + + if lbl, dup := l.HasDuplicateLabelNames(); dup { + return 0, fmt.Errorf(`label name "%s" is not unique: %w`, lbl, tsdb.ErrInvalidSample) + } + + var created bool + series, created = a.getOrCreate(l) + if created { + a.pendingSeries = append(a.pendingSeries, record.RefSeries{ + Ref: series.ref, + Labels: l, + }) + + a.metrics.numActiveSeries.Inc() + } + } + + series.Lock() + defer series.Unlock() + + // NOTE: always modify pendingInfoSamples and infoSampleSeries together. 
+ a.pendingInfoSamples = append(a.pendingInfoSamples, record.RefInfoSample{ + Ref: series.ref, + T: t, + IdentifyingLabels: identifyingLabels, + }) + a.infoSampleSeries = append(a.infoSampleSeries, series) + + // a.metrics.totalAppendedSamples.WithLabelValues(sampleMetricTypeFloat).Inc() + return storage.SeriesRef(series.ref), nil +} + func (a *appender) getOrCreate(l labels.Labels) (series *memSeries, created bool) { hash := l.Hash() diff --git a/tsdb/block_test.go b/tsdb/block_test.go index 85bfc43f6f..a2de16e63e 100644 --- a/tsdb/block_test.go +++ b/tsdb/block_test.go @@ -174,7 +174,7 @@ func TestCorruptedChunk(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tmpdir := t.TempDir() - series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{1, 1, nil, nil}}) + series := storage.NewListSeries(labels.FromStrings("a", "b"), []chunks.Sample{sample{1, 1, nil, nil, nil}}) blockDir := createBlock(t, tmpdir, []storage.Series{series}) files, err := sequenceFiles(chunkDir(blockDir)) require.NoError(t, err) @@ -234,12 +234,12 @@ func TestLabelValuesWithMatchers(t *testing.T) { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "tens", fmt.Sprintf("value%d", i/10), "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{100, 0, nil, nil, nil}})) } // Add another series with an overlapping unique label, but leaving out the tens label seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "unique", "value99", - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{100, 0, nil, nil, nil}})) blockDir := createBlock(t, tmpdir, seriesEntries) files, err := sequenceFiles(chunkDir(blockDir)) @@ -422,7 +422,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) { "a_unique", fmt.Sprintf("value%d", i), "b_tens", fmt.Sprintf("value%d", i/(metricCount/10)), "c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for 
the first 90%, then "1" - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{100, 0, nil, nil, nil}})) } blockDir := createBlock(b, tmpdir, seriesEntries) @@ -459,13 +459,13 @@ func TestLabelNamesWithMatchers(t *testing.T) { for i := 0; i < 100; i++ { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{100, 0, nil, nil, nil}})) if i%10 == 0 { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings( "tens", fmt.Sprintf("value%d", i/10), "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{100, 0, nil, nil, nil}})) } if i%20 == 0 { @@ -473,7 +473,7 @@ func TestLabelNamesWithMatchers(t *testing.T) { "tens", fmt.Sprintf("value%d", i/10), "twenties", fmt.Sprintf("value%d", i/20), "unique", fmt.Sprintf("value%d", i), - ), []chunks.Sample{sample{100, 0, nil, nil}})) + ), []chunks.Sample{sample{100, 0, nil, nil, nil}})) } } diff --git a/tsdb/blockwriter.go b/tsdb/blockwriter.go index 73bc5f1e35..32346d69d0 100644 --- a/tsdb/blockwriter.go +++ b/tsdb/blockwriter.go @@ -42,7 +42,7 @@ type BlockWriter struct { // ErrNoSeriesAppended is returned if the series count is zero while flushing blocks. var ErrNoSeriesAppended = errors.New("no series appended, aborting") -// NewBlockWriter create a new block writer. +// NewBlockWriter creates a new block writer. // // The returned writer accumulates all the series in the Head block until `Flush` is called. // diff --git a/tsdb/chunkenc/bstream.go b/tsdb/chunkenc/bstream.go index 7b17f4686b..737a7d8b9f 100644 --- a/tsdb/chunkenc/bstream.go +++ b/tsdb/chunkenc/bstream.go @@ -63,6 +63,12 @@ const ( one bit = true ) +// Reset resets b around stream. 
+func (b *bstream) Reset(stream []byte) { + b.stream = stream + b.count = 0 +} + func (b *bstream) writeBit(bit bit) { if b.count == 0 { b.stream = append(b.stream, 0) diff --git a/tsdb/chunkenc/chunk.go b/tsdb/chunkenc/chunk.go index 21c41257b5..4d931dfd82 100644 --- a/tsdb/chunkenc/chunk.go +++ b/tsdb/chunkenc/chunk.go @@ -30,6 +30,7 @@ const ( EncXOR EncHistogram EncFloatHistogram + EncInfoMetric ) func (e Encoding) String() string { @@ -42,13 +43,15 @@ func (e Encoding) String() string { return "histogram" case EncFloatHistogram: return "floathistogram" + case EncInfoMetric: + return "infometric" } return "" } // IsValidEncoding returns true for supported encodings. func IsValidEncoding(e Encoding) bool { - return e == EncXOR || e == EncHistogram || e == EncFloatHistogram + return e == EncXOR || e == EncHistogram || e == EncFloatHistogram || e == EncInfoMetric } const ( @@ -87,6 +90,9 @@ type Chunk interface { // There's no strong guarantee that no samples will be appended once // Compact() is called. Implementing this function is optional. Compact() + + // Reset resets the chunk given stream. + Reset(stream []byte) } type Iterable interface { @@ -113,6 +119,9 @@ type Appender interface { // The Appender app that can be used for the next append is always returned. AppendHistogram(prev *HistogramAppender, t int64, h *histogram.Histogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error) AppendFloatHistogram(prev *FloatHistogramAppender, t int64, h *histogram.FloatHistogram, appendOnly bool) (c Chunk, isRecoded bool, app Appender, err error) + + // AppendInfoSample appends an info metric sample with its identifying label set indices. + AppendInfoSample(int64, []int) } // Iterator is a simple iterator that can only get the next value. @@ -145,6 +154,9 @@ type Iterator interface { // The method accepts an optional FloatHistogram object which will be // reused when not nil. Otherwise, a new FloatHistogram object will be allocated. 
AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) + // AtInfoSample returns the current timestamp/identifying labels pair if the value is an + // info metric sample. + AtInfoSample() (int64, []int) // AtT returns the current timestamp. // Before the iterator has advanced, the behaviour is unspecified. AtT() int64 @@ -162,6 +174,7 @@ const ( ValFloat // A simple float, retrieved with At. ValHistogram // A histogram, retrieve with AtHistogram, but AtFloatHistogram works, too. ValFloatHistogram // A floating-point histogram, retrieve with AtFloatHistogram. + ValInfoSample // An info metric sample. ) func (v ValueType) String() string { @@ -174,6 +187,8 @@ func (v ValueType) String() string { return "histogram" case ValFloatHistogram: return "floathistogram" + case ValInfoSample: + return "infometric" default: return "unknown" } @@ -187,6 +202,8 @@ func (v ValueType) ChunkEncoding() Encoding { return EncHistogram case ValFloatHistogram: return EncFloatHistogram + case ValInfoSample: + return EncInfoMetric default: return EncNone } @@ -200,6 +217,8 @@ func (v ValueType) NewChunk() (Chunk, error) { return NewHistogramChunk(), nil case ValFloatHistogram: return NewFloatHistogramChunk(), nil + case ValInfoSample: + return NewInfoSampleChunk(), nil default: return nil, fmt.Errorf("value type %v unsupported", v) } @@ -234,6 +253,10 @@ func (it *mockSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64 return math.MinInt64, nil } +func (it *mockSeriesIterator) AtInfoSample() (int64, []int) { + return math.MinInt64, nil +} + func (it *mockSeriesIterator) AtT() int64 { return it.timeStamps[it.currIndex] } @@ -265,8 +288,9 @@ func (nopIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogra func (nopIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) { return math.MinInt64, nil } -func (nopIterator) AtT() int64 { return math.MinInt64 } -func (nopIterator) Err() error { return nil } 
+func (nopIterator) AtInfoSample() (int64, []int) { return math.MinInt64, nil } +func (nopIterator) AtT() int64 { return math.MinInt64 } +func (nopIterator) Err() error { return nil } // Pool is used to create and reuse chunk references to avoid allocations. type Pool interface { @@ -279,6 +303,7 @@ type pool struct { xor sync.Pool histogram sync.Pool floatHistogram sync.Pool + infoSample sync.Pool } // NewPool returns a new pool. @@ -299,68 +324,78 @@ func NewPool() Pool { return &FloatHistogramChunk{b: bstream{}} }, }, + infoSample: sync.Pool{ + New: func() interface{} { + return &InfoSampleChunk{b: bstream{}} + }, + }, } } func (p *pool) Get(e Encoding, b []byte) (Chunk, error) { + var c Chunk switch e { case EncXOR: - c := p.xor.Get().(*XORChunk) - c.b.stream = b - c.b.count = 0 - return c, nil + c = p.xor.Get().(*XORChunk) + case EncInfoMetric: + c = p.infoSample.Get().(*InfoSampleChunk) case EncHistogram: - c := p.histogram.Get().(*HistogramChunk) - c.b.stream = b - c.b.count = 0 - return c, nil + c = p.histogram.Get().(*HistogramChunk) case EncFloatHistogram: - c := p.floatHistogram.Get().(*FloatHistogramChunk) - c.b.stream = b - c.b.count = 0 - return c, nil + c = p.floatHistogram.Get().(*FloatHistogramChunk) + default: + return nil, fmt.Errorf("invalid chunk encoding %q", e) } - return nil, fmt.Errorf("invalid chunk encoding %q", e) + + c.Reset(b) + return c, nil } func (p *pool) Put(c Chunk) error { + var sp *sync.Pool switch c.Encoding() { case EncXOR: - xc, ok := c.(*XORChunk) + _, ok := c.(*XORChunk) // This may happen often with wrapped chunks. Nothing we can really do about // it but returning an error would cause a lot of allocations again. Thus, // we just skip it. if !ok { return nil } - xc.b.stream = nil - xc.b.count = 0 - p.xor.Put(c) + sp = &p.xor case EncHistogram: - sh, ok := c.(*HistogramChunk) + _, ok := c.(*HistogramChunk) // This may happen often with wrapped chunks. 
Nothing we can really do about // it but returning an error would cause a lot of allocations again. Thus, // we just skip it. if !ok { return nil } - sh.b.stream = nil - sh.b.count = 0 - p.histogram.Put(c) + sp = &p.histogram case EncFloatHistogram: - sh, ok := c.(*FloatHistogramChunk) + _, ok := c.(*FloatHistogramChunk) // This may happen often with wrapped chunks. Nothing we can really do about // it but returning an error would cause a lot of allocations again. Thus, // we just skip it. if !ok { return nil } - sh.b.stream = nil - sh.b.count = 0 - p.floatHistogram.Put(c) + sp = &p.floatHistogram + case EncInfoMetric: + _, ok := c.(*InfoSampleChunk) + // This may happen often with wrapped chunks. Nothing we can really do about + // it but returning an error would cause a lot of allocations again. Thus, + // we just skip it. + if !ok { + return nil + } + sp = &p.infoSample default: return fmt.Errorf("invalid chunk encoding %q", c.Encoding()) } + + c.Reset(nil) + sp.Put(c) return nil } @@ -375,6 +410,8 @@ func FromData(e Encoding, d []byte) (Chunk, error) { return &HistogramChunk{b: bstream{count: 0, stream: d}}, nil case EncFloatHistogram: return &FloatHistogramChunk{b: bstream{count: 0, stream: d}}, nil + case EncInfoMetric: + return &InfoSampleChunk{b: bstream{count: 0, stream: d}}, nil } return nil, fmt.Errorf("invalid chunk encoding %q", e) } @@ -388,6 +425,8 @@ func NewEmptyChunk(e Encoding) (Chunk, error) { return NewHistogramChunk(), nil case EncFloatHistogram: return NewFloatHistogramChunk(), nil + case EncInfoMetric: + return NewInfoSampleChunk(), nil } return nil, fmt.Errorf("invalid chunk encoding %q", e) } diff --git a/tsdb/chunkenc/float_histogram.go b/tsdb/chunkenc/float_histogram.go index 88d189254f..1834c17a8d 100644 --- a/tsdb/chunkenc/float_histogram.go +++ b/tsdb/chunkenc/float_histogram.go @@ -44,6 +44,10 @@ func NewFloatHistogramChunk() *FloatHistogramChunk { return &FloatHistogramChunk{b: bstream{stream: b, count: 0}} } +func (c 
*FloatHistogramChunk) Reset(stream []byte) { + c.b.Reset(stream) +} + // xorValue holds all the necessary information to encode // and decode XOR encoded float64 values. type xorValue struct { @@ -211,6 +215,12 @@ func (a *FloatHistogramAppender) Append(int64, float64) { panic("appended a float sample to a histogram chunk") } +// AppendInfoSample implements Appender. This implementation panics because info metric +// samples must never be appended to a float histogram chunk. +func (a *FloatHistogramAppender) AppendInfoSample(int64, []int) { + panic("appended an info metric sample to a float histogram chunk") +} + // appendable returns whether the chunk can be appended to, and if so whether // any recoding needs to happen using the provided inserts (in case of any new // buckets, positive or negative range, respectively). If the sample is a gauge @@ -774,6 +784,10 @@ func (it *floatHistogramIterator) AtFloatHistogram(fh *histogram.FloatHistogram) return it.t, fh } +func (it *floatHistogramIterator) AtInfoSample() (int64, []int) { + panic("cannot call floatHistogramIterator.AtInfoSample") +} + func (it *floatHistogramIterator) AtT() int64 { return it.t } diff --git a/tsdb/chunkenc/histogram.go b/tsdb/chunkenc/histogram.go index cb09eda26d..6b233c678f 100644 --- a/tsdb/chunkenc/histogram.go +++ b/tsdb/chunkenc/histogram.go @@ -45,6 +45,10 @@ func NewHistogramChunk() *HistogramChunk { return &HistogramChunk{b: bstream{stream: b, count: 0}} } +func (c *HistogramChunk) Reset(stream []byte) { + c.b.Reset(stream) +} + // Encoding returns the encoding type. func (c *HistogramChunk) Encoding() Encoding { return EncHistogram @@ -229,6 +233,12 @@ func (a *HistogramAppender) Append(int64, float64) { panic("appended a float sample to a histogram chunk") } +// AppendInfoSample implements Appender. This implementation panics because info metric +// samples must never be appended to a histogram chunk.
+func (a *HistogramAppender) AppendInfoSample(int64, []int) { + panic("appended an info metric sample to a histogram chunk") +} + // appendable returns whether the chunk can be appended to, and if so whether // any recoding needs to happen using the provided inserts (in case of any new // buckets, positive or negative range, respectively). If the sample is a gauge @@ -868,6 +878,10 @@ func (it *histogramIterator) AtFloatHistogram(fh *histogram.FloatHistogram) (int return it.t, fh } +func (it *histogramIterator) AtInfoSample() (int64, []int) { + panic("cannot call histogramIterator.AtInfoSample") +} + func (it *histogramIterator) AtT() int64 { return it.t } diff --git a/tsdb/chunkenc/infometric.go b/tsdb/chunkenc/infometric.go new file mode 100644 index 0000000000..6c96682e5e --- /dev/null +++ b/tsdb/chunkenc/infometric.go @@ -0,0 +1,324 @@ +// Copyright 2024 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package chunkenc + +import ( + "encoding/binary" + "math" + "slices" + + "github.com/prometheus/prometheus/model/histogram" +) + +// InfoSampleChunk holds encoded info metric sample data. +type InfoSampleChunk struct { + b bstream +} + +// NewInfoSampleChunk returns a new chunk with info metric encoding of the given +// size. 
+func NewInfoSampleChunk() *InfoSampleChunk { + b := make([]byte, 3, 128) + return &InfoSampleChunk{b: bstream{stream: b, count: 0}} +} + +func (c *InfoSampleChunk) Reset(stream []byte) { + c.b.Reset(stream) +} + +// Encoding returns the encoding type. +func (c *InfoSampleChunk) Encoding() Encoding { + return EncInfoMetric +} + +// Bytes returns the underlying byte slice of the chunk. +func (c *InfoSampleChunk) Bytes() []byte { + return c.b.bytes() +} + +// NumSamples returns the number of samples in the chunk. +func (c *InfoSampleChunk) NumSamples() int { + return int(binary.BigEndian.Uint16(c.Bytes())) +} + +// Compact implements the Chunk interface. +func (c *InfoSampleChunk) Compact() { + if l := len(c.b.stream); cap(c.b.stream) > l+chunkCompactCapacityThreshold { + buf := make([]byte, l) + copy(buf, c.b.stream) + c.b.stream = buf + } +} + +// Appender implements the Chunk interface. +// It is not valid to call Appender() multiple times concurrently or to use multiple +// Appenders on the same chunk. +func (c *InfoSampleChunk) Appender() (Appender, error) { + it := c.iterator(nil) + + // To get an appender, we must know the state it would have if we had + // appended all existing data from scratch. We iterate through the end + // and populate via the iterator's state. + for it.Next() != ValNone { + } + if err := it.Err(); err != nil { + return nil, err + } + + return &infoSampleAppender{ + b: &c.b, + t: it.t, + tDelta: it.tDelta, + lDelta: it.lDelta, + ils: it.ils, + }, nil +} + +// Iterator implements the Chunk interface. +func (c *InfoSampleChunk) Iterator(it Iterator) Iterator { + return c.iterator(it) +} + +func (c *InfoSampleChunk) iterator(it Iterator) *infoSampleIterator { + if iter, ok := it.(*infoSampleIterator); ok { + iter.Reset(c.b.bytes()) + return iter + } + + iter := &infoSampleIterator{ + // The first 2 bytes contain chunk headers. + // We skip that for actual samples. 
+ br: newBReader(c.b.bytes()[2:]), + numTotal: binary.BigEndian.Uint16(c.b.bytes()), + t: math.MinInt64, + } + // NOTE(review): bytes()[2:] above already skipped 2 header bytes; reading 24 more bits skips 3 + // further bytes (5 total), while Reset skips only 2 — confirm the intended header size (the chunk is created with a 3-byte header). + _, _ = iter.br.readBits(24) + return iter +} + +type infoSampleAppender struct { + b *bstream + + t int64 + tDelta int64 + lDelta int + ils []int +} + +// Append implements Appender. This implementation panics because normal float +// samples must never be appended to an info sample chunk. +func (a *infoSampleAppender) Append(int64, float64) { + panic("appended a float sample to an info sample chunk") +} + +// AppendFloatHistogram implements Appender. This implementation panics because float histogram +// samples must never be appended to an info sample chunk. +func (a *infoSampleAppender) AppendFloatHistogram(*FloatHistogramAppender, int64, *histogram.FloatHistogram, bool) (Chunk, bool, Appender, error) { + panic("appended a float histogram sample to an info sample chunk") +} + +// AppendHistogram implements Appender. This implementation panics because histogram +// samples must never be appended to an info sample chunk. +func (a *infoSampleAppender) AppendHistogram(prev *HistogramAppender, t int64, h *histogram.Histogram, appendOnly bool) (Chunk, bool, Appender, error) { + panic("appended a histogram sample to an info sample chunk") +} + +// AppendInfoSample implements Appender.
+func (a *infoSampleAppender) AppendInfoSample(t int64, identifyingLabels []int) { + num := binary.BigEndian.Uint16(a.b.bytes()) + + if num == 0 { + putVarbitInt(a.b, t) + // The labels change + putVarbitInt(a.b, 1) + putVarbitInt(a.b, int64(len(identifyingLabels))) + for _, ix := range identifyingLabels { + putVarbitInt(a.b, int64(ix)) + } + } else { + tDelta := t - a.t + tDod := tDelta - a.tDelta + + putVarbitInt(a.b, tDod) + + a.tDelta = tDelta + + if slices.Equal(a.ils, identifyingLabels) { + // The labels do not change + putVarbitInt(a.b, 0) + } else { + // The labels change + putVarbitInt(a.b, 1) + lDelta := len(identifyingLabels) - len(a.ils) + lDod := int64(lDelta - a.lDelta) + a.lDelta = lDelta + + putVarbitInt(a.b, lDod) + for _, ix := range identifyingLabels { + putVarbitInt(a.b, int64(ix)) + } + } + } + + // Keep the 2-byte header sample count in sync, as xorAppender.Append does; + // without this, NumSamples stays 0 and iteration never yields the appended samples. + binary.BigEndian.PutUint16(a.b.bytes(), num+1) + + a.t = t + a.ils = identifyingLabels +} + +type infoSampleIterator struct { + br bstreamReader + numTotal uint16 + numRead uint16 + + t int64 + tDelta int64 + l int + lDelta int + ils []int + + err error +} + +func (it *infoSampleIterator) Seek(t int64) ValueType { + if it.err != nil { + return ValNone + } + + for t > it.t || it.numRead == 0 { + if it.Next() == ValNone { + return ValNone + } + } + return ValInfoSample +} + +func (it *infoSampleIterator) At() (int64, float64) { + return it.t, 1 +} + +func (it *infoSampleIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogram) { + panic("cannot call infoSampleIterator.AtHistogram") +} + +func (it *infoSampleIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) { + panic("cannot call infoSampleIterator.AtFloatHistogram") +} + +func (it *infoSampleIterator) AtInfoSample() (int64, []int) { + return it.t, it.ils +} + +func (it *infoSampleIterator) AtT() int64 { + return it.t +} + +func (it *infoSampleIterator) Err() error { + return it.err +} + +func (it *infoSampleIterator) Reset(b []byte) { + // The first 2 bytes contain chunk headers.
+ // We skip that for actual samples. + it.br = newBReader(b[2:]) + it.numTotal = binary.BigEndian.Uint16(b) + + it.numRead = 0 + it.t = 0 + it.tDelta = 0 + it.l = 0 + it.lDelta = 0 + it.ils = nil + it.err = nil +} + +func (it *infoSampleIterator) Next() ValueType { + if it.err != nil || it.numRead == it.numTotal { + return ValNone + } + + if it.numRead == 0 { + t, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + if _, err := readVarbitInt(&it.br); err != nil { + it.err = err + return ValNone + } + l, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + for i := 0; i < int(l); i++ { + ix, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + it.ils = append(it.ils, int(ix)) + } + + it.t = t + it.l = int(l) + + it.numRead++ + return ValInfoSample + } + + tDod, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + + it.tDelta += tDod + it.t += it.tDelta + + changed, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + + if changed == 0 { + it.numRead++ + return ValInfoSample + } + + // The identifying labels change + lDod, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + + it.lDelta += int(lDod) + it.l += it.lDelta + it.ils = make([]int, 0, it.l) + for i := 0; i < it.l; i++ { + ix, err := readVarbitInt(&it.br) + if err != nil { + it.err = err + return ValNone + } + + it.ils = append(it.ils, int(ix)) + } + + it.numRead++ + return ValInfoSample +} diff --git a/tsdb/chunkenc/varbit.go b/tsdb/chunkenc/varbit.go index b43574dcb6..574edec48b 100644 --- a/tsdb/chunkenc/varbit.go +++ b/tsdb/chunkenc/varbit.go @@ -61,7 +61,7 @@ func putVarbitInt(b *bstream, val int64) { } } -// readVarbitInt reads an int64 encoced with putVarbitInt. +// readVarbitInt reads an int64 encoded with putVarbitInt. 
func readVarbitInt(b *bstreamReader) (int64, error) { var d byte for i := 0; i < 8; i++ { @@ -166,7 +166,7 @@ func putVarbitUint(b *bstream, val uint64) { } } -// readVarbitUint reads a uint64 encoced with putVarbitUint. +// readVarbitUint reads a uint64 encoded with putVarbitUint. func readVarbitUint(b *bstreamReader) (uint64, error) { var d byte for i := 0; i < 8; i++ { diff --git a/tsdb/chunkenc/xor.go b/tsdb/chunkenc/xor.go index 07b9238315..0bc89099ba 100644 --- a/tsdb/chunkenc/xor.go +++ b/tsdb/chunkenc/xor.go @@ -66,6 +66,10 @@ func NewXORChunk() *XORChunk { return &XORChunk{b: bstream{stream: b, count: 0}} } +func (c *XORChunk) Reset(stream []byte) { + c.b.Reset(stream) +} + // Encoding returns the encoding type. func (c *XORChunk) Encoding() Encoding { return EncXOR @@ -171,7 +175,6 @@ func (a *xorAppender) Append(t int64, v float64) { } a.writeVDelta(v) - default: tDelta = uint64(t - a.t) dod := int64(tDelta - a.tDelta) @@ -220,6 +223,10 @@ func (a *xorAppender) writeVDelta(v float64) { xorWrite(a.b, v, a.v, &a.leading, &a.trailing) } +func (a *xorAppender) AppendInfoSample(int64, []int) { + panic("appended an info metric sample to a float chunk") +} + func (a *xorAppender) AppendHistogram(*HistogramAppender, int64, *histogram.Histogram, bool) (Chunk, bool, Appender, error) { panic("appended a histogram sample to a float chunk") } @@ -268,6 +275,10 @@ func (it *xorIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *hist panic("cannot call xorIterator.AtFloatHistogram") } +func (it *xorIterator) AtInfoSample() (int64, []int) { + panic("cannot call xorIterator.AtInfoSample") +} + func (it *xorIterator) AtT() int64 { return it.t } diff --git a/tsdb/chunks/chunks.go b/tsdb/chunks/chunks.go index 0826f69670..6ef02a4734 100644 --- a/tsdb/chunks/chunks.go +++ b/tsdb/chunks/chunks.go @@ -191,6 +191,8 @@ func ChunkFromSamplesGeneric(s Samples) (Meta, error) { if newChunk != nil { return emptyChunk, fmt.Errorf("did not expect to start a second chunk") } 
+ case chunkenc.ValInfoSample: + ca.AppendInfoSample(s.Get(i).T(), s.Get(i).IdentifyingLabels()) default: panic(fmt.Sprintf("unknown sample type %s", sampleType.String())) } @@ -222,6 +224,9 @@ func ChunkMetasToSamples(chunks []Meta) (result []Sample) { case chunkenc.ValFloatHistogram: t, fh := it.AtFloatHistogram(nil) result = append(result, sample{t: t, fh: fh}) + case chunkenc.ValInfoSample: + t, ils := it.AtInfoSample() + result = append(result, sample{t: t, ils: ils}) default: panic("unexpected value type") } @@ -233,7 +238,7 @@ func ChunkMetasToSamples(chunks []Meta) (result []Sample) { // Iterator iterates over the chunks of a single time series. type Iterator interface { // At returns the current meta. - // It depends on implementation if the chunk is populated or not. + // It depends on implementation whether the chunk is populated or not. At() Meta // Next advances the iterator by one. Next() bool @@ -478,7 +483,7 @@ func (w *Writer) WriteChunks(chks ...Meta) error { // the batch is too large to fit in the current segment. cutNewBatch := (i != 0) && (batchSize+SegmentHeaderSize > w.segmentSize) - // When the segment already has some data than + // When the segment already has some data // the first batch size calculation should account for that. if firstBatch && w.n > SegmentHeaderSize { cutNewBatch = batchSize+w.n > w.segmentSize @@ -717,7 +722,7 @@ func nextSequenceFile(dir string) (string, int, error) { } // It is not necessary that we find the files in number order, // for example with '1000000' and '200000', '1000000' would come first. - // Though this is a very very race case, we check anyway for the max id. + // Though this is a very very rare case, we check anyway for the max id. 
if j > i { i = j } diff --git a/tsdb/chunks/head_chunks.go b/tsdb/chunks/head_chunks.go index d3d0d5b8d3..a1de820ee5 100644 --- a/tsdb/chunks/head_chunks.go +++ b/tsdb/chunks/head_chunks.go @@ -188,8 +188,8 @@ func (f *chunkPos) bytesToWriteForChunk(chkLen uint64) uint64 { return bytes } -// ChunkDiskMapper is for writing the Head block chunks to the disk -// and access chunks via mmapped file. +// ChunkDiskMapper is for writing the Head block chunks to disk +// and access chunks via mmapped files. type ChunkDiskMapper struct { /// Writer. dir *os.File @@ -231,7 +231,7 @@ type ChunkDiskMapper struct { closed bool } -// mmappedChunkFile provides mmapp access to an entire head chunks file that holds many chunks. +// mmappedChunkFile provides mmap access to an entire head chunks file that holds many chunks. type mmappedChunkFile struct { byteSlice ByteSlice maxt int64 // Max timestamp among all of this file's chunks. @@ -240,7 +240,7 @@ type mmappedChunkFile struct { // NewChunkDiskMapper returns a new ChunkDiskMapper against the given directory // using the default head chunk file duration. // NOTE: 'IterateAllChunks' method needs to be called at least once after creating ChunkDiskMapper -// to set the maxt of all the file. +// to set the maxt of all files. func NewChunkDiskMapper(reg prometheus.Registerer, dir string, pool chunkenc.Pool, writeBufferSize, writeQueueSize int) (*ChunkDiskMapper, error) { // Validate write buffer size. if writeBufferSize < MinWriteBufferSize || writeBufferSize > MaxWriteBufferSize { @@ -425,8 +425,8 @@ func repairLastChunkFile(files map[int]string) (_ map[int]string, returnErr erro return files, nil } -// WriteChunk writes the chunk to the disk. -// The returned chunk ref is the reference from where the chunk encoding starts for the chunk. +// WriteChunk writes the chunk to disk. +// The returned chunk ref is the reference for where the chunk encoding starts for the chunk. 
func (cdm *ChunkDiskMapper) WriteChunk(seriesRef HeadSeriesRef, mint, maxt int64, chk chunkenc.Chunk, isOOO bool, callback func(err error)) (chkRef ChunkDiskMapperRef) { // cdm.evtlPosMtx must be held to serialize the calls to cdm.evtlPos.getNextChunkRef() and the writing of the chunk (either with or without queue). cdm.evtlPosMtx.Lock() @@ -785,7 +785,7 @@ func (cdm *ChunkDiskMapper) Chunk(ref ChunkDiskMapperRef) (chunkenc.Chunk, error // IterateAllChunks iterates all mmappedChunkFiles (in order of head chunk file name/number) and all the chunks within it // and runs the provided function with information about each chunk. It returns on the first error encountered. // NOTE: This method needs to be called at least once after creating ChunkDiskMapper -// to set the maxt of all the file. +// to set the maxt of all files. func (cdm *ChunkDiskMapper) IterateAllChunks(f func(seriesRef HeadSeriesRef, chunkRef ChunkDiskMapperRef, mint, maxt int64, numSamples uint16, encoding chunkenc.Encoding, isOOO bool) error) (err error) { cdm.writePathMtx.Lock() defer cdm.writePathMtx.Unlock() @@ -905,7 +905,7 @@ func (cdm *ChunkDiskMapper) IterateAllChunks(f func(seriesRef HeadSeriesRef, chu return nil } -// Truncate deletes the head chunk files whose file number is less than given fileNo. +// Truncate deletes the head chunk files whose file numbers are less than the given fileNo.
func (cdm *ChunkDiskMapper) Truncate(fileNo uint32) error { cdm.readPathMtx.RLock() diff --git a/tsdb/chunks/samples.go b/tsdb/chunks/samples.go index 638660c70c..d587b59093 100644 --- a/tsdb/chunks/samples.go +++ b/tsdb/chunks/samples.go @@ -28,6 +28,7 @@ type Sample interface { F() float64 H() *histogram.Histogram FH() *histogram.FloatHistogram + IdentifyingLabels() []int Type() chunkenc.ValueType } @@ -37,10 +38,11 @@ func (s SampleSlice) Get(i int) Sample { return s[i] } func (s SampleSlice) Len() int { return len(s) } type sample struct { - t int64 - f float64 - h *histogram.Histogram - fh *histogram.FloatHistogram + t int64 + f float64 + h *histogram.Histogram + fh *histogram.FloatHistogram + ils []int } func (s sample) T() int64 { @@ -59,12 +61,18 @@ func (s sample) FH() *histogram.FloatHistogram { return s.fh } +func (s sample) IdentifyingLabels() []int { + return s.ils +} + func (s sample) Type() chunkenc.ValueType { switch { case s.h != nil: return chunkenc.ValHistogram case s.fh != nil: return chunkenc.ValFloatHistogram + case s.ils != nil: + return chunkenc.ValInfoSample default: return chunkenc.ValFloat } diff --git a/tsdb/compact.go b/tsdb/compact.go index 7cf718d8e1..a91b155770 100644 --- a/tsdb/compact.go +++ b/tsdb/compact.go @@ -300,7 +300,7 @@ func (c *LeveledCompactor) plan(dms []dirMeta) ([]string, error) { meta := dms[i].meta if meta.MaxTime-meta.MinTime < c.ranges[len(c.ranges)/2] { // If the block is entirely deleted, then we don't care about the block being big enough. - // TODO: This is assuming single tombstone is for distinct series, which might be no true. + // TODO: This is assuming single tombstone is for distinct series, which might not be true. if meta.Stats.NumTombstones > 0 && meta.Stats.NumTombstones >= meta.Stats.NumSeries { return []string{dms[i].dir}, nil } @@ -400,7 +400,7 @@ func splitByRange(ds []dirMeta, tr int64) [][]dirMeta { t0 = tr * ((m.MinTime - tr + 1) / tr) } // Skip blocks that don't fall into the range. 
This can happen via mis-alignment or - // by being the multiple of the intended range. + // by being a multiple of the intended range. if m.MaxTime > t0+tr { i++ continue @@ -423,7 +423,7 @@ func splitByRange(ds []dirMeta, tr int64) [][]dirMeta { return splitDirs } -// CompactBlockMetas merges many block metas into one, combining it's source blocks together +// CompactBlockMetas merges many block metas into one, combining its source blocks together // and adjusting compaction level. Min/Max time of result block meta covers all input blocks. func CompactBlockMetas(uid ulid.ULID, blocks ...*BlockMeta) *BlockMeta { res := &BlockMeta{ @@ -484,7 +484,7 @@ func (c *LeveledCompactor) Compact(dest string, dirs []string, open []*Block) (u return ulids[0], nil } -// shardedBlock describes single *output* block during compaction. This struct is passed between +// shardedBlock describes a single *output* block during compaction. This struct is passed between // compaction methods to wrap output block details, index and chunk writer together. // Shard index is determined by the position of this structure in the slice of output blocks. type shardedBlock struct { @@ -1136,7 +1136,7 @@ func (c DefaultBlockPopulator) PopulateBlock(ctx context.Context, metrics *Compa chksIter := s.Iterator(chksIter) var chks []chunks.Meta for chksIter.Next() { - // We are not iterating in streaming way over chunk as + // We are not iterating in a streaming way over chunks as // it's more efficient to do bulk write for index and // chunk file purposes. chks = append(chks, chksIter.At()) @@ -1145,7 +1145,7 @@ func (c DefaultBlockPopulator) PopulateBlock(ctx context.Context, metrics *Compa return fmt.Errorf("chunk iter: %w", err) } - // Skip the series with all deleted chunks. + // Skip series with all deleted chunks. 
if len(chks) == 0 { continue } diff --git a/tsdb/db.go b/tsdb/db.go index 03197588f6..c58b78ee83 100644 --- a/tsdb/db.go +++ b/tsdb/db.go @@ -265,7 +265,7 @@ type DB struct { compactor Compactor blocksToDelete BlocksToDeleteFunc - // Mutex for that must be held when modifying the general block layout or lastGarbageCollectedMmapRef. + // Mutex that must be held when modifying the general block layout or lastGarbageCollectedMmapRef. mtx sync.RWMutex blocks []*Block @@ -1499,7 +1499,7 @@ func (db *DB) reloadBlocks() (err error) { db.metrics.reloads.Inc() }() - // Now that we reload TSDB every minute, there is high chance for race condition with a reload + // Now that we reload TSDB every minute, there is a high chance for race condition with a reload // triggered by CleanTombstones(). We need to lock the reload to avoid the situation where // a normal reload and CleanTombstones try to delete the same block. db.mtx.Lock() diff --git a/tsdb/db_test.go b/tsdb/db_test.go index 1073b66e40..13a6f0d153 100644 --- a/tsdb/db_test.go +++ b/tsdb/db_test.go @@ -464,7 +464,7 @@ Outer: expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -623,7 +623,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]chunks.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil, nil}}, }, ssMap) // Append Out of Order Value. 
@@ -640,7 +640,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]chunks.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil, nil}, sample{10, 3, nil, nil, nil}}, }, ssMap) } @@ -820,7 +820,7 @@ Outer: expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -925,7 +925,7 @@ func TestDB_e2e(t *testing.T) { for i := 0; i < numDatapoints; i++ { v := rand.Float64() - series = append(series, sample{ts, v, nil, nil}) + series = append(series, sample{ts, v, nil, nil, nil}) _, err := app.Append(0, lset, ts, v) require.NoError(t, err) @@ -1257,7 +1257,7 @@ func TestTombstoneClean(t *testing.T) { expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) + expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -2769,11 +2769,11 @@ func assureChunkFromSamples(t *testing.T, samples []chunks.Sample) chunks.Meta { // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // that the resulted segments includes the expected chunks data. 
func TestChunkWriter_ReadAfterWrite(t *testing.T) { - chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}}) - chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}}) - chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}}) - chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}}) - chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}}) + chk1 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil, nil}}) + chk2 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil, nil}}) + chk3 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil, nil}}) + chk4 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil, nil}}) + chk5 := assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil, nil}}) chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size tests := []struct { @@ -2974,11 +2974,11 @@ func TestRangeForTimestamp(t *testing.T) { // Regression test for https://github.com/prometheus/prometheus/pull/6514. 
func TestChunkReader_ConcurrentReads(t *testing.T) { chks := []chunks.Meta{ - assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil}}), - assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{1, 1, nil, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{1, 2, nil, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{1, 3, nil, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{1, 4, nil, nil, nil}}), + assureChunkFromSamples(t, []chunks.Sample{sample{1, 5, nil, nil, nil}}), } tempDir := t.TempDir() @@ -3040,7 +3040,7 @@ func TestCompactHead(t *testing.T) { val := rand.Float64() _, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val) require.NoError(t, err) - expSamples = append(expSamples, sample{int64(i), val, nil, nil}) + expSamples = append(expSamples, sample{int64(i), val, nil, nil, nil}) } require.NoError(t, app.Commit()) @@ -3068,7 +3068,7 @@ func TestCompactHead(t *testing.T) { series = seriesSet.At().Iterator(series) for series.Next() == chunkenc.ValFloat { time, val := series.At() - actSamples = append(actSamples, sample{time, val, nil, nil}) + actSamples = append(actSamples, sample{time, val, nil, nil, nil}) } require.NoError(t, series.Err()) } @@ -4470,8 +4470,8 @@ func TestOOOCompaction(t *testing.T) { fromMins, toMins := r[0], r[1] for min := fromMins; min <= toMins; min++ { ts := min * time.Minute.Milliseconds() - series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil}) - series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil}) + series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil, nil}) + series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, 
nil, nil}) } } expRes := map[string][]chunks.Sample{ @@ -4538,8 +4538,8 @@ func TestOOOCompaction(t *testing.T) { series2Samples := make([]chunks.Sample, 0, toMins-fromMins+1) for min := fromMins; min <= toMins; min++ { ts := min * time.Minute.Milliseconds() - series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil}) - series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil}) + series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil, nil}) + series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil, nil}) } expRes := map[string][]chunks.Sample{ series1.String(): series1Samples, @@ -4670,8 +4670,8 @@ func TestOOOCompactionWithNormalCompaction(t *testing.T) { series2Samples := make([]chunks.Sample, 0, toMins-fromMins+1) for min := fromMins; min <= toMins; min++ { ts := min * time.Minute.Milliseconds() - series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil}) - series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil}) + series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil, nil}) + series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil, nil}) } expRes := map[string][]chunks.Sample{ series1.String(): series1Samples, @@ -4771,8 +4771,8 @@ func TestOOOCompactionWithDisabledWriteLog(t *testing.T) { series2Samples := make([]chunks.Sample, 0, toMins-fromMins+1) for min := fromMins; min <= toMins; min++ { ts := min * time.Minute.Milliseconds() - series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil}) - series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil}) + series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil, nil}) + series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil, nil}) } expRes := map[string][]chunks.Sample{ series1.String(): series1Samples, @@ -4865,8 +4865,8 @@ func TestOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t 
*testing.T) { series2Samples := make([]chunks.Sample, 0, toMins-fromMins+1) for min := fromMins; min <= toMins; min++ { ts := min * time.Minute.Milliseconds() - series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil}) - series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil}) + series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil, nil}) + series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil, nil}) } expRes := map[string][]chunks.Sample{ series1.String(): series1Samples, @@ -5561,7 +5561,7 @@ func TestOOOCompactionFailure(t *testing.T) { series1Samples := make([]chunks.Sample, 0, toMins-fromMins+1) for min := fromMins; min <= toMins; min++ { ts := min * time.Minute.Milliseconds() - series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil}) + series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil, nil}) } expRes := map[string][]chunks.Sample{ series1.String(): series1Samples, diff --git a/tsdb/docs/format/head_chunks.md b/tsdb/docs/format/head_chunks.md index 813d4008ca..3d91fc12b4 100644 --- a/tsdb/docs/format/head_chunks.md +++ b/tsdb/docs/format/head_chunks.md @@ -27,10 +27,10 @@ in-file offset (lower 4 bytes) and segment sequence number (upper 4 bytes). # Chunk -Unlike chunks in the on-disk blocks, here we additionally store series -reference that the chunks belongs to and the mint/maxt of the chunks. This is -because we don't have an index associated with these chunks, hence these meta -information are used while replaying the chunks. +Unlike chunks in the on-disk blocks, here we additionally store the series +reference that each chunk belongs to and the mint/maxt of the chunks. This is +because we don't have an index associated with these chunks, hence this metadata +is used while replaying the chunks.
``` ┌─────────────────────┬───────────────────────┬───────────────────────┬───────────────────┬───────────────┬──────────────┬────────────────┐ diff --git a/tsdb/docs/format/index.md b/tsdb/docs/format/index.md index 53b77d9abe..e0ef21bd5b 100644 --- a/tsdb/docs/format/index.md +++ b/tsdb/docs/format/index.md @@ -40,7 +40,7 @@ Most of the sections described below start with a `len` field. It always specifi ### Symbol Table -The symbol table holds a sorted list of deduplicated strings that occurred in label pairs of the stored series. They can be referenced from subsequent sections and significantly reduce the total index size. +The symbol table holds a sorted list of deduplicated strings that occur in label pairs of the stored series. They can be referenced from subsequent sections and significantly reduce the total index size. The section contains a sequence of the string entries, each prefixed with the string's length in raw bytes. All strings are utf-8 encoded. Strings are referenced by sequential indexing. The strings are sorted in lexicographically ascending order. diff --git a/tsdb/docs/usage.md b/tsdb/docs/usage.md index e70b248137..70af41b4d1 100644 --- a/tsdb/docs/usage.md +++ b/tsdb/docs/usage.md @@ -1,6 +1,6 @@ # Usage -TSDB can be - and is - used by other applications such as [Cortex](https://cortexmetrics.io/) and [Thanos](https://thanos.io/). +TSDB can be - and is - used by other applications such as [Cortex](https://cortexmetrics.io/), [Thanos](https://thanos.io/) and [Grafana Mimir](https://grafana.com/oss/mimir/). This directory contains documentation for any developers who wish to work on or with TSDB. For a full example of instantiating a database, adding and querying data, see the [tsdb example in the docs](https://pkg.go.dev/github.com/prometheus/prometheus/tsdb). 
@@ -18,7 +18,7 @@ A `DB` has the following main components: * [`Head`](https://pkg.go.dev/github.com/prometheus/prometheus/tsdb#DB.Head) * [Blocks (persistent blocks)](https://pkg.go.dev/github.com/prometheus/prometheus/tsdb#DB.Blocks) -The `Head` is responsible for a lot. Here are its main components: +The `Head` is responsible for a lot. Here are its main components: * [WAL](https://pkg.go.dev/github.com/prometheus/prometheus/tsdb/wal#WAL) (Write Ahead Log). * [`stripeSeries`](https://github.com/prometheus/prometheus/blob/411021ada9ab41095923b8d2df9365b632fd40c3/tsdb/head.go#L1292): diff --git a/tsdb/exemplar.go b/tsdb/exemplar.go index 3dd784c623..213fff5c25 100644 --- a/tsdb/exemplar.go +++ b/tsdb/exemplar.go @@ -111,7 +111,7 @@ func NewExemplarMetrics(reg prometheus.Registerer) *ExemplarMetrics { return &m } -// NewCircularExemplarStorage creates an circular in memory exemplar storage. +// NewCircularExemplarStorage creates a circular in memory exemplar storage. // If we assume the average case 95 bytes per exemplar we can fit 5651272 exemplars in // 1GB of extra memory, accounting for the fact that this is heap allocated space. 
// If len <= 0, then the exemplar storage is essentially a noop storage but can later be diff --git a/tsdb/head.go b/tsdb/head.go index 2027f14c68..567c983739 100644 --- a/tsdb/head.go +++ b/tsdb/head.go @@ -103,6 +103,7 @@ type Head struct { histogramsPool zeropool.Pool[[]record.RefHistogramSample] floatHistogramsPool zeropool.Pool[[]record.RefFloatHistogramSample] metadataPool zeropool.Pool[[]record.RefMetadata] + infoSamplesPool zeropool.Pool[[]record.RefInfoSample] seriesPool zeropool.Pool[[]*memSeries] bytesPool zeropool.Pool[[]byte] memChunkPool sync.Pool @@ -2059,20 +2060,22 @@ func (s *stripeSeries) getOrSet(hash uint64, lset labels.Labels, createSeries fu } type sample struct { - t int64 - f float64 - h *histogram.Histogram - fh *histogram.FloatHistogram + t int64 + f float64 + h *histogram.Histogram + fh *histogram.FloatHistogram + identifyingLabels []int } -func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) chunks.Sample { - return sample{t, v, h, fh} +func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram, ils []int) chunks.Sample { + return sample{t, v, h, fh, ils} } func (s sample) T() int64 { return s.t } func (s sample) F() float64 { return s.f } func (s sample) H() *histogram.Histogram { return s.h } func (s sample) FH() *histogram.FloatHistogram { return s.fh } +func (s sample) IdentifyingLabels() []int { return s.identifyingLabels } func (s sample) Type() chunkenc.ValueType { switch { @@ -2080,6 +2083,8 @@ func (s sample) Type() chunkenc.ValueType { return chunkenc.ValHistogram case s.fh != nil: return chunkenc.ValFloatHistogram + case s.identifyingLabels != nil: + return chunkenc.ValInfoSample default: return chunkenc.ValFloat } @@ -2130,6 +2135,8 @@ type memSeries struct { // We keep the last value here (in addition to appending it to the chunk) so we can check for duplicates. 
lastValue float64 + // We keep the last info metric identifying labels here so we can check for duplicates. + lastIdentifyingLabels []int // We keep the last histogram value here (in addition to appending it to the chunk) so we can check for duplicates. lastHistogramValue *histogram.Histogram diff --git a/tsdb/head_append.go b/tsdb/head_append.go index 054c393e74..e71949c066 100644 --- a/tsdb/head_append.go +++ b/tsdb/head_append.go @@ -18,6 +18,7 @@ import ( "errors" "fmt" "math" + "slices" "github.com/go-kit/log/level" @@ -51,6 +52,16 @@ func (a *initAppender) Append(ref storage.SeriesRef, lset labels.Labels, t int64 return a.app.Append(ref, lset, t, v) } +func (a *initAppender) AppendInfoSample(ref storage.SeriesRef, lset labels.Labels, t int64, identifyingLabels []int) (storage.SeriesRef, error) { + if a.app != nil { + return a.app.AppendInfoSample(ref, lset, t, identifyingLabels) + } + + a.head.initTime(t) + a.app = a.head.appender() + return a.app.AppendInfoSample(ref, lset, t, identifyingLabels) +} + func (a *initAppender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exemplar.Exemplar) (storage.SeriesRef, error) { // Check if exemplar storage is enabled. 
if !a.head.opts.EnableExemplarStorage || a.head.opts.MaxExemplars.Load() <= 0 { @@ -169,6 +180,7 @@ func (h *Head) appender() *headAppender { histograms: h.getHistogramBuffer(), floatHistograms: h.getFloatHistogramBuffer(), metadata: h.getMetadataBuffer(), + infoSamples: h.getInfoSampleBuffer(), appendID: appendID, cleanupAppendIDsBelow: cleanupAppendIDsBelow, } @@ -279,6 +291,22 @@ func (h *Head) putMetadataBuffer(b []record.RefMetadata) { h.metadataPool.Put(b[:0]) } +func (h *Head) getInfoSampleBuffer() []record.RefInfoSample { + if b := h.infoSamplesPool.Get(); b != nil { + return b + } + + return make([]record.RefInfoSample, 0, 512) +} + +func (h *Head) putInfoSampleBuffer(b []record.RefInfoSample) { + if b == nil { + return + } + + h.infoSamplesPool.Put(b[:0]) +} + func (h *Head) getSeriesBuffer() []*memSeries { b := h.seriesPool.Get() if b == nil { @@ -328,6 +356,8 @@ type headAppender struct { metadata []record.RefMetadata // New metadata held by this appender. metadataSeries []*memSeries // Series corresponding to the metadata held by this appender. exemplars []exemplarWithSeriesRef // New exemplars held by this appender. + infoSamples []record.RefInfoSample // New info metric samples held by this appender. + infoSeries []*memSeries // Info series corresponding to the info samples held by this appender (using corresponding slice indices - same series may appear more than once). appendID, cleanupAppendIDsBelow uint64 closed bool @@ -503,6 +533,50 @@ func (s *memSeries) appendable(t int64, v float64, headMaxt, minValidTime, oooTi return false, headMaxt - t, storage.ErrOutOfOrderSample } +// appendableInfoSample checks whether the given info sample is valid for appending to the series. (if we return false and no error) +// The sample belongs to the out of order chunk if we return true and no error. +// An error signifies the sample cannot be handled. 
+func (s *memSeries) appendableInfoSample(t int64, lbls labels.Labels, identifyingLabels []int, headMaxt, minValidTime, oooTimeWindow int64) (isOOO bool, oooDelta int64, err error) { + // Check if we can append in the in-order chunk. + if t >= minValidTime { + if s.headChunks == nil { + // The series has no sample and was freshly created. + return false, 0, nil + } + msMaxt := s.maxTime() + if t > msMaxt { + return false, 0, nil + } + if t == msMaxt { + // We are allowing exact duplicates as we can encounter them in valid cases + // like federation and erroring out at that time would be extremely noisy. + // This only checks against the latest in-order sample. + // The OOO headchunk has its own method to detect these duplicates. + slices.Sort(s.lastIdentifyingLabels) + slices.Sort(identifyingLabels) + if !slices.Equal(identifyingLabels, s.lastIdentifyingLabels) { + return false, 0, storage.ErrDuplicateSampleForTimestamp + } + // Sample is identical (ts + identifyingLabels) with most current (highest ts) sample in sampleBuf. + return false, 0, nil + } + } + + // The sample cannot go in the in-order chunk. Check if it can go in the out-of-order chunk. + if oooTimeWindow > 0 && t >= headMaxt-oooTimeWindow { + return true, headMaxt - t, nil + } + + // The sample cannot go in both in-order and out-of-order chunk. + if oooTimeWindow > 0 { + return true, headMaxt - t, storage.ErrTooOldSample + } + if t < minValidTime { + return false, headMaxt - t, storage.ErrOutOfBounds + } + return false, headMaxt - t, storage.ErrOutOfOrderSample +} + // appendableHistogram checks whether the given histogram is valid for appending to the series. 
func (s *memSeries) appendableHistogram(t int64, h *histogram.Histogram) error { if s.headChunks == nil { @@ -683,6 +757,60 @@ func (a *headAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels return storage.SeriesRef(s.ref), nil } +func (a *headAppender) AppendInfoSample(ref storage.SeriesRef, lset labels.Labels, t int64, identifyingLabels []int) (storage.SeriesRef, error) { + // For OOO inserts, this restriction is irrelevant and will be checked later once we confirm the sample is an in-order append. + // If OOO inserts are disabled, we may as well as check this as early as we can and avoid more work. + if a.oooTimeWindow == 0 && t < a.minValidTime { + a.head.metrics.outOfBoundSamples.WithLabelValues(sampleMetricTypeFloat).Inc() + return 0, storage.ErrOutOfBounds + } + + s := a.head.series.getByID(chunks.HeadSeriesRef(ref)) + if s == nil { + var err error + s, err = a.getOrCreate(lset) + if err != nil { + return 0, err + } + } + + s.Lock() + // TODO(codesome): If we definitely know at this point that the sample is ooo, then optimise + // to skip that sample from the WAL and write only in the WBL. 
+ _, delta, err := s.appendableInfoSample(t, lset, identifyingLabels, a.headMaxt, a.minValidTime, a.oooTimeWindow) + if err == nil { + s.pendingCommit = true + } + s.Unlock() + if delta > 0 { + a.head.metrics.oooHistogram.Observe(float64(delta) / 1000) + } + if err != nil { + switch { + case errors.Is(err, storage.ErrOutOfOrderSample): + a.head.metrics.outOfOrderSamples.WithLabelValues(sampleMetricTypeFloat).Inc() + case errors.Is(err, storage.ErrTooOldSample): + a.head.metrics.tooOldSamples.WithLabelValues(sampleMetricTypeFloat).Inc() + } + return 0, err + } + + if t < a.mint { + a.mint = t + } + if t > a.maxt { + a.maxt = t + } + + a.infoSamples = append(a.infoSamples, record.RefInfoSample{ + Ref: s.ref, + T: t, + IdentifyingLabels: identifyingLabels, + }) + a.infoSeries = append(a.infoSeries, s) + return storage.SeriesRef(s.ref), nil +} + // UpdateMetadata for headAppender assumes the series ref already exists, and so it doesn't // use getOrCreate or make any of the lset sanity checks that Append does. 
func (a *headAppender) UpdateMetadata(ref storage.SeriesRef, lset labels.Labels, meta metadata.Metadata) (storage.SeriesRef, error) { @@ -775,6 +903,13 @@ func (a *headAppender) log() error { return fmt.Errorf("log float histograms: %w", err) } } + if len(a.infoSamples) > 0 { + rec = enc.InfoSamples(a.infoSamples, buf) + buf = rec[:0] + if err := a.head.wal.Log(rec); err != nil { + return fmt.Errorf("log info metric samples: %w", err) + } + } // Exemplars should be logged after samples (float/native histogram/etc), // otherwise it might happen that we send the exemplars in a remote write // batch before the samples, which in turn means the exemplar is rejected @@ -845,6 +980,7 @@ func (a *headAppender) Commit() (err error) { defer a.head.putHistogramBuffer(a.histograms) defer a.head.putFloatHistogramBuffer(a.floatHistograms) defer a.head.putMetadataBuffer(a.metadata) + defer a.head.putInfoSampleBuffer(a.infoSamples) defer a.head.iso.closeAppend(a.appendID) var ( @@ -1059,6 +1195,98 @@ func (a *headAppender) Commit() (err error) { series.Unlock() } + for i, s := range a.infoSamples { + series = a.infoSeries[i] + series.Lock() + + oooSample, _, err := series.appendableInfoSample(s.T, series.lset, s.IdentifyingLabels, a.headMaxt, a.minValidTime, a.oooTimeWindow) + switch { + case err == nil: + // Do nothing. + case errors.Is(err, storage.ErrOutOfOrderSample): + // floatsAppended-- + // floatOOORejected++ + case errors.Is(err, storage.ErrOutOfBounds): + // floatsAppended-- + // floatOOBRejected++ + case errors.Is(err, storage.ErrTooOldSample): + // floatsAppended-- + // floatTooOldRejected++ + default: + // floatsAppended-- + } + + var ok, chunkCreated bool + switch { + case err != nil: + // Do nothing here. + case oooSample: + // Sample is OOO and OOO handling is enabled + // and the delta is within the OOO tolerance. 
+ /* TODO + var mmapRef chunks.ChunkDiskMapperRef + ok, chunkCreated, mmapRef = series.insert(s.T, s.V, a.head.chunkDiskMapper, oooCapMax) + if chunkCreated { + r, ok := oooMmapMarkers[series.ref] + if !ok || r != 0 { + // !ok means there are no markers collected for these samples yet. So we first flush the samples + // before setting this m-map marker. + + // r != 0 means we have already m-mapped a chunk for this series in the same Commit(). + // Hence, before we m-map again, we should add the samples and m-map markers + // seen till now to the WBL records. + collectOOORecords() + } + + if oooMmapMarkers == nil { + oooMmapMarkers = make(map[chunks.HeadSeriesRef]chunks.ChunkDiskMapperRef) + } + oooMmapMarkers[series.ref] = mmapRef + } + if ok { + wblSamples = append(wblSamples, s) + if s.T < ooomint { + ooomint = s.T + } + if s.T > ooomaxt { + ooomaxt = s.T + } + floatOOOAccepted++ + } else { + // Sample is an exact duplicate of the last sample. + // NOTE: We can only detect updates if they clash with a sample in the OOOHeadChunk, + // not with samples in already flushed OOO chunks. + // TODO(codesome): Add error reporting? It depends on addressing https://github.com/prometheus/prometheus/discussions/10305. + floatsAppended-- + } + */ + default: + ok, chunkCreated = series.appendInfoSample(s.T, s.IdentifyingLabels, a.appendID, appendChunkOpts) + if ok { + if s.T < inOrderMint { + inOrderMint = s.T + } + if s.T > inOrderMaxt { + inOrderMaxt = s.T + } + /* + } else { + // The sample is an exact duplicate, and should be silently dropped. 
+ floatsAppended-- + */ + } + } + + if chunkCreated { + a.head.metrics.chunks.Inc() + a.head.metrics.chunksCreated.Inc() + } + + series.cleanupAppendIDsBelow(a.cleanupAppendIDsBelow) + series.pendingCommit = false + series.Unlock() + } + a.head.metrics.outOfOrderSamples.WithLabelValues(sampleMetricTypeFloat).Add(float64(floatOOORejected)) a.head.metrics.outOfOrderSamples.WithLabelValues(sampleMetricTypeHistogram).Add(float64(histoOOORejected)) a.head.metrics.outOfBoundSamples.WithLabelValues(sampleMetricTypeFloat).Add(float64(floatOOBRejected)) @@ -1127,6 +1355,7 @@ func (s *memSeries) append(t int64, v float64, appendID uint64, o chunkOpts) (sa c.maxTime = t s.lastValue = v + s.lastIdentifyingLabels = nil s.lastHistogramValue = nil s.lastFloatHistogramValue = nil @@ -1251,6 +1480,36 @@ func (s *memSeries) appendFloatHistogram(t int64, fh *histogram.FloatHistogram, return true, true } +// appendInfoSample adds an info metric sample. +// s should be the corresponding info metric/time series. +func (s *memSeries) appendInfoSample(t int64, identifyingLabels []int, appendID uint64, o chunkOpts) (bool, bool) { + // Every time we receive an OTLP write, a sample is written to target_info for the resource's job/instance combo + // plus the resource's other attributes. The sample has the timestamp of the most recent timestamp among the + // resource's metric samples. + // When writing a target_info sample (per resource) to the head, we also want to write a sample for its + // identifying label set. I.e., given a target_info label set, we also need to persist which are its + // identifying labels starting from a certain timestamp. When the identifying label set changes, + // we have to persist another sample with the new identifying label set. 
+ c, sampleInOrder, chunkCreated := s.appendPreprocessor(t, chunkenc.EncInfoMetric, o) + if !sampleInOrder { + return sampleInOrder, chunkCreated + } + + s.app.AppendInfoSample(t, identifyingLabels) + c.maxTime = t + + s.lastIdentifyingLabels = identifyingLabels + s.lastValue = 0 + s.lastHistogramValue = nil + s.lastFloatHistogramValue = nil + + if appendID > 0 { + s.txs.add(appendID) + } + + return true, chunkCreated +} + // appendPreprocessor takes care of cutting new XOR chunks and m-mapping old ones. XOR chunks are cut based on the // number of samples they contain with a soft cap in bytes. // It is unsafe to call this concurrently with s.iterator(...) without holding the series lock. @@ -1509,8 +1768,8 @@ func (s *memSeries) mmapChunks(chunkDiskMapper chunkDiskMapper) (count int) { return } - // Write chunks starting from the oldest one and stop before we get to current s.headChunk. - // If we have this chain: s.headChunk{t4} -> t3 -> t2 -> t1 -> t0 + // Write chunks starting from the oldest one and stop before we get to current s.headChunks. + // If we have this chain: s.headChunks{t4} -> t3 -> t2 -> t1 -> t0 // then we need to write chunks t0 to t3, but skip s.headChunks. for i := s.headChunks.len() - 1; i > 0; i-- { chk := s.headChunks.atOffset(i) @@ -1561,15 +1820,24 @@ func (a *headAppender) Rollback() (err error) { series.pendingCommit = false series.Unlock() } + for i := range a.infoSamples { + series = a.infoSeries[i] + series.Lock() + series.cleanupAppendIDsBelow(a.cleanupAppendIDsBelow) + series.pendingCommit = false + series.Unlock() + } a.head.putAppendBuffer(a.samples) a.head.putExemplarBuffer(a.exemplars) a.head.putHistogramBuffer(a.histograms) a.head.putFloatHistogramBuffer(a.floatHistograms) a.head.putMetadataBuffer(a.metadata) + a.head.putInfoSampleBuffer(a.infoSamples) a.samples = nil a.exemplars = nil a.histograms = nil a.metadata = nil + a.infoSamples = nil // Series are created in the head memory regardless of rollback. 
Thus we have // to log them to the WAL in any case. diff --git a/tsdb/head_read_test.go b/tsdb/head_read_test.go index fbf64339e0..f0c63fb866 100644 --- a/tsdb/head_read_test.go +++ b/tsdb/head_read_test.go @@ -45,7 +45,7 @@ func TestBoundedChunk(t *testing.T) { name: "bounds represent a single sample", inputChunk: newTestChunk(10), expSamples: []sample{ - {0, 0, nil, nil}, + {0, 0, nil, nil, nil}, }, }, { @@ -54,14 +54,14 @@ func TestBoundedChunk(t *testing.T) { inputMinT: 1, inputMaxT: 8, expSamples: []sample{ - {1, 1, nil, nil}, - {2, 2, nil, nil}, - {3, 3, nil, nil}, - {4, 4, nil, nil}, - {5, 5, nil, nil}, - {6, 6, nil, nil}, - {7, 7, nil, nil}, - {8, 8, nil, nil}, + {1, 1, nil, nil, nil}, + {2, 2, nil, nil, nil}, + {3, 3, nil, nil, nil}, + {4, 4, nil, nil, nil}, + {5, 5, nil, nil, nil}, + {6, 6, nil, nil, nil}, + {7, 7, nil, nil, nil}, + {8, 8, nil, nil, nil}, }, }, { @@ -70,12 +70,12 @@ func TestBoundedChunk(t *testing.T) { inputMinT: 0, inputMaxT: 5, expSamples: []sample{ - {0, 0, nil, nil}, - {1, 1, nil, nil}, - {2, 2, nil, nil}, - {3, 3, nil, nil}, - {4, 4, nil, nil}, - {5, 5, nil, nil}, + {0, 0, nil, nil, nil}, + {1, 1, nil, nil, nil}, + {2, 2, nil, nil, nil}, + {3, 3, nil, nil, nil}, + {4, 4, nil, nil, nil}, + {5, 5, nil, nil, nil}, }, }, { @@ -84,11 +84,11 @@ func TestBoundedChunk(t *testing.T) { inputMinT: 5, inputMaxT: 9, expSamples: []sample{ - {5, 5, nil, nil}, - {6, 6, nil, nil}, - {7, 7, nil, nil}, - {8, 8, nil, nil}, - {9, 9, nil, nil}, + {5, 5, nil, nil, nil}, + {6, 6, nil, nil, nil}, + {7, 7, nil, nil, nil}, + {8, 8, nil, nil, nil}, + {9, 9, nil, nil, nil}, }, }, { @@ -99,11 +99,11 @@ func TestBoundedChunk(t *testing.T) { initialSeek: 1, seekIsASuccess: true, expSamples: []sample{ - {3, 3, nil, nil}, - {4, 4, nil, nil}, - {5, 5, nil, nil}, - {6, 6, nil, nil}, - {7, 7, nil, nil}, + {3, 3, nil, nil, nil}, + {4, 4, nil, nil, nil}, + {5, 5, nil, nil, nil}, + {6, 6, nil, nil, nil}, + {7, 7, nil, nil, nil}, }, }, { @@ -114,9 +114,9 @@ func 
TestBoundedChunk(t *testing.T) { initialSeek: 5, seekIsASuccess: true, expSamples: []sample{ - {5, 5, nil, nil}, - {6, 6, nil, nil}, - {7, 7, nil, nil}, + {5, 5, nil, nil, nil}, + {6, 6, nil, nil, nil}, + {7, 7, nil, nil, nil}, }, }, { @@ -141,14 +141,14 @@ func TestBoundedChunk(t *testing.T) { require.Equal(t, tc.seekIsASuccess, val == chunkenc.ValFloat) if val == chunkenc.ValFloat { t, v := it.At() - samples = append(samples, sample{t, v, nil, nil}) + samples = append(samples, sample{t, v, nil, nil, nil}) } } // Testing Next() for it.Next() == chunkenc.ValFloat { t, v := it.At() - samples = append(samples, sample{t, v, nil, nil}) + samples = append(samples, sample{t, v, nil, nil, nil}) } // it.Next() should keep returning no value. diff --git a/tsdb/head_test.go b/tsdb/head_test.go index 93c91a85ca..d2a16969dd 100644 --- a/tsdb/head_test.go +++ b/tsdb/head_test.go @@ -695,15 +695,15 @@ func TestHead_ReadWAL(t *testing.T) { c, _, _, err := s10.chunk(0, head.chunkDiskMapper, &head.memChunkPool) require.NoError(t, err) - require.Equal(t, []sample{{100, 2, nil, nil}, {101, 5, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) + require.Equal(t, []sample{{100, 2, nil, nil, nil}, {101, 5, nil, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) c, _, _, err = s50.chunk(0, head.chunkDiskMapper, &head.memChunkPool) require.NoError(t, err) - require.Equal(t, []sample{{101, 6, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) + require.Equal(t, []sample{{101, 6, nil, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) // The samples before the new series record should be discarded since a duplicate record // is only possible when old samples were compacted. 
c, _, _, err = s100.chunk(0, head.chunkDiskMapper, &head.memChunkPool) require.NoError(t, err) - require.Equal(t, []sample{{101, 7, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) + require.Equal(t, []sample{{101, 7, nil, nil, nil}}, expandChunk(c.chunk.Iterator(nil))) q, err := head.ExemplarQuerier(context.Background()) require.NoError(t, err) @@ -769,8 +769,8 @@ func TestHead_WALMultiRef(t *testing.T) { // The samples before the new ref should be discarded since Head truncation // happens only after compacting the Head. require.Equal(t, map[string][]chunks.Sample{`{foo="bar"}`: { - sample{1700, 3, nil, nil}, - sample{2000, 4, nil, nil}, + sample{1700, 3, nil, nil, nil}, + sample{2000, 4, nil, nil, nil}, }}, series) } @@ -1331,7 +1331,7 @@ func TestDeleteUntilCurMax(t *testing.T) { it = exps.Iterator(nil) resSamples, err := storage.ExpandSamples(it, newSample) require.NoError(t, err) - require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil}}, resSamples) + require.Equal(t, []chunks.Sample{sample{11, 1, nil, nil, nil}}, resSamples) for res.Next() { } require.NoError(t, res.Err()) @@ -1448,7 +1448,7 @@ func TestDelete_e2e(t *testing.T) { v := rand.Float64() _, err := app.Append(0, ls, ts, v) require.NoError(t, err) - series = append(series, sample{ts, v, nil, nil}) + series = append(series, sample{ts, v, nil, nil, nil}) ts += rand.Int63n(timeInterval) + 1 } seriesMap[labels.New(l...).String()] = series @@ -3323,7 +3323,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) { ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i)) require.NoError(t, err) maxt = ts - expSamples = append(expSamples, sample{ts, float64(i), nil, nil}) + expSamples = append(expSamples, sample{ts, float64(i), nil, nil, nil}) } require.NoError(t, app.Commit()) @@ -3861,17 +3861,17 @@ func TestChunkSnapshot(t *testing.T) { // 240 samples should m-map at least 1 chunk. 
for ts := int64(1); ts <= 240; ts++ { val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil, nil}) ref, err := app.Append(0, lbls, ts, val) require.NoError(t, err) hist := histograms[int(ts)] - expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil}) + expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil, nil}) _, err = app.AppendHistogram(0, lblsHist, ts, hist, nil) require.NoError(t, err) floatHist := floatHistogram[int(ts)] - expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist}) + expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist, nil}) _, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist) require.NoError(t, err) @@ -3936,17 +3936,17 @@ func TestChunkSnapshot(t *testing.T) { // 240 samples should m-map at least 1 chunk. 
for ts := int64(241); ts <= 480; ts++ { val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil, nil}) ref, err := app.Append(0, lbls, ts, val) require.NoError(t, err) hist := histograms[int(ts)] - expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil}) + expHist[lblsHistStr] = append(expHist[lblsHistStr], sample{ts, 0, hist, nil, nil}) _, err = app.AppendHistogram(0, lblsHist, ts, hist, nil) require.NoError(t, err) floatHist := floatHistogram[int(ts)] - expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist}) + expFloatHist[lblsFloatHistStr] = append(expFloatHist[lblsFloatHistStr], sample{ts, 0, nil, floatHist, nil}) _, err = app.AppendHistogram(0, lblsFloatHist, ts, nil, floatHist) require.NoError(t, err) diff --git a/tsdb/head_wal.go b/tsdb/head_wal.go index 0f395c89e3..3630ac1a24 100644 --- a/tsdb/head_wal.go +++ b/tsdb/head_wal.go @@ -58,6 +58,7 @@ func (h *Head) loadWAL(r *wlog.Reader, syms *labels.SymbolTable, multiRef map[ch var unknownRefs atomic.Uint64 var unknownExemplarRefs atomic.Uint64 var unknownHistogramRefs atomic.Uint64 + var unknownInfoRefs atomic.Uint64 var unknownMetadataRefs atomic.Uint64 // Track number of series records that had overlapping m-map chunks. 
var mmapOverlappingChunks atomic.Uint64 @@ -70,6 +71,7 @@ func (h *Head) loadWAL(r *wlog.Reader, syms *labels.SymbolTable, multiRef map[ch exemplarsInput chan record.RefExemplar shards = make([][]record.RefSample, concurrency) + infoShards = make([][]record.RefInfoSample, concurrency) histogramShards = make([][]histogramRecord, concurrency) decoded = make(chan interface{}, 10) @@ -81,6 +83,7 @@ func (h *Head) loadWAL(r *wlog.Reader, syms *labels.SymbolTable, multiRef map[ch exemplarsPool zeropool.Pool[[]record.RefExemplar] histogramsPool zeropool.Pool[[]record.RefHistogramSample] floatHistogramsPool zeropool.Pool[[]record.RefFloatHistogramSample] + infoSamplesPool zeropool.Pool[[]record.RefInfoSample] metadataPool zeropool.Pool[[]record.RefMetadata] ) @@ -101,10 +104,11 @@ func (h *Head) loadWAL(r *wlog.Reader, syms *labels.SymbolTable, multiRef map[ch processors[i].setup() go func(wp *walSubsetProcessor) { - unknown, unknownHistograms, overlapping := wp.processWALSamples(h, mmappedChunks, oooMmappedChunks) + unknown, unknownHistograms, unknownInfoSamples, overlapping := wp.processWALSamples(h, mmappedChunks, oooMmappedChunks) unknownRefs.Add(unknown) mmapOverlappingChunks.Add(overlapping) unknownHistogramRefs.Add(unknownHistograms) + unknownInfoRefs.Add(unknownInfoSamples) wg.Done() }(&processors[i]) } @@ -212,6 +216,18 @@ func (h *Head) loadWAL(r *wlog.Reader, syms *labels.SymbolTable, multiRef map[ch return } decoded <- hists + case record.InfoSamples: + samples := infoSamplesPool.Get()[:0] + samples, err = dec.InfoSamples(rec, samples) + if err != nil { + decodeErr = &wlog.CorruptionErr{ + Err: fmt.Errorf("decode info samples: %w", err), + Segment: r.Segment(), + Offset: r.Offset(), + } + return + } + decoded <- samples case record.Metadata: meta := metadataPool.Get()[:0] meta, err := dec.Metadata(rec, meta) @@ -380,6 +396,42 @@ Outer: samples = samples[m:] } floatHistogramsPool.Put(v) + case []record.RefInfoSample: + samples := v + minValidTime := 
h.minValidTime.Load() + // We split up the samples into chunks of 5000 samples or less. + // With O(300 * #cores) in-flight sample batches, large scrapes could otherwise + // cause thousands of very large in flight buffers occupying large amounts + // of unused memory. + for len(samples) > 0 { + m := 5000 + if len(samples) < m { + m = len(samples) + } + for i := 0; i < concurrency; i++ { + if infoShards[i] == nil { + infoShards[i] = processors[i].reuseInfoBuf() + } + } + for _, sam := range samples[:m] { + if sam.T < minValidTime { + continue // Before minValidTime: discard. + } + if r, ok := multiRef[sam.Ref]; ok { + sam.Ref = r + } + mod := uint64(sam.Ref) % uint64(concurrency) + infoShards[mod] = append(infoShards[mod], sam) + } + for i := 0; i < concurrency; i++ { + if len(infoShards[i]) > 0 { + processors[i].input <- walSubsetProcessorInputItem{infoSamples: infoShards[i]} + infoShards[i] = nil + } + } + samples = samples[m:] + } + infoSamplesPool.Put(v) case []record.RefMetadata: for _, m := range v { s := h.series.getByID(m.Ref) @@ -420,12 +472,13 @@ Outer: return fmt.Errorf("read records: %w", err) } - if unknownRefs.Load()+unknownExemplarRefs.Load()+unknownHistogramRefs.Load()+unknownMetadataRefs.Load() > 0 { + if unknownRefs.Load()+unknownExemplarRefs.Load()+unknownHistogramRefs.Load()+unknownInfoRefs.Load()+unknownMetadataRefs.Load() > 0 { level.Warn(h.logger).Log( "msg", "Unknown series references", "samples", unknownRefs.Load(), "exemplars", unknownExemplarRefs.Load(), "histograms", unknownHistogramRefs.Load(), + "info_samples", unknownInfoRefs.Load(), "metadata", unknownMetadataRefs.Load(), ) } @@ -513,11 +566,13 @@ type walSubsetProcessor struct { input chan walSubsetProcessorInputItem output chan []record.RefSample histogramsOutput chan []histogramRecord + infoOutput chan []record.RefInfoSample } type walSubsetProcessorInputItem struct { samples []record.RefSample histogramSamples []histogramRecord + infoSamples []record.RefInfoSample existingSeries 
*memSeries walSeriesRef chunks.HeadSeriesRef } @@ -526,6 +581,7 @@ func (wp *walSubsetProcessor) setup() { wp.input = make(chan walSubsetProcessorInputItem, 300) wp.output = make(chan []record.RefSample, 300) wp.histogramsOutput = make(chan []histogramRecord, 300) + wp.infoOutput = make(chan []record.RefInfoSample, 300) } func (wp *walSubsetProcessor) closeAndDrain() { @@ -534,6 +590,8 @@ func (wp *walSubsetProcessor) closeAndDrain() { } for range wp.histogramsOutput { } + for range wp.infoOutput { + } } // If there is a buffer in the output chan, return it for reuse, otherwise return nil. @@ -546,7 +604,7 @@ func (wp *walSubsetProcessor) reuseBuf() []record.RefSample { return nil } -// If there is a buffer in the output chan, return it for reuse, otherwise return nil. +// If there is a buffer in the histogramsOutput chan, return it for reuse, otherwise return nil. func (wp *walSubsetProcessor) reuseHistogramBuf() []histogramRecord { select { case buf := <-wp.histogramsOutput: @@ -556,10 +614,20 @@ func (wp *walSubsetProcessor) reuseHistogramBuf() []histogramRecord { return nil } +// If there is a buffer in the infoOutput chan, return it for reuse, otherwise return nil. +func (wp *walSubsetProcessor) reuseInfoBuf() []record.RefInfoSample { + select { + case buf := <-wp.infoOutput: + return buf[:0] + default: + } + return nil +} + // processWALSamples adds the samples it receives to the head and passes // the buffer received to an output channel for reuse. // Samples before the minValidTime timestamp are discarded. 
-func (wp *walSubsetProcessor) processWALSamples(h *Head, mmappedChunks, oooMmappedChunks map[chunks.HeadSeriesRef][]*mmappedChunk) (unknownRefs, unknownHistogramRefs, mmapOverlappingChunks uint64) { +func (wp *walSubsetProcessor) processWALSamples(h *Head, mmappedChunks, oooMmappedChunks map[chunks.HeadSeriesRef][]*mmappedChunk) (unknownRefs, unknownHistogramRefs, unknownInfoRefs, mmapOverlappingChunks uint64) { defer close(wp.output) defer close(wp.histogramsOutput) @@ -637,14 +705,48 @@ func (wp *walSubsetProcessor) processWALSamples(h *Head, mmappedChunks, oooMmapp } } + for _, s := range in.infoSamples { + if s.T < minValidTime { + continue + } + ms := h.series.getByID(s.Ref) + if ms == nil { + unknownInfoRefs++ + continue + } + if s.T <= ms.mmMaxTime { + continue + } + if _, chunkCreated := ms.appendInfoSample(s.T, s.IdentifyingLabels, 0, appendChunkOpts); chunkCreated { + h.metrics.chunksCreated.Inc() + h.metrics.chunks.Inc() + _ = ms.mmapChunks(h.chunkDiskMapper) + } + if s.T > maxt { + maxt = s.T + } + if s.T < mint { + mint = s.T + } + } + select { + case wp.output <- in.samples: + default: + } + select { case wp.histogramsOutput <- in.histogramSamples: default: } + + select { + case wp.infoOutput <- in.infoSamples: + default: + } } h.updateMinMaxTime(mint, maxt) - return unknownRefs, unknownHistogramRefs, mmapOverlappingChunks + return unknownRefs, unknownHistogramRefs, unknownInfoRefs, mmapOverlappingChunks } func (h *Head) loadWBL(r *wlog.Reader, syms *labels.SymbolTable, multiRef map[chunks.HeadSeriesRef]chunks.HeadSeriesRef, lastMmapRef chunks.ChunkDiskMapperRef) (err error) { @@ -927,6 +1029,7 @@ type chunkSnapshotRecord struct { lastValue float64 lastHistogramValue *histogram.Histogram lastFloatHistogramValue *histogram.FloatHistogram + lastIdentifyingLabels []int } func (s *memSeries) encodeToSnapshotRecord(b []byte) []byte { @@ -959,8 +1062,12 @@ func (s *memSeries) encodeToSnapshotRecord(b []byte) []byte { buf.PutBEFloat64(s.lastValue) case 
chunkenc.EncHistogram: record.EncodeHistogram(&buf, s.lastHistogramValue) - default: // chunkenc.FloatHistogram. + case chunkenc.EncFloatHistogram: record.EncodeFloatHistogram(&buf, s.lastFloatHistogramValue) + case chunkenc.EncInfoMetric: + record.EncodeInfoSample(&buf, s.lastIdentifyingLabels) + default: + panic(fmt.Sprintf("unrecognized encoding: %s", enc.String())) } } s.Unlock() @@ -1013,9 +1120,13 @@ func decodeSeriesFromChunkSnapshot(d *record.Decoder, b []byte) (csr chunkSnapsh case chunkenc.EncHistogram: csr.lastHistogramValue = &histogram.Histogram{} record.DecodeHistogram(&dec, csr.lastHistogramValue) - default: // chunkenc.FloatHistogram. + case chunkenc.EncFloatHistogram: csr.lastFloatHistogramValue = &histogram.FloatHistogram{} record.DecodeFloatHistogram(&dec, csr.lastFloatHistogramValue) + case chunkenc.EncInfoMetric: + csr.lastIdentifyingLabels = record.DecodeInfoSample(&dec) + default: + panic(fmt.Sprintf("unrecognized encoding: %s", enc.String())) } err = dec.Err() @@ -1403,6 +1514,7 @@ func (h *Head) loadChunkSnapshot() (int, int, map[chunks.HeadSeriesRef]*memSerie series.lastValue = csr.lastValue series.lastHistogramValue = csr.lastHistogramValue series.lastFloatHistogramValue = csr.lastFloatHistogramValue + series.lastIdentifyingLabels = csr.lastIdentifyingLabels app, err := series.headChunks.chunk.Appender() if err != nil { @@ -1499,7 +1611,7 @@ Outer: } default: - // This is a record type we don't understand. It is either and old format from earlier versions, + // This is a record type we don't understand. It is either an old format from earlier versions, // or a new format and the code was rolled back to old version. 
loopErr = fmt.Errorf("unsupported snapshot record type 0b%b", rec[0]) break Outer diff --git a/tsdb/index/index.go b/tsdb/index/index.go index 263e1e2ac1..ba5fc356b5 100644 --- a/tsdb/index/index.go +++ b/tsdb/index/index.go @@ -159,7 +159,7 @@ type Writer struct { postingsEncoder PostingsEncoder } -// TOC represents index Table Of Content that states where each section of index starts. +// TOC represents the index Table Of Contents that states where each section of the index starts. type TOC struct { Symbols uint64 Series uint64 @@ -169,7 +169,7 @@ type TOC struct { PostingsTable uint64 } -// NewTOCFromByteSlice return parsed TOC from given index byte slice. +// NewTOCFromByteSlice returns parsed TOC from given index byte slice. func NewTOCFromByteSlice(bs ByteSlice) (*TOC, error) { if bs.Len() < indexTOCLen { return nil, encoding.ErrInvalidSize diff --git a/tsdb/ooo_head.go b/tsdb/ooo_head.go index 7f2110fa65..e396fa1f99 100644 --- a/tsdb/ooo_head.go +++ b/tsdb/ooo_head.go @@ -44,7 +44,7 @@ func (o *OOOChunk) Insert(t int64, v float64) bool { // try to append at the end first if the new timestamp is higher than the // last known timestamp. if len(o.samples) == 0 || t > o.samples[len(o.samples)-1].t { - o.samples = append(o.samples, sample{t, v, nil, nil}) + o.samples = append(o.samples, sample{t, v, nil, nil, nil}) return true } @@ -53,7 +53,7 @@ func (o *OOOChunk) Insert(t int64, v float64) bool { if i >= len(o.samples) { // none found. append it at the end - o.samples = append(o.samples, sample{t, v, nil, nil}) + o.samples = append(o.samples, sample{t, v, nil, nil, nil}) return true } @@ -65,7 +65,7 @@ func (o *OOOChunk) Insert(t int64, v float64) bool { // Expand length by 1 to make room. use a zero sample, we will overwrite it anyway. 
o.samples = append(o.samples, sample{}) copy(o.samples[i+1:], o.samples[i:]) - o.samples[i] = sample{t, v, nil, nil} + o.samples[i] = sample{t, v, nil, nil, nil} return true } diff --git a/tsdb/ooo_head_test.go b/tsdb/ooo_head_test.go index 27ff4048b7..251677bd47 100644 --- a/tsdb/ooo_head_test.go +++ b/tsdb/ooo_head_test.go @@ -25,7 +25,7 @@ const testMaxSize int = 32 func valEven(pos int) int { return pos*2 + 2 } // s[0]=2, s[1]=4, s[2]=6, ..., s[31]=64 - Predictable pre-existing values func valOdd(pos int) int { return pos*2 + 1 } // s[0]=1, s[1]=3, s[2]=5, ..., s[31]=63 - New values will interject at chosen position because they sort before the pre-existing vals. -func samplify(v int) sample { return sample{int64(v), float64(v), nil, nil} } +func samplify(v int) sample { return sample{int64(v), float64(v), nil, nil, nil} } func makeEvenSampleSlice(n int) []sample { s := make([]sample, n) diff --git a/tsdb/querier.go b/tsdb/querier.go index cf461d6cc0..531249f18d 100644 --- a/tsdb/querier.go +++ b/tsdb/querier.go @@ -886,6 +886,10 @@ func (p *populateWithDelSeriesIterator) AtFloatHistogram(fh *histogram.FloatHist return p.curr.AtFloatHistogram(fh) } +func (p *populateWithDelSeriesIterator) AtInfoSample() (int64, []int) { + return p.curr.AtInfoSample() +} + func (p *populateWithDelSeriesIterator) AtT() int64 { return p.curr.AtT() } @@ -1032,6 +1036,20 @@ func (p *populateWithDelChunkSeriesIterator) populateCurrForSingleChunk() bool { break } } + case chunkenc.ValInfoSample: + newChunk = chunkenc.NewInfoSampleChunk() + if app, err = newChunk.Appender(); err != nil { + break + } + for vt := valueType; vt != chunkenc.ValNone; vt = p.currDelIter.Next() { + if vt != chunkenc.ValInfoSample { + err = fmt.Errorf("found value type %v in info metric chunk", vt) + break + } + var ils []int + t, ils = p.currDelIter.AtInfoSample() + app.AppendInfoSample(t, ils) + } default: err = fmt.Errorf("populateCurrForSingleChunk: value type %v unsupported", valueType) } @@ -1088,7 
+1106,7 @@ func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool { // Check if the encoding has changed (i.e. we need to create a new // chunk as chunks can't have multiple encoding types). // For the first sample, the following condition will always be true as - // ValNoneNone != ValFloat | ValHistogram | ValFloatHistogram. + // ValNoneNone != ValFloat | ValHistogram | ValFloatHistogram | ValInfoSample. if currentValueType != prevValueType { if prevValueType != chunkenc.ValNone { p.chunksFromIterable = append(p.chunksFromIterable, chunks.Meta{Chunk: currentChunk, MinTime: cmint, MaxTime: cmaxt}) @@ -1125,6 +1143,14 @@ func (p *populateWithDelChunkSeriesIterator) populateChunksFromIterable() bool { // counter reset header for the appender that's returned. newChunk, recoded, app, err = app.AppendFloatHistogram(nil, t, v, false) } + case chunkenc.ValInfoSample: + { + var ils []int + t, ils = p.currDelIter.AtInfoSample() + app.AppendInfoSample(t, ils) + } + default: + err = fmt.Errorf("unrecognized chunk encoding %s", currentValueType) } if err != nil { @@ -1301,6 +1327,10 @@ func (it *DeletedIterator) AtFloatHistogram(fh *histogram.FloatHistogram) (int64 return t, h } +func (it *DeletedIterator) AtInfoSample() (int64, []int) { + return it.Iter.AtInfoSample() +} + func (it *DeletedIterator) AtT() int64 { return it.Iter.AtT() } diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go index 08a053d89b..8b024d5dc5 100644 --- a/tsdb/querier_test.go +++ b/tsdb/querier_test.go @@ -315,24 +315,24 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, 
nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}, sample{5, 1, nil, nil, nil}, sample{6, 7, nil, nil, nil}, sample{7, 2, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, 
[]chunks.Sample{sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil, nil}, sample{6, 7, nil, nil, nil}, sample{7, 2, nil, nil, nil}}, ), }), }, @@ -342,18 +342,18 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), }, @@ -366,20 +366,20 @@ func TestBlockQuerier(t *testing.T) { ms: 
[]*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, - []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), }, @@ -392,18 +392,18 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: 
newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), }, @@ -417,24 +417,24 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, 
nil}}, ), storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}, sample{5, 1, nil, nil, nil}, sample{6, 7, nil, nil, nil}, sample{7, 2, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil, nil}, sample{6, 7, nil, nil, nil}, sample{7, 2, nil, nil, nil}}, ), }), }, @@ -491,24 +491,24 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: 
newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}, sample{5, 1, nil, nil, nil}, sample{6, 7, nil, nil, nil}, sample{7, 2, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, 
nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}, sample{5, 1, nil, nil, nil}, sample{6, 7, nil, nil, nil}, sample{7, 2, nil, nil, nil}}, ), }), }, @@ -518,18 +518,18 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{2, 3, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}, 
sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), }, @@ -574,18 +574,18 @@ func TestBlockQuerier_TrimmingDoesNotModifyOriginalTombstoneIntervals(t *testing ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{3, 4, nil, nil, nil}, sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{3, 3, nil, nil, nil}, sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{3, 4, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{3, 4, nil, nil, nil}}, []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{3, 3, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, + []chunks.Sample{sample{3, 3, nil, nil, nil}}, []chunks.Sample{sample{5, 3, nil, nil, nil}, sample{6, 6, nil, nil, nil}}, ), }), } @@ -611,22 +611,22 @@ var testData = []seriesSamples{ { lset: map[string]string{"a": "a"}, chunks: [][]sample{ - {{1, 2, nil, nil}, {2, 3, nil, nil}, {3, 4, nil, nil}}, - {{5, 2, nil, nil}, {6, 3, nil, nil}, {7, 4, nil, nil}}, + {{1, 2, nil, nil, nil}, {2, 3, nil, nil, nil}, {3, 4, nil, nil, nil}}, + {{5, 2, nil, nil, nil}, {6, 3, nil, nil, nil}, {7, 4, nil, nil, nil}}, }, }, { lset: map[string]string{"a": "a", "b": "b"}, chunks: [][]sample{ - {{1, 1, nil, nil}, {2, 2, nil, nil}, {3, 3, nil, nil}}, - {{5, 3, nil, 
nil}, {6, 6, nil, nil}}, + {{1, 1, nil, nil, nil}, {2, 2, nil, nil, nil}, {3, 3, nil, nil, nil}}, + {{5, 3, nil, nil, nil}, {6, 6, nil, nil, nil}}, }, }, { lset: map[string]string{"b": "b"}, chunks: [][]sample{ - {{1, 3, nil, nil}, {2, 2, nil, nil}, {3, 6, nil, nil}}, - {{5, 1, nil, nil}, {6, 7, nil, nil}, {7, 2, nil, nil}}, + {{1, 3, nil, nil, nil}, {2, 2, nil, nil, nil}, {3, 6, nil, nil, nil}}, + {{5, 1, nil, nil, nil}, {6, 7, nil, nil, nil}, {7, 2, nil, nil, nil}}, }, }, } @@ -673,24 +673,24 @@ func TestBlockQuerierDelete(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}, sample{5, 1, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}, sample{7, 4, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("b", "b"), - []chunks.Sample{sample{1, 3, nil, nil}, sample{2, 2, 
nil, nil}, sample{3, 6, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil}}, + []chunks.Sample{sample{1, 3, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 6, nil, nil, nil}}, []chunks.Sample{sample{5, 1, nil, nil, nil}}, ), }), }, @@ -700,18 +700,18 @@ func TestBlockQuerierDelete(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListSeries(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a"), - []chunks.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, + []chunks.Sample{sample{5, 2, nil, nil, nil}, sample{6, 3, nil, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.FromStrings("a", "a", "b", "b"), - []chunks.Sample{sample{5, 3, nil, nil}}, + []chunks.Sample{sample{5, 3, nil, nil, nil}}, ), }), }, @@ -823,6 +823,10 @@ func (it *mockSampleIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64 return it.s[it.idx].T(), it.s[it.idx].FH() } +func (it *mockSampleIterator) AtInfoSample() (int64, []int) { + return it.s[it.idx].T(), it.s[it.idx].IdentifyingLabels() +} + func (it *mockSampleIterator) AtT() int64 { return it.s[it.idx].T() } @@ -908,15 +912,15 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "one chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, 
sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -924,19 +928,19 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two full chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}}, @@ -944,23 +948,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "three full chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, 
nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, - {sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, + {sample{10, 22, nil, nil, nil}, sample{203, 3493, nil, nil, nil}}, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, sample{10, 22, nil, nil, nil}, sample{203, 3493, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}, + sample{10, 22, nil, nil, nil}, sample{203, 3493, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}, {10, 203}}, @@ -976,8 +980,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks and seek beyond chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, 
nil, nil}, sample{9, 8, nil, nil, nil}}, }, seek: 10, @@ -986,27 +990,27 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks and seek on middle of first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, seek: 2, seekSuccess: true, expected: []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }, }, { name: "two chunks and seek before first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, seek: -32, seekSuccess: true, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }, }, // Deletion / Trim cases. 
@@ -1018,20 +1022,20 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed first and last samples from edge chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), expected: []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, + sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, + sample{7, 89, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}, {7, 7}}, @@ -1039,20 +1043,20 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed middle sample of first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}}, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }, 
expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{6, 1, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}}, @@ -1060,20 +1064,20 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with deletion across two chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}}, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{9, 8, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{9, 8, nil, nil}, + sample{9, 8, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}, {9, 9}}, @@ -1081,17 +1085,17 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with first chunk deleted", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, 
sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 6}}, expected: []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, + sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 9}}, @@ -1100,22 +1104,22 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), seek: 3, seekSuccess: true, expected: []chunks.Sample{ - sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, + sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 89, nil, nil, nil}, }, }, { name: "one chunk where all samples are trimmed", samples: [][]chunks.Sample{ - {sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, - {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, + {sample{7, 89, nil, nil, nil}, sample{9, 8, nil, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 3}}.Add(tombstones.Interval{Mint: 4, Maxt: math.MaxInt64}), @@ -1126,24 +1130,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: 
"one histogram chunk", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, }, }, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil, nil}, + sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil, nil}, + sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil, nil}, + sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil, nil}, + sample{6, 0, 
tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1152,21 +1156,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil, nil}, + sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, - sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil, nil}, + sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1175,23 +1179,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestHistogram(1), 
nil}, - sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil, nil}, + sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, - sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil}, - sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil}, + sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil, nil}, + sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1200,24 +1204,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one float histogram chunk", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, - sample{6, 0, nil, 
tsdbutil.GenerateTestFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, }, }, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2)), nil}, + sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3)), nil}, + sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6)), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2)), nil}, + sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3)), nil}, + sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ 
-1226,21 +1230,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one float histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3)), nil}, + sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6)), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, - sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3)), nil}, + sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1249,23 +1253,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one float histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, 
tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2)), nil}, + sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3)), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))}, - sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))}, + sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2)), nil}, + sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1274,24 +1278,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge histogram chunk", samples: [][]chunks.Sample{ { - sample{1, 0, 
tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }, }, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1300,21 +1304,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, 
tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1323,23 +1327,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, - sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, + sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil, nil}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - 
sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil}, - sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil}, - sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil}, + sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil, nil}, + sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil, nil}, + sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 3}}, @@ -1348,24 +1352,24 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge float histogram", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }, }, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, 
tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}}, @@ -1374,21 +1378,21 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge float histogram chunk intersect with earlier deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 1, Maxt: 2}}, expected: []chunks.Sample{ - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, 
tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{3, 6}}, @@ -1397,23 +1401,23 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "one gauge float histogram chunk intersect with later deletion interval", samples: [][]chunks.Sample{ { - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, - sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, + sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6), nil}, }, }, intervals: tombstones.Intervals{{Mint: 5, Maxt: 20}}, expected: []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)}, - sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)}, - sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)}, + sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1), nil}, + sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2), nil}, + sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3), nil}, }), }, expectedMinMaxTimes: 
[]minMaxTimes{{1, 3}}, @@ -1421,31 +1425,31 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "three full mixed chunks", samples: [][]chunks.Sample{ - {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}}, { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }, { - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, }, expected: []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, nil}, sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, + sample{1, 2, nil, nil, nil}, sample{2, 3, nil, nil, nil}, sample{3, 5, nil, nil, nil}, sample{6, 1, nil, nil, 
nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{1, 6}, {7, 9}, {10, 203}}, @@ -1454,30 +1458,30 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "three full mixed chunks in different order", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }, - {sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}}, + {sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}}, { - sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{203, 0, nil, 
tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}, sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, + sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 9}, {11, 16}, {100, 203}}, @@ -1486,29 +1490,29 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "three full mixed chunks in different order intersect with deletion interval", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{9, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }, - {sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, 
sample{16, 1, nil, nil}}, + {sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}}, { - sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{100, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, }, intervals: tombstones.Intervals{{Mint: 8, Maxt: 11}, {Mint: 15, Maxt: 150}}, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, + sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 7}, {12, 13}, {203, 203}}, @@ -1517,30 +1521,30 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "three full mixed chunks overlapping", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }, - {sample{11, 2, 
nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}}, + {sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}}, { - sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22)}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}, sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil}, - sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestGaugeHistogram(89), nil, nil}, + sample{12, 0, tsdbutil.GenerateTestGaugeHistogram(8), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{11, 2, nil, nil}, sample{12, 3, nil, nil}, sample{13, 5, nil, nil}, sample{16, 1, nil, nil}, + sample{11, 2, nil, nil, nil}, sample{12, 3, nil, nil, nil}, sample{13, 5, nil, nil, nil}, sample{16, 1, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{10, 0, nil, 
tsdbutil.GenerateTestGaugeFloatHistogram(22)}, - sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493)}, + sample{10, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(22), nil}, + sample{203, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3493), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{{7, 12}, {11, 16}, {10, 203}}, @@ -1551,56 +1555,56 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "int histogram iterables with counter resets", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil, nil}, // Counter reset should be detected when chunks are created from the iterable. - sample{12, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{15, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{16, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{12, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}, + sample{15, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{16, 0, tsdbutil.GenerateTestHistogram(7), nil, nil}, // Counter reset should be detected when chunks are created from the iterable. - sample{17, 0, tsdbutil.GenerateTestHistogram(5), nil}, + sample{17, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}, }, { - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{19, 0, tsdbutil.GenerateTestHistogram(7), nil, nil}, // Counter reset should be detected when chunks are created from the iterable. 
- sample{20, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{21, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{20, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}, + sample{21, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, - sample{12, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{15, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{16, 0, tsdbutil.GenerateTestHistogram(7), nil}, - sample{17, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 0, tsdbutil.GenerateTestHistogram(7), nil}, - sample{20, 0, tsdbutil.GenerateTestHistogram(5), nil}, - sample{21, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil, nil}, + sample{12, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}, + sample{15, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{16, 0, tsdbutil.GenerateTestHistogram(7), nil, nil}, + sample{17, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}, + sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{19, 0, tsdbutil.GenerateTestHistogram(7), nil, nil}, + sample{20, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}, + sample{21, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(9)), nil}, + sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{8, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(9)), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, - sample{15, 0, 
tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, - sample{16, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, + sample{12, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil, nil}, + sample{15, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil, nil}, + sample{16, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{17, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, + sample{17, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, + sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{19, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(7)), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{20, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil}, - sample{21, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil}, + sample{20, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(5)), nil, nil}, + sample{21, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{ @@ -1622,56 +1626,56 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "float histogram iterables with counter resets", samples: [][]chunks.Sample{ { - sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, - sample{8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9)}, + sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8), nil}, + sample{8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9), nil}, // Counter reset should be detected 
when chunks are created from the iterable. - sample{12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, + sample{12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}, + sample{15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, + sample{16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7), nil}, // Counter reset should be detected when chunks are created from the iterable. - sample{17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, + sample{17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}, }, { - sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, + sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, + sample{19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7), nil}, // Counter reset should be detected when chunks are created from the iterable. - sample{20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}, + sample{21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, - sample{8, 0, nil, tsdbutil.GenerateTestFloatHistogram(9)}, - sample{12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, - sample{17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7)}, - sample{20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}, - sample{21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, + sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8), nil}, + sample{8, 0, nil, 
tsdbutil.GenerateTestFloatHistogram(9), nil}, + sample{12, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}, + sample{15, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, + sample{16, 0, nil, tsdbutil.GenerateTestFloatHistogram(7), nil}, + sample{17, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}, + sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, + sample{19, 0, nil, tsdbutil.GenerateTestFloatHistogram(7), nil}, + sample{20, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}, + sample{21, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}, - sample{8, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(9))}, + sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8), nil}, + sample{8, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(9)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, - sample{15, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, - sample{16, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7))}, + sample{12, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5)), nil}, + sample{15, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6)), nil}, + sample{16, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{17, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, + sample{17, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{18, 
0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, - sample{19, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7))}, + sample{18, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, + sample{19, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(7)), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{20, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5))}, - sample{21, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))}, + sample{20, 0, nil, tsdbutil.SetFloatHistogramCounterReset(tsdbutil.GenerateTestFloatHistogram(5)), nil}, + sample{21, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6)), nil}, }), }, expectedMinMaxTimes: []minMaxTimes{ @@ -1693,61 +1697,61 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { name: "iterables with mixed encodings and counter resets", samples: [][]chunks.Sample{ { - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, - sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, - sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, - sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, - sample{12, 13, nil, nil}, - sample{13, 14, nil, nil}, - sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil, nil}, + sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10), nil}, + sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11), nil}, + sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12), nil}, + sample{12, 13, nil, nil, nil}, + sample{13, 14, nil, nil, nil}, + sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, // Counter reset should be detected when chunks are created from the iterable. 
- sample{15, 0, tsdbutil.GenerateTestHistogram(7), nil}, + sample{15, 0, tsdbutil.GenerateTestHistogram(7), nil, nil}, }, { - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 45, nil, nil}, + sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{19, 45, nil, nil, nil}, }, }, expected: []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, - sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, - sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, - sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12)}, - sample{12, 13, nil, nil}, - sample{13, 14, nil, nil}, - sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{15, 0, tsdbutil.GenerateTestHistogram(7), nil}, - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, - sample{19, 45, nil, nil}, + sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil, nil}, + sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10), nil}, + sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11), nil}, + sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12), nil}, + sample{12, 13, nil, nil, nil}, + sample{13, 14, nil, nil, nil}, + sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{15, 0, tsdbutil.GenerateTestHistogram(7), nil, nil}, + sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, + sample{19, 45, nil, nil, nil}, }, expectedChks: []chunks.Meta{ assureChunkFromSamples(t, []chunks.Sample{ - sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}, - sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil}, + sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, + sample{8, 0, tsdbutil.GenerateTestHistogram(9), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10)}, - sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11)}, - sample{11, 0, 
nil, tsdbutil.GenerateTestFloatHistogram(12)}, + sample{9, 0, nil, tsdbutil.GenerateTestFloatHistogram(10), nil}, + sample{10, 0, nil, tsdbutil.GenerateTestFloatHistogram(11), nil}, + sample{11, 0, nil, tsdbutil.GenerateTestFloatHistogram(12), nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{12, 13, nil, nil}, - sample{13, 14, nil, nil}, + sample{12, 13, nil, nil, nil}, + sample{13, 14, nil, nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil}, + sample{14, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{15, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(7)), nil}, + sample{15, 0, tsdbutil.SetHistogramCounterReset(tsdbutil.GenerateTestHistogram(7)), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil}, + sample{18, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, }), assureChunkFromSamples(t, []chunks.Sample{ - sample{19, 45, nil, nil}, + sample{19, 45, nil, nil, nil}, }), }, expectedMinMaxTimes: []minMaxTimes{ @@ -1888,8 +1892,8 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ {}, - {sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, - {sample{4, 4, nil, nil}, sample{5, 5, nil, nil}}, + {sample{1, 1, nil, nil, nil}, sample{2, 2, nil, nil, nil}, sample{3, 3, nil, nil, nil}}, + {sample{4, 4, nil, nil, nil}, sample{5, 5, nil, nil, nil}}, }, }, { @@ -1897,8 +1901,8 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil}, sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil}, sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil}}, - {sample{4, 0, tsdbutil.GenerateTestHistogram(4), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(5), nil}}, 
+ {sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil, nil}, sample{2, 0, tsdbutil.GenerateTestHistogram(2), nil, nil}, sample{3, 0, tsdbutil.GenerateTestHistogram(3), nil, nil}}, + {sample{4, 0, tsdbutil.GenerateTestHistogram(4), nil, nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(5), nil, nil}}, }, }, { @@ -1906,8 +1910,8 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)}, sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3)}}, - {sample{4, 0, nil, tsdbutil.GenerateTestFloatHistogram(4)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(5)}}, + {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1), nil}, sample{2, 0, nil, tsdbutil.GenerateTestFloatHistogram(2), nil}, sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(3), nil}}, + {sample{4, 0, nil, tsdbutil.GenerateTestFloatHistogram(4), nil}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(5), nil}}, }, }, } @@ -1941,7 +1945,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ {}, - {sample{1, 2, nil, nil}, sample{3, 4, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}}, + {sample{1, 2, nil, nil, nil}, sample{3, 4, nil, nil, nil}, sample{5, 6, nil, nil, nil}, sample{7, 8, nil, nil, nil}}, {}, }, }, @@ -1950,7 +1954,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, tsdbutil.GenerateTestHistogram(2), nil}, sample{3, 0, tsdbutil.GenerateTestHistogram(4), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}}, + {sample{1, 0, tsdbutil.GenerateTestHistogram(2), nil, nil}, sample{3, 0, tsdbutil.GenerateTestHistogram(4), nil, nil}, sample{5, 0, 
tsdbutil.GenerateTestHistogram(6), nil, nil}, sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}}, {}, }, }, @@ -1959,7 +1963,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ {}, - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(2)}, sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(4)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, + {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(2), nil}, sample{3, 0, nil, tsdbutil.GenerateTestFloatHistogram(4), nil}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8), nil}}, {}, }, }, @@ -1991,21 +1995,21 @@ func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) { name: "float", valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ - {sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{6, 8, nil, nil}}, + {sample{1, 6, nil, nil, nil}, sample{5, 6, nil, nil, nil}, sample{6, 8, nil, nil, nil}}, }, }, { name: "histogram", valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{6, 0, tsdbutil.GenerateTestHistogram(8), nil}}, + {sample{1, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, sample{6, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}}, }, }, { name: "float histogram", valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{6, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, + {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, sample{6, 0, nil, 
tsdbutil.GenerateTestFloatHistogram(8), nil}}, }, }, } @@ -2034,21 +2038,21 @@ func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) { name: "float", valType: chunkenc.ValFloat, chks: [][]chunks.Sample{ - {sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}}, + {sample{1, 6, nil, nil, nil}, sample{5, 6, nil, nil, nil}, sample{7, 8, nil, nil, nil}}, }, }, { name: "histogram", valType: chunkenc.ValHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil}, sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil}}, + {sample{1, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, sample{5, 0, tsdbutil.GenerateTestHistogram(6), nil, nil}, sample{7, 0, tsdbutil.GenerateTestHistogram(8), nil, nil}}, }, }, { name: "float histogram", valType: chunkenc.ValFloatHistogram, chks: [][]chunks.Sample{ - {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6)}, sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8)}}, + {sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, sample{5, 0, nil, tsdbutil.GenerateTestFloatHistogram(6), nil}, sample{7, 0, nil, tsdbutil.GenerateTestFloatHistogram(8), nil}}, }, }, } diff --git a/tsdb/record/record.go b/tsdb/record/record.go index 8a8409e55f..6ffbb87b08 100644 --- a/tsdb/record/record.go +++ b/tsdb/record/record.go @@ -52,6 +52,8 @@ const ( HistogramSamples Type = 7 // FloatHistogramSamples is used to match WAL records of type Float Histograms. FloatHistogramSamples Type = 8 + // InfoSamples is used to match WAL records of type Info Samples. 
+ InfoSamples Type = 9 ) func (rt Type) String() string { @@ -68,6 +70,8 @@ func (rt Type) String() string { return "histogram_samples" case FloatHistogramSamples: return "float_histogram_samples" + case InfoSamples: + return "info_metric_samples" case MmapMarkers: return "mmapmarkers" case Metadata: @@ -163,7 +167,7 @@ type RefMetadata struct { Help string } -// RefExemplar is an exemplar with it's labels, timestamp, value the exemplar was collected/observed with, and a reference to a series. +// RefExemplar is an exemplar with the labels, timestamp, value the exemplar was collected/observed with, and a reference to a series. type RefExemplar struct { Ref chunks.HeadSeriesRef T int64 @@ -185,6 +189,13 @@ type RefFloatHistogramSample struct { FH *histogram.FloatHistogram } +// RefInfoSample is an info metric sample. +type RefInfoSample struct { + Ref chunks.HeadSeriesRef + T int64 + IdentifyingLabels []int +} + // RefMmapMarker marks that the all the samples of the given series until now have been m-mapped to disk. type RefMmapMarker struct { Ref chunks.HeadSeriesRef @@ -207,7 +218,7 @@ func (d *Decoder) Type(rec []byte) Type { return Unknown } switch t := Type(rec[0]); t { - case Series, Samples, Tombstones, Exemplars, MmapMarkers, Metadata, HistogramSamples, FloatHistogramSamples: + case Series, Samples, Tombstones, Exemplars, MmapMarkers, Metadata, HistogramSamples, FloatHistogramSamples, InfoSamples: return t } return Unknown @@ -543,7 +554,49 @@ func (d *Decoder) FloatHistogramSamples(rec []byte, histograms []RefFloatHistogr return histograms, nil } -// Decode decodes a Histogram from a byte slice. +// InfoSamples appends info samples in rec to the given slice. 
+func (d *Decoder) InfoSamples(rec []byte, infoSamples []RefInfoSample) ([]RefInfoSample, error) { + dec := encoding.Decbuf{B: rec} + + if Type(dec.Byte()) != InfoSamples { + return nil, errors.New("invalid record type") + } + if dec.Len() == 0 { + return infoSamples, nil + } + baseRef := dec.Be64() + baseTime := dec.Be64int64() + // Allow 1 byte for each varint and 8 for the length; the output slice must be at least that big. + if minSize := dec.Len() / (1 + 1 + 8); cap(infoSamples) < minSize { + infoSamples = make([]RefInfoSample, 0, minSize) + } + for len(dec.B) > 0 && dec.Err() == nil { + dref := dec.Varint64() + dtime := dec.Varint64() + l := dec.Uvarint() + identifyingLabels := make([]int, 0, l) + for i := 0; i < l; i++ { + idx := int(dec.Uvarint32()) + identifyingLabels = append(identifyingLabels, idx) + } + + infoSamples = append(infoSamples, RefInfoSample{ + Ref: chunks.HeadSeriesRef(int64(baseRef) + dref), + T: baseTime + dtime, + IdentifyingLabels: identifyingLabels, + }) + } + + if dec.Err() != nil { + return nil, fmt.Errorf("decode error after %d info samples: %w", len(infoSamples), dec.Err()) + } + if len(dec.B) > 0 { + return nil, fmt.Errorf("unexpected %d bytes left in entry", len(dec.B)) + } + return infoSamples, nil +} + +// DecodeFloatHistogram decodes a FloatHistogram from a byte slice. func DecodeFloatHistogram(buf *encoding.Decbuf, fh *histogram.FloatHistogram) { fh.CounterResetHint = histogram.CounterResetHint(buf.Byte()) @@ -589,6 +642,18 @@ func DecodeFloatHistogram(buf *encoding.Decbuf, fh *histogram.FloatHistogram) { } } +// DecodeInfoSample decodes an info sample from a byte slice. +func DecodeInfoSample(buf *encoding.Decbuf) []int { + l := buf.Uvarint() + identifyingLabels := make([]int, 0, l) + for i := 0; i < l; i++ { + idx := buf.Uvarint() + identifyingLabels = append(identifyingLabels, idx) + } + + return identifyingLabels +} + // Encoder encodes series, sample, and tombstones records. // The zero value is ready to use. 
type Encoder struct{} @@ -704,6 +769,32 @@ func (e *Encoder) EncodeExemplarsIntoBuffer(exemplars []RefExemplar, buf *encodi } } +func (e *Encoder) InfoSamples(infoSamples []RefInfoSample, b []byte) []byte { + buf := encoding.Encbuf{B: b} + buf.PutByte(byte(InfoSamples)) + + if len(infoSamples) == 0 { + return buf.Get() + } + + // Store base timestamp and base reference number of first sample. + // All samples encode their timestamp and ref as delta to those. + first := infoSamples[0] + buf.PutBE64(uint64(first.Ref)) + buf.PutBE64int64(first.T) + + for _, s := range infoSamples { + buf.PutVarint64(int64(s.Ref) - int64(first.Ref)) + buf.PutVarint64(s.T - first.T) + buf.PutUvarint(len(s.IdentifyingLabels)) + for _, idx := range s.IdentifyingLabels { + buf.PutUvarint32(uint32(idx)) + } + } + + return buf.Get() +} + func (e *Encoder) MmapMarkers(markers []RefMmapMarker, b []byte) []byte { buf := encoding.Encbuf{B: b} buf.PutByte(byte(MmapMarkers)) @@ -798,7 +889,7 @@ func (e *Encoder) FloatHistogramSamples(histograms []RefFloatHistogramSample, b return buf.Get() } -// Encode encodes the Float Histogram into a byte slice. +// EncodeFloatHistogram encodes the Float Histogram into a byte slice. func EncodeFloatHistogram(buf *encoding.Encbuf, h *histogram.FloatHistogram) { buf.PutByte(byte(h.CounterResetHint)) @@ -831,3 +922,11 @@ func EncodeFloatHistogram(buf *encoding.Encbuf, h *histogram.FloatHistogram) { buf.PutBEFloat64(b) } } + +// EncodeInfoSample encodes the info sample into a byte slice. 
+func EncodeInfoSample(buf *encoding.Encbuf, identifyingLabels []int) { + buf.PutUvarint(len(identifyingLabels)) + for _, idx := range identifyingLabels { + buf.PutUvarint(idx) + } +} diff --git a/tsdb/tsdbblockutil.go b/tsdb/tsdbblockutil.go index f7b27c2e08..4d21dfc03a 100644 --- a/tsdb/tsdbblockutil.go +++ b/tsdb/tsdbblockutil.go @@ -78,6 +78,9 @@ func CreateBlock(series []storage.Series, dir string, chunkRange int64, logger l case chunkenc.ValFloatHistogram: t, fh := it.AtFloatHistogram(nil) ref, err = app.AppendHistogram(ref, lset, t, nil, fh) + case chunkenc.ValInfoSample: + t, identifyingLabels := it.AtInfoSample() + ref, err = app.AppendInfoSample(ref, lset, t, identifyingLabels) default: return "", fmt.Errorf("unknown sample type %s", typ.String()) } diff --git a/tsdb/wlog/checkpoint.go b/tsdb/wlog/checkpoint.go index 4ad1bb2365..d83699cf5c 100644 --- a/tsdb/wlog/checkpoint.go +++ b/tsdb/wlog/checkpoint.go @@ -152,6 +152,7 @@ func Checkpoint(logger log.Logger, w *WL, from, to int, keep func(id chunks.Head series []record.RefSeries samples []record.RefSample histogramSamples []record.RefHistogramSample + infoSamples []record.RefInfoSample tstones []tombstones.Stone exemplars []record.RefExemplar metadata []record.RefMetadata @@ -164,7 +165,7 @@ func Checkpoint(logger log.Logger, w *WL, from, to int, keep func(id chunks.Head latestMetadataMap = make(map[chunks.HeadSeriesRef]record.RefMetadata) ) for r.Next() { - series, samples, histogramSamples, tstones, exemplars, metadata = series[:0], samples[:0], histogramSamples[:0], tstones[:0], exemplars[:0], metadata[:0] + series, samples, histogramSamples, infoSamples, tstones, exemplars, metadata = series[:0], samples[:0], histogramSamples[:0], infoSamples[:0], tstones[:0], exemplars[:0], metadata[:0] // We don't reset the buffer since we batch up multiple records // before writing them to the checkpoint. 
@@ -227,6 +228,26 @@ func Checkpoint(logger log.Logger, w *WL, from, to int, keep func(id chunks.Head stats.TotalSamples += len(samples) stats.DroppedSamples += len(samples) - len(repl) + // XXX: Should we try also record.FloatHistogramSamples? + + case record.InfoSamples: + infoSamples, err = dec.InfoSamples(rec, infoSamples) + if err != nil { + return nil, fmt.Errorf("decode info samples: %w", err) + } + // Drop irrelevant infoSamples in place. + repl := infoSamples[:0] + for _, s := range infoSamples { + if s.T >= mint { + repl = append(repl, s) + } + } + if len(repl) > 0 { + buf = enc.InfoSamples(repl, buf) + } + stats.TotalSamples += len(infoSamples) + stats.DroppedSamples += len(infoSamples) - len(repl) + case record.Tombstones: tstones, err = dec.Tombstones(rec, tstones) if err != nil { diff --git a/tsdb/wlog/watcher.go b/tsdb/wlog/watcher.go index b924944689..02cdb3e285 100644 --- a/tsdb/wlog/watcher.go +++ b/tsdb/wlog/watcher.go @@ -56,6 +56,7 @@ type WriteTo interface { AppendExemplars([]record.RefExemplar) bool AppendHistograms([]record.RefHistogramSample) bool AppendFloatHistograms([]record.RefFloatHistogramSample) bool + AppendInfoSamples([]record.RefInfoSample) bool StoreSeries([]record.RefSeries, int) // Next two methods are intended for garbage-collection: first we call @@ -542,6 +543,8 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error { histogramsToSend []record.RefHistogramSample floatHistograms []record.RefFloatHistogramSample floatHistogramsToSend []record.RefFloatHistogramSample + infoSamples []record.RefInfoSample + infoSamplesToSend []record.RefInfoSample ) for r.Next() && !isClosed(w.quit) { rec := r.Record() @@ -653,6 +656,37 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error { w.writer.AppendFloatHistograms(floatHistogramsToSend) floatHistogramsToSend = floatHistogramsToSend[:0] } + case record.InfoSamples: + /* + // Skip if experimental "info samples over remote write" is not 
enabled. + if !w.sendInfoSamples { + break + } + */ + // If we're not tailing a segment we can ignore any info samples records we see. + // This speeds up replay of the WAL by > 10x. + if !tail { + break + } + samples, err := dec.InfoSamples(rec, infoSamples[:0]) + if err != nil { + w.recordDecodeFailsMetric.Inc() + return err + } + for _, s := range samples { + if s.T > w.startTimestamp { + if !w.sendSamples { + w.sendSamples = true + duration := time.Since(w.startTime) + level.Info(w.logger).Log("msg", "Done replaying WAL", "duration", duration) + } + infoSamplesToSend = append(infoSamplesToSend, s) + } + } + if len(infoSamplesToSend) > 0 { + w.writer.AppendInfoSamples(infoSamplesToSend) + infoSamplesToSend = infoSamplesToSend[:0] + } case record.Tombstones: default: @@ -689,6 +723,8 @@ func (w *Watcher) readSegmentForGC(r *LiveReader, segmentNum int, _ bool) error // Ignore these; we're only interested in series. case record.Samples: case record.Exemplars: + case record.InfoSamples: + // XXX: Check also (float) histograms? 
case record.Tombstones: default: diff --git a/tsdb/wlog/watcher_test.go b/tsdb/wlog/watcher_test.go index ff006cb817..0e770861a5 100644 --- a/tsdb/wlog/watcher_test.go +++ b/tsdb/wlog/watcher_test.go @@ -56,6 +56,7 @@ type writeToMock struct { exemplarsAppended int histogramsAppended int floatHistogramsAppended int + infoSamplesAppended int seriesLock sync.Mutex seriesSegmentIndexes map[chunks.HeadSeriesRef]int @@ -87,6 +88,12 @@ func (wtm *writeToMock) AppendFloatHistograms(fh []record.RefFloatHistogramSampl return true } +func (wtm *writeToMock) AppendInfoSamples(s []record.RefInfoSample) bool { + time.Sleep(wtm.delay) + wtm.infoSamplesAppended += len(s) + return true +} + func (wtm *writeToMock) StoreSeries(series []record.RefSeries, index int) { time.Sleep(wtm.delay) wtm.UpdateSeriesSegment(series, index) diff --git a/web/api/v1/json_codec.go b/web/api/v1/json_codec.go index f1a8104cc4..84ac26a579 100644 --- a/web/api/v1/json_codec.go +++ b/web/api/v1/json_codec.go @@ -72,6 +72,7 @@ func marshalSeriesJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) { stream.WriteObjectField(`metric`) marshalLabelsJSON(s.Metric, stream) + // TODO: Handle info metric samples for i, p := range s.Floats { stream.WriteMore() if i == 0 { @@ -136,6 +137,7 @@ func marshalSampleJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) { stream.WriteArrayStart() jsonutil.MarshalTimestamp(s.T, stream) stream.WriteMore() + // TODO: Handle info metric samples if s.H == nil { jsonutil.MarshalFloat(s.F, stream) } else { diff --git a/web/federate.go b/web/federate.go index 8176eba365..33e5a1a6a5 100644 --- a/web/federate.go +++ b/web/federate.go @@ -114,9 +114,10 @@ Loop: it.Reset(chkIter) var ( - t int64 - f float64 - fh *histogram.FloatHistogram + t int64 + f float64 + fh *histogram.FloatHistogram + ils []int ) valueType := it.Seek(maxt) switch valueType { @@ -124,6 +125,8 @@ Loop: t, f = it.At() case chunkenc.ValFloatHistogram, chunkenc.ValHistogram: t, fh = it.AtFloatHistogram(nil) + case 
chunkenc.ValInfoSample: + t, ils = it.AtInfoSample() default: sample, ok := it.PeekBack(1) if !ok { @@ -150,10 +153,11 @@ Loop: } vec = append(vec, promql.Sample{ - Metric: s.Labels(), - T: t, - F: f, - H: fh, + Metric: s.Labels(), + T: t, + F: f, + H: fh, + IdentifyingLabels: ils, }) } if ws := set.Warnings(); len(ws) > 0 {