From a7695763196185cb9ff5578d1696191c3c16be37 Mon Sep 17 00:00:00 2001 From: Arve Knudsen Date: Sun, 5 Jan 2025 13:12:09 +0100 Subject: [PATCH] ingester/client.QueryStreamResponse: Use memory pooling Signed-off-by: Arve Knudsen --- pkg/distributor/distributor.go | 2 +- pkg/distributor/distributor_test.go | 40 +- pkg/distributor/query.go | 19 +- pkg/distributor/query_test.go | 209 +++- pkg/ingester/client/chunkcompat.go | 4 +- pkg/ingester/client/client.go | 4 +- pkg/ingester/client/custom.go | 896 +++++++++++++++++- pkg/ingester/client/ingester.pb.go | 441 ++++----- pkg/ingester/client/ingester.pb.go.expdiff | 54 -- pkg/ingester/client/ingester.proto | 12 +- pkg/ingester/client/streaming.go | 16 +- pkg/ingester/client/streaming_test.go | 2 +- pkg/ingester/ingester.go | 50 +- pkg/ingester/ingester_test.go | 54 +- pkg/mimirpb/custom.go | 566 +++++++++++ .../distributor_queryable_streaming_test.go | 30 +- pkg/querier/distributor_queryable_test.go | 146 ++- pkg/querier/duplicates_test.go | 6 +- pkg/querier/querier_test.go | 86 +- pkg/querier/timeseries_series_set.go | 8 +- 20 files changed, 2093 insertions(+), 552 deletions(-) delete mode 100644 pkg/ingester/client/ingester.pb.go.expdiff diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index 1400f46c0da..d8ea9650cd8 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -2712,7 +2712,7 @@ func (d *Distributor) MetricsForLabelMatchers(ctx context.Context, from, through } defer func() { for _, resp := range resps { - resp.FreeBuffer() + resp.Release() } }() diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go index ff63b09baa3..3f194a8a883 100644 --- a/pkg/distributor/distributor_test.go +++ b/pkg/distributor/distributor_test.go @@ -6283,28 +6283,34 @@ func (i *mockIngester) QueryStream(ctx context.Context, req *client.QueryRequest if i.disableStreamingResponse || req.StreamingChunksBatchSize == 0 { nonStreamingResponses = append(nonStreamingResponses, &client.QueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: ts.Labels, - Chunks: wireChunks, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: ts.Labels, + Chunks: wireChunks, + }, }, }, }) } else { streamingLabelResponses = append(streamingLabelResponses, &client.QueryStreamResponse{ - StreamingSeries: []client.QueryStreamSeries{ + StreamingSeries: []client.CustomQueryStreamSeries{ { - Labels: ts.Labels, - ChunkCount: int64(len(wireChunks)), + QueryStreamSeries: &client.QueryStreamSeries{ + Labels: ts.Labels, + ChunkCount: int64(len(wireChunks)), + }, }, }, }) streamingChunkResponses = append(streamingChunkResponses, &client.QueryStreamResponse{ - StreamingSeriesChunks: []client.QueryStreamSeriesChunks{ + StreamingSeriesChunks: []client.CustomQueryStreamSeriesChunks{ { - SeriesIndex: uint64(seriesIndex), - Chunks: wireChunks, + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: uint64(seriesIndex), + Chunks: wireChunks, + }, }, }, }) @@ -6384,15 +6390,17 @@ func (i *mockIngester) QueryExemplars(ctx context.Context, req *client.ExemplarQ } if len(exemplars) > 0 { - res.Timeseries = append(res.Timeseries, mimirpb.TimeSeries{ - Labels: series.Labels, - Exemplars: exemplars, + res.Timeseries = append(res.Timeseries, mimirpb.CustomTimeSeries{ + TimeSeries: &mimirpb.TimeSeries{ + Labels: series.Labels, + Exemplars: exemplars, + }, }) } } // Sort series by labels because the real ingester returns sorted ones. 
- slices.SortFunc(res.Timeseries, func(a, b mimirpb.TimeSeries) int { + slices.SortFunc(res.Timeseries, func(a, b mimirpb.CustomTimeSeries) int { aKey := mimirpb.FromLabelAdaptersToKeyString(a.Labels) bKey := mimirpb.FromLabelAdaptersToKeyString(b.Labels) return strings.Compare(aKey, bKey) @@ -6424,7 +6432,11 @@ func (i *mockIngester) MetricsForLabelMatchers(ctx context.Context, req *client. for _, matchers := range multiMatchers { for _, ts := range i.timeseries { if match(ts.Labels, matchers) { - response.Metric = append(response.Metric, &mimirpb.Metric{Labels: ts.Labels}) + response.Metric = append(response.Metric, mimirpb.CustomMetric{ + Metric: &mimirpb.Metric{ + Labels: ts.Labels, + }, + }) } } } diff --git a/pkg/distributor/query.go b/pkg/distributor/query.go index 2a2b922db53..3fcd7a63ab9 100644 --- a/pkg/distributor/query.go +++ b/pkg/distributor/query.go @@ -68,7 +68,7 @@ func (d *Distributor) QueryExemplars(ctx context.Context, from, to model.Time, m } defer func() { for _, r := range results { - r.FreeBuffer() + r.Release() } }() @@ -180,7 +180,7 @@ func mergeExemplarSets(a, b []mimirpb.Exemplar) []mimirpb.Exemplar { func mergeExemplarQueryResponses(results []*ingester_client.ExemplarQueryResponse) *ingester_client.ExemplarQueryResponse { var keys []string - exemplarResults := make(map[string]mimirpb.TimeSeries) + exemplarResults := make(map[string]mimirpb.CustomTimeSeries) for _, r := range results { for _, ts := range r.Timeseries { lbls := mimirpb.FromLabelAdaptersToKeyString(ts.Labels) @@ -199,7 +199,7 @@ func mergeExemplarQueryResponses(results []*ingester_client.ExemplarQueryRespons // Query results from each ingester were sorted, but are not necessarily still sorted after merging. slices.Sort(keys) - result := make([]mimirpb.TimeSeries, len(exemplarResults)) + result := make([]mimirpb.CustomTimeSeries, len(exemplarResults)) for i, k := range keys { ts := exemplarResults[k] for i, l := range ts.Labels { @@ -220,8 +220,8 @@ func mergeExemplarQueryResponses(results []*ingester_client.ExemplarQueryRespons type ingesterQueryResult struct { // Why retain the batches rather than build a single slice? We don't need a single slice for each ingester, so building a single slice for each ingester is a waste of time. - chunkseriesBatches [][]ingester_client.TimeSeriesChunk - timeseriesBatches [][]mimirpb.TimeSeries + chunkseriesBatches [][]ingester_client.CustomTimeSeriesChunk + timeseriesBatches [][]mimirpb.CustomTimeSeries streamingSeries seriesChunksStream } @@ -327,8 +327,8 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, replicationSets [ queryMetrics.IngesterChunksTotal.Add(float64(totalChunks)) }() - hashToChunkseries := map[string]ingester_client.TimeSeriesChunk{} - hashToTimeSeries := map[string]mimirpb.TimeSeries{} + hashToChunkseries := map[string]ingester_client.CustomTimeSeriesChunk{} + hashToTimeSeries := map[string]mimirpb.CustomTimeSeries{} for _, res := range results { // Accumulate any chunk series @@ -370,8 +370,8 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, replicationSets [ // Now turn the accumulated maps into slices. 
resp := ingester_client.CombinedQueryStreamResponse{ - Chunkseries: make([]ingester_client.TimeSeriesChunk, 0, len(hashToChunkseries)), - Timeseries: make([]mimirpb.TimeSeries, 0, len(hashToTimeSeries)), + Chunkseries: make([]ingester_client.CustomTimeSeriesChunk, 0, len(hashToChunkseries)), + Timeseries: make([]mimirpb.CustomTimeSeries, 0, len(hashToTimeSeries)), StreamingSeries: mergeSeriesChunkStreams(results, d.estimatedIngestersPerSeries(replicationSets)), } for _, series := range hashToChunkseries { @@ -395,7 +395,6 @@ func receiveResponse(stream ingester_client.Ingester_QueryStreamClient, streamin if err != nil { return 0, nil, false, err } - defer resp.FreeBuffer() if len(resp.Timeseries) > 0 { for _, series := range resp.Timeseries { diff --git a/pkg/distributor/query_test.go b/pkg/distributor/query_test.go index 9f47672938d..5cddd7e79b9 100644 --- a/pkg/distributor/query_test.go +++ b/pkg/distributor/query_test.go @@ -565,52 +565,185 @@ func TestMergeExemplars(t *testing.T) { labels2 := []mimirpb.LabelAdapter{{Name: "label1", Value: "foo2"}} for i, c := range []struct { - seriesA []mimirpb.TimeSeries - seriesB []mimirpb.TimeSeries - expected []mimirpb.TimeSeries + seriesA []mimirpb.CustomTimeSeries + seriesB []mimirpb.CustomTimeSeries + expected []mimirpb.CustomTimeSeries nonReversible bool }{ { - seriesA: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{}}}, - expected: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{}, + }, + }, + }, }, { - seriesA: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{}}}, - expected: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}, + }, + }, + }, }, { - seriesA: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}}}, - expected: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1}, + }, + }, + }, }, { - seriesA: []mimirpb.TimeSeries{{Labels: 
labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar3, exemplar4}}}, - expected: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3, exemplar4}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar3, exemplar4}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3, exemplar4}, + }, + }, + }, }, { // Ensure that when there are exemplars with duplicate timestamps, the first one wins. - seriesA: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar5, exemplar3, exemplar4}}}, - expected: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3, exemplar4}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar5, exemplar3, exemplar4}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2, exemplar3, exemplar4}, + }, + }, + }, nonReversible: true, }, { // Disjoint exemplars on two different series. - seriesA: []mimirpb.TimeSeries{{Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3, exemplar4}}}, - expected: []mimirpb.TimeSeries{ - {Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}}, - {Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3, exemplar4}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3, exemplar4}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}, + }, + }, + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3, exemplar4}}, + }, + }, }, { // Second input adds to first on one series. 
- seriesA: []mimirpb.TimeSeries{ - {Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}}, - {Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3}}}, - seriesB: []mimirpb.TimeSeries{{Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar4}}}, - expected: []mimirpb.TimeSeries{ - {Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}}, - {Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3, exemplar4}}}, + seriesA: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}, + }, + }, + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3}, + }, + }, + }, + seriesB: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar4}, + }, + }, + }, + expected: []mimirpb.CustomTimeSeries{ + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels1, Exemplars: []mimirpb.Exemplar{exemplar1, exemplar2}, + }, + }, + { + TimeSeries: &mimirpb.TimeSeries{ + Labels: labels2, Exemplars: []mimirpb.Exemplar{exemplar3, exemplar4}}, + }, + }, }, } { t.Run(fmt.Sprint("test", i), func(t *testing.T) { @@ -629,19 +762,21 @@ func TestMergeExemplars(t *testing.T) { func makeExemplarQueryResponse(numSeries int) *ingester_client.ExemplarQueryResponse { now := time.Now() - ts := make([]mimirpb.TimeSeries, numSeries) + ts := make([]mimirpb.CustomTimeSeries, numSeries) for i := 0; i < numSeries; i++ { lbls := labels.NewBuilder(labels.EmptyLabels()) lbls.Set(model.MetricNameLabel, "foo") for i := 0; i < 10; i++ { lbls.Set(fmt.Sprintf("name_%d", i), fmt.Sprintf("value_%d_%d", i, rand.Intn(10))) } - ts[i].Labels = mimirpb.FromLabelsToLabelAdapters(lbls.Labels()) - ts[i].Exemplars = []mimirpb.Exemplar{{ - Labels: []mimirpb.LabelAdapter{{Name: "traceid", Value: "trace1"}}, - Value: float64(i), - TimestampMs: now.Add(time.Hour).UnixNano() / int64(time.Millisecond), - }} + ts[i].TimeSeries = &mimirpb.TimeSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(lbls.Labels()), + Exemplars: []mimirpb.Exemplar{{ + Labels: []mimirpb.LabelAdapter{{Name: "traceid", Value: "trace1"}}, + Value: float64(i), + TimestampMs: now.Add(time.Hour).UnixNano() / int64(time.Millisecond), + }}, + } } return &ingester_client.ExemplarQueryResponse{Timeseries: ts} diff --git a/pkg/ingester/client/chunkcompat.go b/pkg/ingester/client/chunkcompat.go index d3eedaf43d3..3771f9645d8 100644 --- a/pkg/ingester/client/chunkcompat.go +++ b/pkg/ingester/client/chunkcompat.go @@ -100,7 +100,7 @@ func fromLabelsToMetric(ls labels.Labels) model.Metric { } // TimeSeriesChunksToMatrix converts slice of []client.TimeSeriesChunk to a model.Matrix. 
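+// The serieses argument now holds CustomTimeSeriesChunk wrappers around pooled TimeSeriesChunk values.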
-func TimeSeriesChunksToMatrix(from, through model.Time, serieses []TimeSeriesChunk) (model.Matrix, error) { +func TimeSeriesChunksToMatrix(from, through model.Time, serieses []CustomTimeSeriesChunk) (model.Matrix, error) { if serieses == nil { return nil, nil } @@ -116,7 +116,7 @@ func TimeSeriesChunksToMatrix(from, through model.Time, serieses []TimeSeriesChu return result, nil } -func TimeseriesToMatrix(from, through model.Time, series []mimirpb.TimeSeries) (model.Matrix, error) { +func TimeseriesToMatrix(from, through model.Time, series []mimirpb.CustomTimeSeries) (model.Matrix, error) { if series == nil { return nil, nil } diff --git a/pkg/ingester/client/client.go b/pkg/ingester/client/client.go index 262dcdda789..1be1de5af7e 100644 --- a/pkg/ingester/client/client.go +++ b/pkg/ingester/client/client.go @@ -80,7 +80,7 @@ func (cfg *Config) Validate() error { } type CombinedQueryStreamResponse struct { - Chunkseries []TimeSeriesChunk - Timeseries []mimirpb.TimeSeries + Chunkseries []CustomTimeSeriesChunk + Timeseries []mimirpb.CustomTimeSeries StreamingSeries []StreamingSeries } diff --git a/pkg/ingester/client/custom.go b/pkg/ingester/client/custom.go index c6ac7830119..69a884c362b 100644 --- a/pkg/ingester/client/custom.go +++ b/pkg/ingester/client/custom.go @@ -6,14 +6,43 @@ package client import ( + "fmt" + io "io" + "slices" + "sync" + "github.com/pkg/errors" "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" + "github.com/grafana/mimir/pkg/mimirpb" "github.com/grafana/mimir/pkg/storage/chunk" ) -func ChunksCount(series []TimeSeriesChunk) int { +var ( + chunkDataPool = sync.Pool{ + New: func() any { + return mimirpb.UnsafeByteSlice{} + }, + } + timeSeriesChunkPool = sync.Pool{ + New: func() any { + return &TimeSeriesChunk{} + }, + } + queryStreamSeriesPool = sync.Pool{ + New: func() any { + return &QueryStreamSeries{} + }, + } + queryStreamSeriesChunksPool = sync.Pool{ + New: func() any { + return &QueryStreamSeriesChunks{} + }, + } +) + +func ChunksCount(series []CustomTimeSeriesChunk) int { if len(series) == 0 { return 0 } @@ -25,7 +54,7 @@ func ChunksCount(series []TimeSeriesChunk) int { return count } -func ChunksSize(series []TimeSeriesChunk) int { +func ChunksSize(series []CustomTimeSeriesChunk) int { if len(series) == 0 { return 0 } @@ -65,3 +94,866 @@ func ChunkFromMeta(meta chunks.Meta) (Chunk, error) { func DefaultMetricsMetadataRequest() *MetricsMetadataRequest { return &MetricsMetadataRequest{Limit: -1, LimitPerMetric: -1, Metric: ""} } + +type CustomTimeSeriesChunk struct { + *TimeSeriesChunk +} + +// Release back to pool. 
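+// Chunk data buffers are returned to chunkDataPool and the embedded TimeSeriesChunk to
+// timeSeriesChunkPool; the label and chunk slices are truncated for reuse and the embedded
+// pointer is cleared, so the wrapper must not be used after Release.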
+func (m *CustomTimeSeriesChunk) Release() { + for _, chk := range m.Chunks { + //nolint:staticcheck + chunkDataPool.Put(chk.Data) + chk.Data = nil + } + m.Labels = m.Labels[:0] + m.Chunks = m.Chunks[:0] + timeSeriesChunkPool.Put(m.TimeSeriesChunk) + m.TimeSeriesChunk = nil +} + +func (m *CustomTimeSeriesChunk) Unmarshal(data []byte) error { + m.TimeSeriesChunk = timeSeriesChunkPool.Get().(*TimeSeriesChunk) + m.Labels = m.Labels[:0] + m.Chunks = m.Chunks[:0] + + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: TimeSeriesChunk: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: TimeSeriesChunk: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field FromIngesterId", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + intStringLen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.FromIngesterId = string(data[index:postIndex]) + index = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field UserId", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + intStringLen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.UserId = string(data[index:postIndex]) + index = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + + var la mimirpb.LabelAdapter + if err := unmarshalLabelAdapter(&la, data[index:postIndex]); err != nil { + return err + } + m.Labels = append(m.Labels, la) + index = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Chunks", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= 
int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var chk Chunk + if err := unmarshalChunk(&chk, data[index:postIndex]); err != nil { + return err + } + m.Chunks = append(m.Chunks, chk) + if err := m.Chunks[len(m.Chunks)-1].Unmarshal(data[index:postIndex]); err != nil { + return err + } + index = postIndex + default: + index = preIndex + skippy, err := skipIngester(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} + +func unmarshalLabelAdapter(la *mimirpb.LabelAdapter, data []byte) error { + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return mimirpb.ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: LabelPair: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: LabelPair: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return mimirpb.ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return mimirpb.ErrInvalidLengthMimir + } + postIndex := index + byteLen + if postIndex < 0 { + return mimirpb.ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + // TODO: Consider using a pool: Get byte slice from pool, copy the data to it, and take a yoloString. + la.Name = string(data[index:postIndex]) + index = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return mimirpb.ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return mimirpb.ErrInvalidLengthMimir + } + postIndex := index + byteLen + if postIndex < 0 { + return mimirpb.ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + // TODO: Consider using a pool: Get byte slice from pool, copy the data to it, and take a yoloString. 
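+// For now the bytes are copied into a fresh string, so each label value still allocates.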
+ la.Value = string(data[index:postIndex]) + index = postIndex + default: + index = preIndex + skippy, err := skipMimir(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return mimirpb.ErrInvalidLengthMimir + } + if (index + skippy) < 0 { + return mimirpb.ErrInvalidLengthMimir + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + if index > l { + return io.ErrUnexpectedEOF + } + + return nil +} + +func skipMimir(data []byte) (n int, err error) { + l := len(data) + index := 0 + for index < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, mimirpb.ErrIntOverflowMimir + } + if index >= l { + return 0, io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, mimirpb.ErrIntOverflowMimir + } + if index >= l { + return 0, io.ErrUnexpectedEOF + } + index++ + if data[index-1] < 0x80 { + break + } + } + return index, nil + case 1: + index += 8 + return index, nil + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, mimirpb.ErrIntOverflowMimir + } + if index >= l { + return 0, io.ErrUnexpectedEOF + } + b := data[index] + index++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, mimirpb.ErrInvalidLengthMimir + } + index += length + if index < 0 { + return 0, mimirpb.ErrInvalidLengthMimir + } + return index, nil + case 3: + for { + var innerWire uint64 + start := index + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, mimirpb.ErrIntOverflowMimir + } + if index >= l { + return 0, io.ErrUnexpectedEOF + } + b := data[index] + index++ + innerWire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + innerWireType := int(innerWire & 0x7) + if innerWireType == 4 { + break + } + next, err := skipMimir(data[start:]) + if err != nil { + return 0, err + } + index = start + next + if index < 0 { + return 0, mimirpb.ErrInvalidLengthMimir + } + } + return index, nil + case 4: + return index, nil + case 5: + index += 4 + return index, nil + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + } + panic("unreachable") +} + +func unmarshalChunk(chk *Chunk, data []byte) error { + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Chunk: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Chunk: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field StartTimestampMs", wireType) + } + chk.StartTimestampMs = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + chk.StartTimestampMs |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field EndTimestampMs", 
wireType) + } + chk.EndTimestampMs = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + chk.EndTimestampMs |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Encoding", wireType) + } + chk.Encoding = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + chk.Encoding |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Data", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + byteLen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + chk.Data = slices.Grow(chunkDataPool.Get().(mimirpb.UnsafeByteSlice)[:0], l)[0:l] + copy(chk.Data, data[index:postIndex]) + index = postIndex + default: + index = preIndex + skippy, err := skipIngester(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} + +type CustomQueryStreamSeries struct { + *QueryStreamSeries +} + +// Release back to pool. 
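+// The label slice is truncated for reuse and the embedded QueryStreamSeries is returned to
+// queryStreamSeriesPool; the embedded pointer is cleared, so the wrapper must not be used after Release.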
+func (m *CustomQueryStreamSeries) Release() { + m.Labels = m.Labels[:0] + queryStreamSeriesPool.Put(m.QueryStreamSeries) + m.QueryStreamSeries = nil +} + +func (m *CustomQueryStreamSeries) Unmarshal(data []byte) error { + m.QueryStreamSeries = queryStreamSeriesPool.Get().(*QueryStreamSeries) + m.Labels = m.Labels[:0] + + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: QueryStreamSeries: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: QueryStreamSeries: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var la mimirpb.LabelAdapter + if err := unmarshalLabelAdapter(&la, data[index:postIndex]); err != nil { + return err + } + m.Labels = append(m.Labels, la) + index = postIndex + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ChunkCount", wireType) + } + m.ChunkCount = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + m.ChunkCount |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + index = preIndex + skippy, err := skipIngester(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} + +type CustomQueryStreamSeriesChunks struct { + *QueryStreamSeriesChunks +} + +// Release back to pool. 
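+// Chunk data buffers are returned to chunkDataPool and the embedded QueryStreamSeriesChunks to
+// queryStreamSeriesChunksPool; the embedded pointer is cleared, so the wrapper must not be used after Release.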
+func (m *CustomQueryStreamSeriesChunks) Release() { + for _, chk := range m.Chunks { + //nolint:staticcheck + chunkDataPool.Put(chk.Data) + chk.Data = nil + } + m.Chunks = m.Chunks[:0] + queryStreamSeriesChunksPool.Put(m.QueryStreamSeriesChunks) + m.QueryStreamSeriesChunks = nil +} + +func (m *CustomQueryStreamSeriesChunks) Unmarshal(data []byte) error { + m.QueryStreamSeriesChunks = queryStreamSeriesChunksPool.Get().(*QueryStreamSeriesChunks) + m.Chunks = m.Chunks[:0] + + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: QueryStreamSeriesChunks: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: QueryStreamSeriesChunks: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field SeriesIndex", wireType) + } + m.SeriesIndex = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + m.SeriesIndex |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Chunks", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIngester + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIngester + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthIngester + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Chunks = append(m.Chunks, Chunk{}) + if err := m.Chunks[len(m.Chunks)-1].Unmarshal(data[index:postIndex]); err != nil { + return err + } + index = postIndex + default: + index = preIndex + skippy, err := skipIngester(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) < 0 { + return ErrInvalidLengthIngester + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} + +// Release held resources to pool. +func (m *QueryStreamResponse) Release() { + for _, s := range m.Chunkseries { + s.Release() + } + m.Chunkseries = nil + for _, s := range m.Timeseries { + s.Release() + } + m.Timeseries = nil + for _, s := range m.StreamingSeries { + s.Release() + } + m.StreamingSeriesChunks = nil + for _, s := range m.StreamingSeriesChunks { + s.Release() + } +} + +// Release back to pool. +func (m *MetricsForLabelMatchersResponse) Release() { + for _, me := range m.Metric { + me.Release() + } + m.Metric = nil +} + +// Release back to pool. 
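+// Every pooled time series held by the response is released and the Timeseries slice is dropped.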
+func (m *ExemplarQueryResponse) Release() { + for _, s := range m.Timeseries { + s.Release() + } + m.Timeseries = nil +} diff --git a/pkg/ingester/client/ingester.pb.go b/pkg/ingester/client/ingester.pb.go index 9398a5d80b5..498d2bb73b5 100644 --- a/pkg/ingester/client/ingester.pb.go +++ b/pkg/ingester/client/ingester.pb.go @@ -582,9 +582,6 @@ func (m *ActiveSeriesRequest) GetType() ActiveSeriesRequest_RequestType { } type QueryResponse struct { - // Keep reference to buffer for unsafe references. - mimirpb.BufferHolder - Timeseries []mimirpb.TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"` } @@ -636,14 +633,11 @@ func (m *QueryResponse) GetTimeseries() []mimirpb.TimeSeries { // // Only one of these two options will be populated. type QueryStreamResponse struct { - // Keep reference to buffer for unsafe references. - mimirpb.BufferHolder - - Chunkseries []TimeSeriesChunk `protobuf:"bytes,1,rep,name=chunkseries,proto3" json:"chunkseries"` - Timeseries []mimirpb.TimeSeries `protobuf:"bytes,2,rep,name=timeseries,proto3" json:"timeseries"` - StreamingSeries []QueryStreamSeries `protobuf:"bytes,3,rep,name=streaming_series,json=streamingSeries,proto3" json:"streaming_series"` - IsEndOfSeriesStream bool `protobuf:"varint,4,opt,name=is_end_of_series_stream,json=isEndOfSeriesStream,proto3" json:"is_end_of_series_stream,omitempty"` - StreamingSeriesChunks []QueryStreamSeriesChunks `protobuf:"bytes,5,rep,name=streaming_series_chunks,json=streamingSeriesChunks,proto3" json:"streaming_series_chunks"` + Chunkseries []CustomTimeSeriesChunk `protobuf:"bytes,1,rep,name=chunkseries,proto3,customtype=CustomTimeSeriesChunk" json:"chunkseries"` + Timeseries []github_com_grafana_mimir_pkg_mimirpb.CustomTimeSeries `protobuf:"bytes,2,rep,name=timeseries,proto3,customtype=github.com/grafana/mimir/pkg/mimirpb.CustomTimeSeries" json:"timeseries"` + StreamingSeries []CustomQueryStreamSeries `protobuf:"bytes,3,rep,name=streaming_series,json=streamingSeries,proto3,customtype=CustomQueryStreamSeries" json:"streaming_series"` + IsEndOfSeriesStream bool `protobuf:"varint,4,opt,name=is_end_of_series_stream,json=isEndOfSeriesStream,proto3" json:"is_end_of_series_stream,omitempty"` + StreamingSeriesChunks []CustomQueryStreamSeriesChunks `protobuf:"bytes,5,rep,name=streaming_series_chunks,json=streamingSeriesChunks,proto3,customtype=CustomQueryStreamSeriesChunks" json:"streaming_series_chunks"` } func (m *QueryStreamResponse) Reset() { *m = QueryStreamResponse{} } @@ -678,27 +672,6 @@ func (m *QueryStreamResponse) XXX_DiscardUnknown() { var xxx_messageInfo_QueryStreamResponse proto.InternalMessageInfo -func (m *QueryStreamResponse) GetChunkseries() []TimeSeriesChunk { - if m != nil { - return m.Chunkseries - } - return nil -} - -func (m *QueryStreamResponse) GetTimeseries() []mimirpb.TimeSeries { - if m != nil { - return m.Timeseries - } - return nil -} - -func (m *QueryStreamResponse) GetStreamingSeries() []QueryStreamSeries { - if m != nil { - return m.StreamingSeries - } - return nil -} - func (m *QueryStreamResponse) GetIsEndOfSeriesStream() bool { if m != nil { return m.IsEndOfSeriesStream @@ -706,13 +679,6 @@ func (m *QueryStreamResponse) GetIsEndOfSeriesStream() bool { return false } -func (m *QueryStreamResponse) GetStreamingSeriesChunks() []QueryStreamSeriesChunks { - if m != nil { - return m.StreamingSeriesChunks - } - return nil -} - type QueryStreamSeries struct { Labels []github_com_grafana_mimir_pkg_mimirpb.LabelAdapter 
`protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/grafana/mimir/pkg/mimirpb.LabelAdapter" json:"labels"` ChunkCount int64 `protobuf:"varint,2,opt,name=chunk_count,json=chunkCount,proto3" json:"chunk_count,omitempty"` @@ -809,10 +775,7 @@ func (m *QueryStreamSeriesChunks) GetChunks() []Chunk { } type ExemplarQueryResponse struct { - // Keep reference to buffer for unsafe references. - mimirpb.BufferHolder - - Timeseries []mimirpb.TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"` + Timeseries []github_com_grafana_mimir_pkg_mimirpb.CustomTimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3,customtype=github.com/grafana/mimir/pkg/mimirpb.CustomTimeSeries" json:"timeseries"` } func (m *ExemplarQueryResponse) Reset() { *m = ExemplarQueryResponse{} } @@ -847,13 +810,6 @@ func (m *ExemplarQueryResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ExemplarQueryResponse proto.InternalMessageInfo -func (m *ExemplarQueryResponse) GetTimeseries() []mimirpb.TimeSeries { - if m != nil { - return m.Timeseries - } - return nil -} - type LabelValuesRequest struct { LabelName string `protobuf:"bytes,1,opt,name=label_name,json=labelName,proto3" json:"label_name,omitempty"` StartTimestampMs int64 `protobuf:"varint,2,opt,name=start_timestamp_ms,json=startTimestampMs,proto3" json:"start_timestamp_ms,omitempty"` @@ -1330,10 +1286,7 @@ func (m *MetricsForLabelMatchersRequest) GetMatchersSet() []*LabelMatchers { } type MetricsForLabelMatchersResponse struct { - // Keep reference to buffer for unsafe references. - mimirpb.BufferHolder - - Metric []*mimirpb.Metric `protobuf:"bytes,1,rep,name=metric,proto3" json:"metric,omitempty"` + Metric []github_com_grafana_mimir_pkg_mimirpb.CustomMetric `protobuf:"bytes,1,rep,name=metric,proto3,customtype=github.com/grafana/mimir/pkg/mimirpb.CustomMetric" json:"metric,omitempty"` } func (m *MetricsForLabelMatchersResponse) Reset() { *m = MetricsForLabelMatchersResponse{} } @@ -1368,13 +1321,6 @@ func (m *MetricsForLabelMatchersResponse) XXX_DiscardUnknown() { var xxx_messageInfo_MetricsForLabelMatchersResponse proto.InternalMessageInfo -func (m *MetricsForLabelMatchersResponse) GetMetric() []*mimirpb.Metric { - if m != nil { - return m.Metric - } - return nil -} - type MetricsMetadataRequest struct { Limit int32 `protobuf:"zigzag32,1,opt,name=limit,proto3" json:"limit,omitempty"` LimitPerMetric int32 `protobuf:"zigzag32,2,opt,name=limit_per_metric,json=limitPerMetric,proto3" json:"limit_per_metric,omitempty"` @@ -1478,9 +1424,6 @@ func (m *MetricsMetadataResponse) GetMetadata() []*mimirpb.MetricMetadata { } type ActiveSeriesResponse struct { - // Keep reference to buffer for unsafe references. - mimirpb.BufferHolder - Metric []*mimirpb.Metric `protobuf:"bytes,1,rep,name=metric,proto3" json:"metric,omitempty"` // bucket_count is only used when the request type was NATIVE_HISTOGRAM_SERIES. // bucket_count contains the native histogram active buckets count for each series in "metric" above. 
@@ -1796,117 +1739,121 @@ func init() { func init() { proto.RegisterFile("ingester.proto", fileDescriptor_60f6df4f3586b478) } var fileDescriptor_60f6df4f3586b478 = []byte{ - // 1748 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0x4b, 0x6f, 0x1b, 0xc9, - 0x11, 0x66, 0xf3, 0xb5, 0x62, 0x91, 0x92, 0xa8, 0xa6, 0x24, 0x72, 0x47, 0xab, 0x91, 0x76, 0x02, - 0xef, 0x32, 0x9b, 0x5d, 0xc9, 0xaf, 0x2c, 0xbc, 0x9b, 0x0d, 0x02, 0x4a, 0xa6, 0x2d, 0xda, 0xa6, - 0x24, 0x0f, 0x25, 0xe7, 0x01, 0x18, 0x83, 0x21, 0xd9, 0x92, 0x06, 0xe2, 0x0c, 0x99, 0x99, 0xa6, - 0x61, 0xf9, 0x94, 0x53, 0xce, 0xf9, 0x01, 0x41, 0x80, 0xdc, 0x82, 0x1c, 0x73, 0xc9, 0x25, 0xc8, - 0xd9, 0x97, 0x00, 0xbe, 0xc5, 0x08, 0x10, 0x23, 0x96, 0x2f, 0xc9, 0xcd, 0x40, 0xfe, 0x40, 0x30, - 0xdd, 0x3d, 0x4f, 0x52, 0x12, 0x65, 0xc4, 0x3e, 0x91, 0x5d, 0xaf, 0xfe, 0xaa, 0xba, 0xba, 0xaa, - 0xa6, 0x61, 0xc6, 0xb0, 0x0e, 0x89, 0x43, 0x89, 0xbd, 0x36, 0xb0, 0xfb, 0xb4, 0x8f, 0xb3, 0x9d, - 0xbe, 0x4d, 0xc9, 0x53, 0xe9, 0xab, 0x43, 0x83, 0x1e, 0x0d, 0xdb, 0x6b, 0x9d, 0xbe, 0xb9, 0x7e, - 0xd8, 0x3f, 0xec, 0xaf, 0x33, 0x76, 0x7b, 0x78, 0xc0, 0x56, 0x6c, 0xc1, 0xfe, 0x71, 0x35, 0xe9, - 0x6a, 0x58, 0xdc, 0xd6, 0x0f, 0x74, 0x4b, 0x5f, 0x37, 0x0d, 0xd3, 0xb0, 0xd7, 0x07, 0xc7, 0x87, - 0xfc, 0xdf, 0xa0, 0xcd, 0x7f, 0xb9, 0x86, 0xf2, 0x6b, 0x04, 0xd2, 0x03, 0xbd, 0x4d, 0x7a, 0xdb, - 0xba, 0x49, 0x9c, 0x9a, 0xd5, 0x7d, 0xa4, 0xf7, 0x86, 0xc4, 0x51, 0xc9, 0x2f, 0x87, 0xc4, 0xa1, - 0xf8, 0x2a, 0x4c, 0x99, 0x3a, 0xed, 0x1c, 0x11, 0xdb, 0xa9, 0xa0, 0xd5, 0x54, 0x35, 0x7f, 0x7d, - 0x7e, 0x8d, 0x43, 0x5b, 0x63, 0x5a, 0x4d, 0xce, 0x54, 0x7d, 0x29, 0xfc, 0x35, 0x14, 0x3a, 0xfd, - 0xa1, 0x45, 0x35, 0x93, 0xd0, 0xa3, 0x7e, 0xb7, 0x92, 0x5c, 0x45, 0xd5, 0x99, 0xeb, 0x25, 0x4f, - 0x6b, 0xd3, 0xe5, 0x35, 0x19, 0x4b, 0xcd, 0x77, 0x82, 0x85, 0xb2, 0x05, 0x4b, 0x63, 0x71, 0x38, - 0x83, 0xbe, 0xe5, 0x10, 0xfc, 0x7d, 0xc8, 0x18, 0x94, 0x98, 0x1e, 0x8a, 0x52, 0x04, 0x85, 0x90, - 0xe5, 0x12, 0xca, 0x6d, 0xc8, 0x87, 0xa8, 0x78, 0x19, 0xa0, 0xe7, 0x2e, 0x35, 0x4b, 0x37, 0x49, - 0x05, 0xad, 0xa2, 0x6a, 0x4e, 0xcd, 0xf5, 0xbc, 0xad, 0xf0, 0x22, 0x64, 0x9f, 0x30, 0xc1, 0x4a, - 0x72, 0x35, 0x55, 0xcd, 0xa9, 0x62, 0xa5, 0xfc, 0x11, 0xc1, 0x72, 0xc8, 0xcc, 0xa6, 0x6e, 0x77, - 0x0d, 0x4b, 0xef, 0x19, 0xf4, 0xc4, 0x8b, 0xcd, 0x0a, 0xe4, 0x03, 0xc3, 0x1c, 0x58, 0x4e, 0x05, - 0xdf, 0xb2, 0x13, 0x09, 0x5e, 0xf2, 0x9d, 0x82, 0x97, 0x9a, 0x30, 0x78, 0xfb, 0x20, 0x9f, 0x85, - 0x55, 0xc4, 0xef, 0x46, 0x34, 0x7e, 0xcb, 0xa3, 0xf1, 0x6b, 0x11, 0xdb, 0x20, 0x0e, 0xdb, 0xc2, - 0x8b, 0xe4, 0x2b, 0x04, 0x0b, 0x63, 0x05, 0x2e, 0x0a, 0xaa, 0x0e, 0x98, 0xb3, 0x59, 0x30, 0x35, - 0x87, 0x69, 0x8a, 0x18, 0xdc, 0x38, 0x77, 0xeb, 0x11, 0x6a, 0xdd, 0xa2, 0xf6, 0x89, 0x5a, 0xec, - 0xc5, 0xc8, 0xd2, 0xe6, 0x28, 0x34, 0x26, 0x8a, 0x8b, 0x90, 0x3a, 0x26, 0x27, 0x02, 0x93, 0xfb, - 0x17, 0xcf, 0x43, 0x86, 0xe1, 0x60, 0xb9, 0x98, 0x56, 0xf9, 0xe2, 0xdb, 0xe4, 0x2d, 0xa4, 0xfc, - 0x1d, 0x41, 0xe1, 0xe1, 0x90, 0xd8, 0xfe, 0x99, 0x7e, 0x09, 0xd8, 0xa1, 0xba, 0x4d, 0x35, 0x6a, - 0x98, 0xc4, 0xa1, 0xba, 0x39, 0xd0, 0x58, 0xcc, 0x50, 0x35, 0xa5, 0x16, 0x19, 0x67, 0xcf, 0x63, - 0x34, 0x1d, 0x5c, 0x85, 0x22, 0xb1, 0xba, 0x51, 0xd9, 0x24, 0x93, 0x9d, 0x21, 0x56, 0x37, 0x2c, - 0x19, 0x4e, 0x85, 0xd4, 0x44, 0xa9, 0xf0, 0x63, 0x58, 0x72, 0xa8, 0x4d, 0x74, 0xd3, 0xb0, 0x0e, - 0xb5, 0xce, 0xd1, 0xd0, 0x3a, 0x76, 0xb4, 0xb6, 0xcb, 0xd4, 0x1c, 0xe3, 0x19, 0xa9, 0x74, 0x99, - 0x2b, 0x15, 0x5f, 0x64, 0x93, 0x49, 0x6c, 0xb8, 0x02, 0x2d, 0xe3, 0x19, 0x51, 0x7e, 0x8f, 0x60, - 
0xbe, 0xfe, 0x94, 0x98, 0x83, 0x9e, 0x6e, 0x7f, 0x10, 0x0f, 0xaf, 0x8d, 0x78, 0xb8, 0x30, 0xce, - 0x43, 0x27, 0x70, 0x51, 0xf9, 0x0b, 0x82, 0x52, 0xad, 0x43, 0x8d, 0x27, 0xe2, 0xfc, 0xde, 0xbd, - 0xe8, 0xfc, 0x08, 0xd2, 0xf4, 0x64, 0x40, 0x44, 0xb1, 0xf9, 0xdc, 0x93, 0x1e, 0x63, 0x7c, 0x4d, - 0xfc, 0xee, 0x9d, 0x0c, 0x88, 0xca, 0x94, 0x94, 0xaf, 0x21, 0x1f, 0x22, 0x62, 0x80, 0x6c, 0xab, - 0xae, 0x36, 0xea, 0xad, 0x62, 0x02, 0x2f, 0x41, 0x79, 0xbb, 0xb6, 0xd7, 0x78, 0x54, 0xd7, 0xb6, - 0x1a, 0xad, 0xbd, 0x9d, 0xbb, 0x6a, 0xad, 0xa9, 0x09, 0x26, 0x52, 0xee, 0xc3, 0xb4, 0x88, 0xac, - 0xb8, 0x63, 0xdf, 0x02, 0xb0, 0x40, 0xf1, 0x6c, 0x8f, 0x22, 0x1f, 0xb4, 0xd7, 0xdc, 0x68, 0x71, - 0x2c, 0x1b, 0xe9, 0xe7, 0xaf, 0x56, 0x12, 0x6a, 0x48, 0x5a, 0xf9, 0x6f, 0x12, 0x4a, 0xcc, 0x5a, - 0x8b, 0x9d, 0xa8, 0x6f, 0xf3, 0x27, 0x90, 0xe7, 0x87, 0x1f, 0x36, 0x5a, 0xf6, 0x1c, 0x0c, 0x4c, - 0xb2, 0xf3, 0x17, 0x76, 0xc3, 0x1a, 0x31, 0x50, 0xc9, 0xcb, 0x80, 0xc2, 0xf7, 0xa0, 0x18, 0xe4, - 0xa0, 0xb0, 0xc0, 0xcf, 0xf6, 0x63, 0x0f, 0x41, 0x08, 0x73, 0xc4, 0xcc, 0xac, 0xaf, 0xc8, 0xc9, - 0xf8, 0x26, 0x94, 0x0d, 0x47, 0x73, 0x93, 0xa9, 0x7f, 0x20, 0x6c, 0x69, 0x5c, 0xa6, 0x92, 0x5e, - 0x45, 0xd5, 0x29, 0xb5, 0x64, 0x38, 0x75, 0xab, 0xbb, 0x73, 0xc0, 0xe5, 0xb9, 0x49, 0xfc, 0x18, - 0xca, 0x71, 0x04, 0xe2, 0x32, 0x54, 0x32, 0x0c, 0xc8, 0xca, 0x99, 0x40, 0xc4, 0x8d, 0xe0, 0x70, - 0x16, 0x62, 0x70, 0x38, 0x53, 0xf9, 0x2d, 0x82, 0xb9, 0x11, 0x45, 0x7c, 0x00, 0x59, 0x56, 0x6e, - 0xe2, 0xcd, 0x66, 0xd0, 0xe6, 0xf9, 0xb7, 0xab, 0x1b, 0xf6, 0xc6, 0x37, 0xae, 0xdd, 0x7f, 0xbc, - 0x5a, 0xb9, 0x36, 0x49, 0xcb, 0xe5, 0x7a, 0xb5, 0xae, 0x3e, 0xa0, 0xc4, 0x56, 0x85, 0x75, 0xb7, - 0x81, 0x30, 0x5f, 0x34, 0x56, 0xca, 0xc5, 0xbd, 0x02, 0x46, 0x62, 0xb5, 0x50, 0x31, 0xa0, 0x7c, - 0x86, 0x5b, 0xf8, 0x53, 0x28, 0x88, 0x70, 0x18, 0x56, 0x97, 0x3c, 0x65, 0x17, 0x38, 0xad, 0xe6, - 0x39, 0xad, 0xe1, 0x92, 0xf0, 0x0f, 0x20, 0x2b, 0x42, 0xc5, 0x4f, 0x7d, 0xda, 0x6f, 0x23, 0xa1, - 0x5c, 0x11, 0x22, 0x4a, 0x0b, 0x16, 0x62, 0xe5, 0xe2, 0xff, 0x90, 0xd4, 0x7f, 0x45, 0x80, 0xc3, - 0x0d, 0x5a, 0xdc, 0xef, 0x0b, 0x9a, 0xc7, 0xf8, 0x0a, 0x95, 0xbc, 0x44, 0x85, 0x4a, 0x5d, 0x58, - 0xa1, 0xdc, 0x94, 0x9b, 0xa0, 0x42, 0xdd, 0x82, 0x52, 0x04, 0xbf, 0x88, 0xc9, 0xa7, 0x50, 0x08, - 0xb5, 0x37, 0xaf, 0xf5, 0xe7, 0x83, 0x1e, 0xe5, 0x28, 0xbf, 0x43, 0x30, 0x17, 0xcc, 0x33, 0x1f, - 0xb6, 0xf8, 0x4e, 0xe4, 0xda, 0x0f, 0xc5, 0xd1, 0x08, 0x7c, 0xc2, 0xb3, 0x8b, 0x66, 0x1a, 0xe5, - 0x1e, 0x14, 0xf7, 0x1d, 0x62, 0xb7, 0xa8, 0x4e, 0x7d, 0xaf, 0xe2, 0x53, 0x0b, 0x9a, 0x70, 0x6a, - 0xf9, 0x33, 0x82, 0xb9, 0x90, 0x31, 0x01, 0xe1, 0x8a, 0x37, 0x0c, 0x1b, 0x7d, 0x4b, 0xb3, 0x75, - 0xca, 0x33, 0x04, 0xa9, 0xd3, 0x3e, 0x55, 0xd5, 0x29, 0x71, 0x93, 0xc8, 0x1a, 0x9a, 0xc1, 0x68, - 0xe1, 0xa6, 0x7f, 0xce, 0x1a, 0x7a, 0x77, 0xf8, 0x4b, 0xc0, 0xfa, 0xc0, 0xd0, 0x62, 0x96, 0x52, - 0xcc, 0x52, 0x51, 0x1f, 0x18, 0x8d, 0x88, 0xb1, 0x35, 0x28, 0xd9, 0xc3, 0x1e, 0x89, 0x8b, 0xa7, - 0x99, 0xf8, 0x9c, 0xcb, 0x8a, 0xc8, 0x2b, 0x8f, 0xa1, 0xe4, 0x02, 0x6f, 0xdc, 0x8e, 0x42, 0x2f, - 0xc3, 0x47, 0x43, 0x87, 0xd8, 0x9a, 0xd1, 0x15, 0x59, 0x9d, 0x75, 0x97, 0x8d, 0x2e, 0xfe, 0x0a, - 0xd2, 0x5d, 0x9d, 0xea, 0x0c, 0x66, 0xa8, 0x78, 0x8e, 0x38, 0xaf, 0x32, 0x31, 0xe5, 0x2e, 0x60, - 0x97, 0xe5, 0x44, 0xad, 0x5f, 0x83, 0x8c, 0xe3, 0x12, 0xc4, 0x25, 0x5c, 0x0a, 0x5b, 0x89, 0x21, - 0x51, 0xb9, 0xa4, 0xf2, 0x27, 0x04, 0x72, 0x93, 0x50, 0xdb, 0xe8, 0x38, 0x77, 0xfa, 0x76, 0x34, - 0x15, 0xde, 0x73, 0x4a, 0xde, 0x82, 0x82, 0x97, 0x6b, 0x9a, 0x43, 0xe8, 0xf9, 0x33, 0x41, 0xde, - 0x13, 0x6d, 0x11, 0xaa, 
0xdc, 0x87, 0x95, 0x33, 0x31, 0x8b, 0x50, 0x54, 0x21, 0x6b, 0x32, 0x11, - 0x11, 0x8b, 0x62, 0x50, 0x90, 0xb8, 0xaa, 0x2a, 0xf8, 0xca, 0x00, 0x16, 0x85, 0xb1, 0x26, 0xa1, - 0xba, 0x1b, 0x5d, 0xcf, 0xf1, 0x79, 0xc8, 0xf4, 0x0c, 0xd3, 0xa0, 0xcc, 0xd7, 0x39, 0x95, 0x2f, - 0x5c, 0x07, 0xd9, 0x1f, 0x6d, 0x40, 0x6c, 0x4d, 0xec, 0x91, 0x64, 0x02, 0x33, 0x8c, 0xbe, 0x4b, - 0x6c, 0x6e, 0xcf, 0xfd, 0x70, 0x10, 0xfc, 0x14, 0x3f, 0x6b, 0xb1, 0xe3, 0x0e, 0x94, 0x47, 0x76, - 0x14, 0xb0, 0x6f, 0xc2, 0x94, 0x29, 0x68, 0x02, 0x78, 0x25, 0x0e, 0xdc, 0xd7, 0xf1, 0x25, 0x95, - 0x0e, 0xcc, 0x47, 0x07, 0x99, 0xcb, 0x06, 0xc1, 0xad, 0x57, 0xed, 0x61, 0xe7, 0x98, 0x50, 0xbf, - 0xd3, 0xa4, 0xdc, 0x66, 0xc1, 0x69, 0xbc, 0xd5, 0xfc, 0x07, 0xc1, 0x6c, 0x6c, 0x9a, 0x70, 0x63, - 0x71, 0x60, 0xf7, 0x4d, 0xcd, 0xfb, 0x36, 0x0d, 0xf2, 0x7a, 0xc6, 0xa5, 0x37, 0x04, 0xb9, 0xd1, - 0x0d, 0x27, 0x7e, 0x32, 0x92, 0xf8, 0x41, 0x2b, 0x4d, 0xbd, 0xd7, 0x56, 0x1a, 0xf4, 0xba, 0xf4, - 0xc5, 0xbd, 0xee, 0x6f, 0x08, 0x32, 0xdc, 0xc3, 0xf7, 0x95, 0xfc, 0x12, 0x4c, 0x11, 0xab, 0xd3, - 0xef, 0x1a, 0xd6, 0x21, 0xcb, 0x8e, 0x8c, 0xea, 0xaf, 0xf1, 0xae, 0xa8, 0x05, 0x6e, 0x71, 0x29, - 0x6c, 0x7c, 0x27, 0x7c, 0xbf, 0x39, 0x91, 0xef, 0xfb, 0x96, 0xa3, 0x1f, 0x90, 0x8d, 0x13, 0x4a, - 0x5a, 0x3d, 0xa3, 0xe3, 0x95, 0x8b, 0x1a, 0x4c, 0x47, 0xae, 0xc9, 0xe5, 0x07, 0x68, 0x45, 0x83, - 0x42, 0x98, 0x83, 0xaf, 0x88, 0x81, 0x9a, 0x97, 0xf2, 0x39, 0x4f, 0x9b, 0xb1, 0x83, 0xd1, 0x19, - 0x63, 0x48, 0xb3, 0x1e, 0xce, 0x0f, 0x9d, 0xfd, 0x0f, 0xbe, 0xb6, 0xf8, 0xb5, 0xe0, 0x8b, 0x2f, - 0xaa, 0x90, 0x0f, 0xf5, 0x01, 0x3c, 0x0d, 0xb9, 0xc6, 0xb6, 0xd6, 0xac, 0x37, 0x77, 0xd4, 0x9f, - 0x17, 0x13, 0xee, 0xcc, 0x5d, 0xdb, 0x74, 0xe7, 0xec, 0x22, 0xfa, 0xe2, 0x1e, 0xe4, 0xfc, 0x6d, - 0x70, 0x0e, 0x32, 0xf5, 0x87, 0xfb, 0xb5, 0x07, 0xc5, 0x84, 0xab, 0xb2, 0xbd, 0xb3, 0xa7, 0xf1, - 0x25, 0xc2, 0xb3, 0x90, 0x57, 0xeb, 0x77, 0xeb, 0x3f, 0xd3, 0x9a, 0xb5, 0xbd, 0xcd, 0xad, 0x62, - 0x12, 0x63, 0x98, 0xe1, 0x84, 0xed, 0x1d, 0x41, 0x4b, 0x5d, 0xff, 0xe7, 0x47, 0x30, 0xe5, 0xa5, - 0x29, 0xfe, 0x06, 0xd2, 0xbb, 0x43, 0xe7, 0x08, 0x2f, 0x06, 0x39, 0xf8, 0x53, 0xdb, 0xa0, 0x44, - 0x14, 0x04, 0xa9, 0x3c, 0x42, 0xe7, 0x17, 0x4d, 0x49, 0xe0, 0xdb, 0x90, 0x0f, 0x0d, 0x62, 0x78, - 0x3e, 0x32, 0x74, 0x7a, 0xfa, 0x4b, 0x63, 0x46, 0xd1, 0xc0, 0xc6, 0x55, 0x84, 0x77, 0x60, 0x86, - 0xb1, 0xbc, 0x41, 0xcb, 0xc1, 0x9f, 0x78, 0x2a, 0xe3, 0x3e, 0xd5, 0xa4, 0xe5, 0x33, 0xb8, 0x3e, - 0xac, 0xad, 0xe8, 0x4b, 0x87, 0x34, 0xee, 0x51, 0x24, 0x0e, 0x6e, 0xcc, 0x3c, 0xa3, 0x24, 0x70, - 0x1d, 0x20, 0x98, 0x06, 0xf0, 0xc7, 0x11, 0xe1, 0xf0, 0x04, 0x23, 0x49, 0xe3, 0x58, 0xbe, 0x99, - 0x0d, 0xc8, 0xf9, 0x3d, 0x0d, 0x57, 0xc6, 0xb4, 0x39, 0x6e, 0xe4, 0xec, 0x06, 0xa8, 0x24, 0xf0, - 0x1d, 0x28, 0xd4, 0x7a, 0xbd, 0x49, 0xcc, 0x48, 0x61, 0x8e, 0x13, 0xb7, 0xd3, 0xf3, 0xeb, 0x70, - 0xbc, 0x8d, 0xe0, 0xcf, 0xfc, 0x7c, 0x3e, 0xb7, 0x37, 0x4a, 0x9f, 0x5f, 0x28, 0xe7, 0xef, 0xb6, - 0x07, 0xb3, 0xb1, 0xaa, 0x8f, 0xe5, 0x98, 0x76, 0xac, 0x01, 0x49, 0x2b, 0x67, 0xf2, 0x7d, 0xab, - 0x6d, 0x31, 0x7f, 0x46, 0x1f, 0xc5, 0xb0, 0x32, 0x7a, 0x08, 0xf1, 0x97, 0x3b, 0xe9, 0x7b, 0xe7, - 0xca, 0x84, 0xb2, 0xf2, 0x18, 0x16, 0xc7, 0xbf, 0x1d, 0xe1, 0x2b, 0x63, 0x72, 0x66, 0xf4, 0x1d, - 0x4c, 0xfa, 0xec, 0x22, 0xb1, 0xd0, 0x66, 0x4d, 0x28, 0x84, 0x7b, 0x19, 0x5e, 0x3a, 0xe7, 0x53, - 0x5d, 0xfa, 0x64, 0x3c, 0x33, 0x30, 0xb7, 0xf1, 0xdd, 0x8b, 0xd7, 0x72, 0xe2, 0xe5, 0x6b, 0x39, - 0xf1, 0xf6, 0xb5, 0x8c, 0x7e, 0x75, 0x2a, 0xa3, 0x3f, 0x9c, 0xca, 0xe8, 0xf9, 0xa9, 0x8c, 0x5e, - 0x9c, 0xca, 0xe8, 0x5f, 0xa7, 0x32, 0xfa, 0xf7, 
0xa9, 0x9c, 0x78, 0x7b, 0x2a, 0xa3, 0xdf, 0xbc, - 0x91, 0x13, 0x2f, 0xde, 0xc8, 0x89, 0x97, 0x6f, 0xe4, 0xc4, 0x2f, 0xb2, 0x9d, 0x9e, 0x41, 0x2c, - 0xda, 0xce, 0xb2, 0x27, 0xd0, 0x1b, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x69, 0x8a, 0xb2, 0x58, - 0x7d, 0x15, 0x00, 0x00, + // 1815 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xbd, 0x6f, 0x23, 0xc7, + 0x15, 0xe7, 0x90, 0x14, 0x2d, 0x3e, 0x52, 0x3a, 0x6a, 0x24, 0x1d, 0xe9, 0x95, 0xb5, 0x92, 0x37, + 0x38, 0x9b, 0x71, 0x6c, 0xde, 0xb7, 0x71, 0x76, 0xec, 0x82, 0xd4, 0xd1, 0x77, 0x74, 0x4c, 0xe9, + 0xbc, 0x94, 0x1c, 0x27, 0x40, 0xb0, 0x58, 0x92, 0x23, 0x69, 0x21, 0xee, 0x92, 0xd9, 0x19, 0x1a, + 0x27, 0x17, 0x41, 0x92, 0x22, 0x55, 0x8a, 0xfc, 0x01, 0x41, 0x80, 0x74, 0x41, 0xca, 0x34, 0x69, + 0x82, 0xd4, 0x6e, 0x02, 0x5c, 0x17, 0x23, 0x40, 0x0e, 0x39, 0x5d, 0x93, 0x74, 0xfe, 0x13, 0x82, + 0x9d, 0x99, 0xfd, 0x24, 0x29, 0xd2, 0x86, 0xcf, 0x15, 0x39, 0xef, 0xfd, 0xe6, 0xcd, 0x7b, 0x6f, + 0xde, 0xd7, 0x2c, 0xac, 0x5a, 0xce, 0x09, 0xa1, 0x8c, 0xb8, 0xb5, 0x91, 0x3b, 0x64, 0x43, 0x9c, + 0xeb, 0x0d, 0x5d, 0x46, 0x1e, 0x2b, 0x6f, 0x9d, 0x58, 0xec, 0x74, 0xdc, 0xad, 0xf5, 0x86, 0xf6, + 0xf5, 0x93, 0xe1, 0xc9, 0xf0, 0x3a, 0x67, 0x77, 0xc7, 0xc7, 0x7c, 0xc5, 0x17, 0xfc, 0x9f, 0xd8, + 0xa6, 0xdc, 0x88, 0xc2, 0x5d, 0xf3, 0xd8, 0x74, 0xcc, 0xeb, 0xb6, 0x65, 0x5b, 0xee, 0xf5, 0xd1, + 0xd9, 0x89, 0xf8, 0x37, 0xea, 0x8a, 0x5f, 0xb1, 0x43, 0xfb, 0x0d, 0x02, 0xe5, 0x23, 0xb3, 0x4b, + 0x06, 0xfb, 0xa6, 0x4d, 0x68, 0xdd, 0xe9, 0x7f, 0x62, 0x0e, 0xc6, 0x84, 0xea, 0xe4, 0xe7, 0x63, + 0x42, 0x19, 0xbe, 0x01, 0xcb, 0xb6, 0xc9, 0x7a, 0xa7, 0xc4, 0xa5, 0x15, 0xb4, 0x9b, 0xa9, 0x16, + 0x6e, 0x6d, 0xd4, 0x84, 0x6a, 0x35, 0xbe, 0xab, 0x2d, 0x98, 0x7a, 0x80, 0xc2, 0x6f, 0x43, 0xb1, + 0x37, 0x1c, 0x3b, 0xcc, 0xb0, 0x09, 0x3b, 0x1d, 0xf6, 0x2b, 0xe9, 0x5d, 0x54, 0x5d, 0xbd, 0xb5, + 0xee, 0xef, 0xda, 0xf3, 0x78, 0x6d, 0xce, 0xd2, 0x0b, 0xbd, 0x70, 0xa1, 0x3d, 0x84, 0xad, 0xa9, + 0x7a, 0xd0, 0xd1, 0xd0, 0xa1, 0x04, 0x7f, 0x1f, 0x96, 0x2c, 0x46, 0x6c, 0x5f, 0x8b, 0xf5, 0x98, + 0x16, 0x12, 0x2b, 0x10, 0xda, 0x7d, 0x28, 0x44, 0xa8, 0x78, 0x1b, 0x60, 0xe0, 0x2d, 0x0d, 0xc7, + 0xb4, 0x49, 0x05, 0xed, 0xa2, 0x6a, 0x5e, 0xcf, 0x0f, 0xfc, 0xa3, 0xf0, 0x55, 0xc8, 0x7d, 0xc6, + 0x81, 0x95, 0xf4, 0x6e, 0xa6, 0x9a, 0xd7, 0xe5, 0x4a, 0xfb, 0x33, 0x82, 0xed, 0x88, 0x98, 0x3d, + 0xd3, 0xed, 0x5b, 0x8e, 0x39, 0xb0, 0xd8, 0xb9, 0xef, 0x9b, 0x1d, 0x28, 0x84, 0x82, 0x85, 0x62, + 0x79, 0x1d, 0x02, 0xc9, 0x34, 0xe6, 0xbc, 0xf4, 0x37, 0x72, 0x5e, 0x66, 0x41, 0xe7, 0x1d, 0x81, + 0x3a, 0x4b, 0x57, 0xe9, 0xbf, 0xdb, 0x71, 0xff, 0x6d, 0x4f, 0xfa, 0xaf, 0x43, 0x5c, 0x8b, 0x50, + 0x7e, 0x84, 0xef, 0xc9, 0xa7, 0x08, 0x36, 0xa7, 0x02, 0xe6, 0x39, 0xd5, 0x04, 0x2c, 0xd8, 0xdc, + 0x99, 0x06, 0xe5, 0x3b, 0xa5, 0x0f, 0x6e, 0x5f, 0x7a, 0xf4, 0x04, 0xb5, 0xe9, 0x30, 0xf7, 0x5c, + 0x2f, 0x0d, 0x12, 0x64, 0x65, 0x6f, 0x52, 0x35, 0x0e, 0xc5, 0x25, 0xc8, 0x9c, 0x91, 0x73, 0xa9, + 0x93, 0xf7, 0x17, 0x6f, 0xc0, 0x12, 0xd7, 0x83, 0xc7, 0x62, 0x56, 0x17, 0x8b, 0x77, 0xd3, 0xf7, + 0x90, 0xf6, 0x4f, 0x04, 0xc5, 0x8f, 0xc7, 0xc4, 0x0d, 0xee, 0xf4, 0x4d, 0xc0, 0x94, 0x99, 0x2e, + 0x33, 0x98, 0x65, 0x13, 0xca, 0x4c, 0x7b, 0x64, 0x70, 0x9f, 0xa1, 0x6a, 0x46, 0x2f, 0x71, 0xce, + 0xa1, 0xcf, 0x68, 0x53, 0x5c, 0x85, 0x12, 0x71, 0xfa, 0x71, 0x6c, 0x9a, 0x63, 0x57, 0x89, 0xd3, + 0x8f, 0x22, 0xa3, 0xa1, 0x90, 0x59, 0x28, 0x14, 0xde, 0x87, 0x2d, 0xca, 0x5c, 0x62, 0xda, 0x96, + 0x73, 0x62, 0xf4, 0x4e, 0xc7, 0xce, 0x19, 0x35, 0xba, 0x1e, 0xd3, 0xa0, 0xd6, 0xe7, 0xa4, 0xd2, + 
0xe7, 0xa6, 0x54, 0x02, 0xc8, 0x1e, 0x47, 0x34, 0x3c, 0x40, 0xc7, 0xfa, 0x9c, 0x68, 0x7f, 0x44, + 0xb0, 0xd1, 0x7c, 0x4c, 0xec, 0xd1, 0xc0, 0x74, 0xbf, 0x13, 0x0b, 0x6f, 0x4e, 0x58, 0xb8, 0x39, + 0xcd, 0x42, 0x1a, 0x9a, 0xa8, 0xfd, 0x0d, 0xc1, 0x7a, 0xbd, 0xc7, 0xac, 0xcf, 0xe4, 0xfd, 0x7d, + 0xf3, 0xa2, 0xf3, 0x43, 0xc8, 0xb2, 0xf3, 0x11, 0x91, 0xc5, 0xe6, 0x75, 0x1f, 0x3d, 0x45, 0x78, + 0x4d, 0xfe, 0x1e, 0x9e, 0x8f, 0x88, 0xce, 0x37, 0x69, 0x6f, 0x43, 0x21, 0x42, 0xc4, 0x00, 0xb9, + 0x4e, 0x53, 0x6f, 0x35, 0x3b, 0xa5, 0x14, 0xde, 0x82, 0xf2, 0x7e, 0xfd, 0xb0, 0xf5, 0x49, 0xd3, + 0x78, 0xd8, 0xea, 0x1c, 0x1e, 0x3c, 0xd0, 0xeb, 0x6d, 0x43, 0x32, 0x91, 0xf6, 0x23, 0x58, 0x91, + 0x9e, 0x95, 0x39, 0xf6, 0x2e, 0x00, 0x77, 0x94, 0x88, 0xf6, 0xb8, 0xe6, 0xa3, 0x6e, 0xcd, 0xf3, + 0x96, 0xd0, 0xa5, 0x91, 0xfd, 0xe2, 0xe9, 0x4e, 0x4a, 0x8f, 0xa0, 0xb5, 0x5f, 0x65, 0x61, 0x9d, + 0x4b, 0xeb, 0xf0, 0x1b, 0x0d, 0x64, 0x7e, 0x0a, 0x05, 0x71, 0xf9, 0x51, 0xa1, 0x65, 0xdf, 0xc0, + 0x50, 0x24, 0xbf, 0xff, 0xc6, 0xb6, 0x27, 0xf7, 0x5f, 0x4f, 0x77, 0x36, 0xf7, 0xc6, 0x94, 0x0d, + 0xed, 0x04, 0x5b, 0x8f, 0x8a, 0xc2, 0x34, 0xa6, 0x6d, 0xfa, 0x12, 0x6d, 0xdf, 0x97, 0x52, 0xef, + 0x2e, 0xd2, 0x5d, 0x6a, 0xc9, 0xa3, 0xa3, 0x66, 0x62, 0x02, 0xa5, 0x30, 0xaa, 0xe5, 0xd1, 0x22, + 0x5a, 0x5e, 0xf6, 0x6d, 0x8a, 0x78, 0x41, 0x9e, 0xbf, 0x23, 0xcf, 0x2f, 0x0b, 0xd1, 0x13, 0x00, + 0xfd, 0x4a, 0x20, 0x53, 0x10, 0xf0, 0x1d, 0x28, 0x5b, 0xd4, 0xf0, 0x22, 0x77, 0x78, 0x2c, 0x8f, + 0x31, 0x04, 0xa6, 0x92, 0xdd, 0x45, 0xd5, 0x65, 0x7d, 0xdd, 0xa2, 0x4d, 0xa7, 0x7f, 0x70, 0x2c, + 0xf0, 0x42, 0x18, 0xfe, 0x05, 0x94, 0x93, 0xca, 0xc9, 0xcc, 0xab, 0x2c, 0x71, 0x1d, 0x77, 0x66, + 0xea, 0x28, 0xd3, 0xef, 0x9a, 0xd4, 0x74, 0x7b, 0x86, 0xa6, 0x02, 0xa6, 0x6f, 0x26, 0xf4, 0x15, + 0x64, 0xed, 0xf7, 0x08, 0xd6, 0x26, 0xb6, 0xe0, 0x63, 0xc8, 0xf1, 0xe2, 0x97, 0x6c, 0x7d, 0xa3, + 0xae, 0xc8, 0x86, 0x47, 0xa6, 0xe5, 0x36, 0xde, 0x91, 0x07, 0xdf, 0x5c, 0xe8, 0x8a, 0xf8, 0xbe, + 0x7a, 0xdf, 0x1c, 0x31, 0xe2, 0xea, 0x52, 0xba, 0xd7, 0xce, 0xb8, 0xb1, 0x06, 0x6f, 0x2c, 0x32, + 0xcb, 0x81, 0x93, 0x78, 0x65, 0xd6, 0x2c, 0x28, 0xcf, 0x30, 0x08, 0xbf, 0x0a, 0x45, 0xe9, 0x2f, + 0xcb, 0xe9, 0x93, 0xc7, 0xbc, 0x9c, 0x64, 0xf5, 0x82, 0xa0, 0xb5, 0x3c, 0x12, 0xfe, 0x01, 0xe4, + 0xa4, 0x2f, 0x45, 0xa8, 0xad, 0x04, 0x4d, 0x8d, 0x47, 0xae, 0xc8, 0x08, 0x09, 0xd1, 0x7e, 0x8b, + 0x60, 0x33, 0x51, 0xbd, 0x64, 0x3e, 0xd0, 0x85, 0x73, 0xec, 0xdb, 0x8b, 0x5a, 0xed, 0xef, 0x08, + 0x70, 0x74, 0xd0, 0x90, 0x75, 0x6a, 0x4e, 0x13, 0x9c, 0x5e, 0x69, 0xd3, 0x5f, 0xa3, 0xd2, 0x66, + 0xe6, 0x56, 0x5a, 0x2f, 0x9a, 0x17, 0xa8, 0xb4, 0xf7, 0x60, 0x3d, 0xa6, 0xbf, 0x74, 0xe6, 0xab, + 0x50, 0x8c, 0xb4, 0x69, 0x7f, 0x84, 0x29, 0x84, 0xbd, 0x96, 0x6a, 0x7f, 0x40, 0xb0, 0x16, 0xce, + 0x65, 0xdf, 0x6d, 0x13, 0x59, 0xc8, 0xb4, 0xbb, 0xf2, 0x6a, 0xa4, 0x7e, 0xd2, 0xb2, 0x79, 0xb3, + 0x99, 0xf6, 0x21, 0x94, 0x8e, 0x28, 0x71, 0x3b, 0xcc, 0x64, 0x81, 0x55, 0xc9, 0xe9, 0x0b, 0x2d, + 0x38, 0x7d, 0xfd, 0x15, 0xc1, 0x5a, 0x44, 0x98, 0x54, 0xe1, 0x9a, 0x3f, 0xd4, 0x5b, 0x43, 0xc7, + 0x70, 0x4d, 0x26, 0x22, 0x04, 0xe9, 0x2b, 0x01, 0x55, 0x37, 0x19, 0xf1, 0x82, 0xc8, 0x19, 0xdb, + 0xe1, 0x88, 0xe4, 0x25, 0x4e, 0xde, 0x19, 0xfb, 0xd9, 0xff, 0x26, 0x60, 0x73, 0x64, 0x19, 0x09, + 0x49, 0x19, 0x2e, 0xa9, 0x64, 0x8e, 0xac, 0x56, 0x4c, 0x58, 0x0d, 0xd6, 0xdd, 0xf1, 0x80, 0x24, + 0xe1, 0x59, 0x0e, 0x5f, 0xf3, 0x58, 0x31, 0xbc, 0xf6, 0x33, 0x58, 0xf7, 0x14, 0x6f, 0xdd, 0x8f, + 0xab, 0x5e, 0x86, 0x97, 0xc6, 0x94, 0xb8, 0x86, 0xd5, 0x97, 0x51, 0x9d, 0xf3, 0x96, 0xad, 0x3e, + 0x7e, 0x0b, 0xb2, 0x7d, 
0x93, 0x99, 0x5c, 0xcd, 0x48, 0xc9, 0x9e, 0x30, 0x5e, 0xe7, 0x30, 0xed, + 0x01, 0x60, 0x8f, 0x45, 0xe3, 0xd2, 0x6f, 0xc2, 0x12, 0xf5, 0x08, 0x32, 0x7b, 0xb7, 0xa2, 0x52, + 0x12, 0x9a, 0xe8, 0x02, 0xa9, 0xfd, 0x05, 0x81, 0xda, 0x26, 0xcc, 0xb5, 0x7a, 0xf4, 0x83, 0xa1, + 0x1b, 0x0f, 0x85, 0x17, 0x1c, 0x92, 0xf7, 0xa0, 0xe8, 0xc7, 0x9a, 0x41, 0x09, 0xbb, 0x7c, 0xb6, + 0x29, 0xf8, 0xd0, 0x0e, 0x61, 0xda, 0xaf, 0x11, 0xec, 0xcc, 0x54, 0x5a, 0xfa, 0xc2, 0x80, 0x9c, + 0xcd, 0x21, 0xd2, 0x19, 0xa5, 0xb0, 0x94, 0x89, 0xad, 0x8d, 0xbb, 0x0b, 0x57, 0x75, 0x51, 0xc2, + 0xc4, 0x36, 0x5d, 0x8a, 0xd5, 0x46, 0x70, 0x55, 0xea, 0xd0, 0x26, 0xcc, 0xf4, 0x6e, 0xc5, 0x77, + 0xd8, 0x06, 0x2c, 0x0d, 0x2c, 0xdb, 0x62, 0xdc, 0x47, 0x6b, 0xba, 0x58, 0x78, 0x8e, 0xe1, 0x7f, + 0x8c, 0x11, 0x71, 0x0d, 0xa9, 0x5a, 0x9a, 0x03, 0x56, 0x39, 0xfd, 0x11, 0x71, 0x85, 0x3c, 0xef, + 0xe1, 0x24, 0xf9, 0x19, 0x11, 0x23, 0xf2, 0xc4, 0x03, 0x28, 0x4f, 0x9c, 0x28, 0xad, 0xbd, 0x03, + 0xcb, 0xb6, 0xa4, 0x49, 0x7b, 0x2b, 0x49, 0x7b, 0x83, 0x3d, 0x01, 0x52, 0xeb, 0xc1, 0x46, 0x7c, + 0x90, 0x93, 0xd2, 0xaa, 0xf3, 0x7c, 0xe7, 0xab, 0xe4, 0xd5, 0xb9, 0xee, 0xb8, 0x77, 0x46, 0x58, + 0xd0, 0xdb, 0x32, 0x5e, 0x7b, 0x12, 0x34, 0xd1, 0xdc, 0xfe, 0x87, 0xe0, 0x4a, 0x62, 0x5c, 0xf2, + 0x7c, 0x71, 0xec, 0x0e, 0x6d, 0xc3, 0x7f, 0x9b, 0x87, 0xf9, 0xb0, 0xea, 0xd1, 0x5b, 0x92, 0xdc, + 0xea, 0x47, 0x13, 0x26, 0x1d, 0x4b, 0x98, 0xb0, 0x79, 0x67, 0x5e, 0x68, 0xf3, 0x0e, 0xbb, 0x6b, + 0x76, 0x7e, 0x77, 0xfd, 0x07, 0x82, 0x25, 0x61, 0xe1, 0x8b, 0x4a, 0x1a, 0x05, 0x96, 0x89, 0xd3, + 0x1b, 0xf6, 0x2d, 0xe7, 0x84, 0x47, 0xc7, 0x92, 0x1e, 0xac, 0xf1, 0x23, 0x59, 0x43, 0xbc, 0xa2, + 0x54, 0x6c, 0xbc, 0x27, 0x6d, 0xbf, 0xb3, 0x90, 0xed, 0x47, 0x0e, 0x35, 0x8f, 0x49, 0xe3, 0x9c, + 0x91, 0xce, 0xc0, 0xea, 0xf9, 0x65, 0xa6, 0x0e, 0x2b, 0xb1, 0xec, 0xfa, 0xfa, 0x0f, 0x08, 0xcd, + 0x80, 0x62, 0x94, 0x83, 0xaf, 0xc9, 0x07, 0x85, 0x68, 0x01, 0x6b, 0xfe, 0x6e, 0xce, 0x0e, 0x9f, + 0x0e, 0x18, 0x43, 0x96, 0xf7, 0x7e, 0x71, 0xe9, 0xfc, 0x7f, 0xf8, 0xda, 0x14, 0x69, 0x21, 0x16, + 0x6f, 0x54, 0xa1, 0x10, 0xe9, 0x1f, 0x78, 0x05, 0xf2, 0xad, 0x7d, 0xa3, 0xdd, 0x6c, 0x1f, 0xe8, + 0x3f, 0x29, 0xa5, 0xbc, 0x37, 0x47, 0x7d, 0xcf, 0x7b, 0x67, 0x94, 0xd0, 0x1b, 0x1f, 0x42, 0x3e, + 0x38, 0x06, 0xe7, 0x61, 0xa9, 0xf9, 0xf1, 0x51, 0xfd, 0xa3, 0x52, 0xca, 0xdb, 0xb2, 0x7f, 0x70, + 0x68, 0x88, 0x25, 0xc2, 0x57, 0xa0, 0xa0, 0x37, 0x1f, 0x34, 0x3f, 0x35, 0xda, 0xf5, 0xc3, 0xbd, + 0x87, 0xa5, 0x34, 0xc6, 0xb0, 0x2a, 0x08, 0xfb, 0x07, 0x92, 0x96, 0xb9, 0xf5, 0xef, 0x97, 0x60, + 0xd9, 0x0f, 0x53, 0xfc, 0x0e, 0x64, 0x1f, 0x8d, 0xe9, 0x29, 0xbe, 0x1a, 0xc6, 0xe0, 0x8f, 0x5d, + 0x8b, 0x11, 0x59, 0x10, 0x94, 0xf2, 0x04, 0x5d, 0x24, 0x9a, 0x96, 0xc2, 0xf7, 0xa1, 0x10, 0x19, + 0xfd, 0xf0, 0x46, 0x6c, 0x0e, 0xf6, 0xf7, 0x6f, 0x4d, 0x99, 0x8e, 0x43, 0x19, 0x37, 0x10, 0x3e, + 0x80, 0x55, 0xce, 0xf2, 0x27, 0x3b, 0x8a, 0x5f, 0xf1, 0xb7, 0x4c, 0x7b, 0xaa, 0x2a, 0xdb, 0x33, + 0xb8, 0x81, 0x5a, 0x0f, 0xe3, 0x5f, 0x7a, 0x94, 0x69, 0x1f, 0x85, 0x92, 0xca, 0x4d, 0x99, 0x83, + 0xb4, 0x14, 0x6e, 0x02, 0x84, 0x53, 0x04, 0x7e, 0x39, 0x06, 0x8e, 0x4e, 0x3e, 0x8a, 0x32, 0x8d, + 0x15, 0x88, 0x69, 0x40, 0x3e, 0xe8, 0x85, 0xb8, 0x32, 0xa5, 0x3d, 0x0a, 0x21, 0xb3, 0x1b, 0xa7, + 0x96, 0xc2, 0x1f, 0x40, 0xb1, 0x3e, 0x18, 0x2c, 0x22, 0x46, 0x89, 0x72, 0x68, 0x52, 0xce, 0x20, + 0xa8, 0xc3, 0xc9, 0xee, 0x83, 0x5f, 0x0b, 0xe2, 0xf9, 0xd2, 0x9e, 0xaa, 0xbc, 0x3e, 0x17, 0x17, + 0x9c, 0x76, 0x08, 0x57, 0x12, 0x55, 0x1f, 0xab, 0x89, 0xdd, 0x89, 0x06, 0xa4, 0xec, 0xcc, 0xe4, + 0x07, 0x52, 0xbb, 0x72, 0x6e, 0x8d, 0x7f, 0x14, 
0xc4, 0xda, 0xe4, 0x25, 0x24, 0xbf, 0x5c, 0x2a, + 0xdf, 0xbb, 0x14, 0x13, 0x89, 0xca, 0x33, 0xb8, 0x3a, 0xfd, 0xdb, 0x19, 0xbe, 0x36, 0x25, 0x66, + 0x26, 0xbf, 0x03, 0x2a, 0xaf, 0xcd, 0x83, 0x45, 0x0e, 0x6b, 0x43, 0x31, 0xda, 0xcb, 0xf0, 0xd6, + 0x25, 0x9f, 0x2a, 0x94, 0x57, 0xa6, 0x33, 0x43, 0x71, 0x8d, 0xf7, 0x9e, 0x3c, 0x53, 0x53, 0x5f, + 0x3e, 0x53, 0x53, 0x5f, 0x3d, 0x53, 0xd1, 0x2f, 0x2f, 0x54, 0xf4, 0xa7, 0x0b, 0x15, 0x7d, 0x71, + 0xa1, 0xa2, 0x27, 0x17, 0x2a, 0xfa, 0xcf, 0x85, 0x8a, 0xfe, 0x7b, 0xa1, 0xa6, 0xbe, 0xba, 0x50, + 0xd1, 0xef, 0x9e, 0xab, 0xa9, 0x27, 0xcf, 0xd5, 0xd4, 0x97, 0xcf, 0xd5, 0xd4, 0x4f, 0x73, 0xbd, + 0x81, 0x45, 0x1c, 0xd6, 0xcd, 0xf1, 0x4f, 0xc0, 0xb7, 0xff, 0x1f, 0x00, 0x00, 0xff, 0xff, 0xc4, + 0x6d, 0x29, 0xba, 0x7d, 0x16, 0x00, 0x00, } func (x CountMethod) String() string { @@ -2281,7 +2228,7 @@ func (this *QueryStreamResponse) Equal(that interface{}) bool { return false } for i := range this.Chunkseries { - if !this.Chunkseries[i].Equal(&that1.Chunkseries[i]) { + if !this.Chunkseries[i].Equal(that1.Chunkseries[i]) { return false } } @@ -2289,7 +2236,7 @@ func (this *QueryStreamResponse) Equal(that interface{}) bool { return false } for i := range this.Timeseries { - if !this.Timeseries[i].Equal(&that1.Timeseries[i]) { + if !this.Timeseries[i].Equal(that1.Timeseries[i]) { return false } } @@ -2297,7 +2244,7 @@ func (this *QueryStreamResponse) Equal(that interface{}) bool { return false } for i := range this.StreamingSeries { - if !this.StreamingSeries[i].Equal(&that1.StreamingSeries[i]) { + if !this.StreamingSeries[i].Equal(that1.StreamingSeries[i]) { return false } } @@ -2308,7 +2255,7 @@ func (this *QueryStreamResponse) Equal(that interface{}) bool { return false } for i := range this.StreamingSeriesChunks { - if !this.StreamingSeriesChunks[i].Equal(&that1.StreamingSeriesChunks[i]) { + if !this.StreamingSeriesChunks[i].Equal(that1.StreamingSeriesChunks[i]) { return false } } @@ -2401,7 +2348,7 @@ func (this *ExemplarQueryResponse) Equal(that interface{}) bool { return false } for i := range this.Timeseries { - if !this.Timeseries[i].Equal(&that1.Timeseries[i]) { + if !this.Timeseries[i].Equal(that1.Timeseries[i]) { return false } } @@ -3085,35 +3032,11 @@ func (this *QueryStreamResponse) GoString() string { } s := make([]string, 0, 9) s = append(s, "&client.QueryStreamResponse{") - if this.Chunkseries != nil { - vs := make([]*TimeSeriesChunk, len(this.Chunkseries)) - for i := range vs { - vs[i] = &this.Chunkseries[i] - } - s = append(s, "Chunkseries: "+fmt.Sprintf("%#v", vs)+",\n") - } - if this.Timeseries != nil { - vs := make([]*mimirpb.TimeSeries, len(this.Timeseries)) - for i := range vs { - vs[i] = &this.Timeseries[i] - } - s = append(s, "Timeseries: "+fmt.Sprintf("%#v", vs)+",\n") - } - if this.StreamingSeries != nil { - vs := make([]*QueryStreamSeries, len(this.StreamingSeries)) - for i := range vs { - vs[i] = &this.StreamingSeries[i] - } - s = append(s, "StreamingSeries: "+fmt.Sprintf("%#v", vs)+",\n") - } + s = append(s, "Chunkseries: "+fmt.Sprintf("%#v", this.Chunkseries)+",\n") + s = append(s, "Timeseries: "+fmt.Sprintf("%#v", this.Timeseries)+",\n") + s = append(s, "StreamingSeries: "+fmt.Sprintf("%#v", this.StreamingSeries)+",\n") s = append(s, "IsEndOfSeriesStream: "+fmt.Sprintf("%#v", this.IsEndOfSeriesStream)+",\n") - if this.StreamingSeriesChunks != nil { - vs := make([]*QueryStreamSeriesChunks, len(this.StreamingSeriesChunks)) - for i := range vs { - vs[i] = &this.StreamingSeriesChunks[i] - } - s = append(s, "StreamingSeriesChunks: 
"+fmt.Sprintf("%#v", vs)+",\n") - } + s = append(s, "StreamingSeriesChunks: "+fmt.Sprintf("%#v", this.StreamingSeriesChunks)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -3151,13 +3074,7 @@ func (this *ExemplarQueryResponse) GoString() string { } s := make([]string, 0, 5) s = append(s, "&client.ExemplarQueryResponse{") - if this.Timeseries != nil { - vs := make([]*mimirpb.TimeSeries, len(this.Timeseries)) - for i := range vs { - vs[i] = &this.Timeseries[i] - } - s = append(s, "Timeseries: "+fmt.Sprintf("%#v", vs)+",\n") - } + s = append(s, "Timeseries: "+fmt.Sprintf("%#v", this.Timeseries)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -3278,9 +3195,7 @@ func (this *MetricsForLabelMatchersResponse) GoString() string { } s := make([]string, 0, 5) s = append(s, "&client.MetricsForLabelMatchersResponse{") - if this.Metric != nil { - s = append(s, "Metric: "+fmt.Sprintf("%#v", this.Metric)+",\n") - } + s = append(s, "Metric: "+fmt.Sprintf("%#v", this.Metric)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -4437,11 +4352,11 @@ func (m *QueryStreamResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.StreamingSeriesChunks) > 0 { for iNdEx := len(m.StreamingSeriesChunks) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.StreamingSeriesChunks[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.StreamingSeriesChunks[iNdEx].Size() + i -= size + if _, err := m.StreamingSeriesChunks[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintIngester(dAtA, i, uint64(size)) } i-- @@ -4461,11 +4376,11 @@ func (m *QueryStreamResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.StreamingSeries) > 0 { for iNdEx := len(m.StreamingSeries) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.StreamingSeries[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.StreamingSeries[iNdEx].Size() + i -= size + if _, err := m.StreamingSeries[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintIngester(dAtA, i, uint64(size)) } i-- @@ -4475,11 +4390,11 @@ func (m *QueryStreamResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.Timeseries) > 0 { for iNdEx := len(m.Timeseries) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.Timeseries[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Timeseries[iNdEx].Size() + i -= size + if _, err := m.Timeseries[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintIngester(dAtA, i, uint64(size)) } i-- @@ -4489,11 +4404,11 @@ func (m *QueryStreamResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.Chunkseries) > 0 { for iNdEx := len(m.Chunkseries) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.Chunkseries[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Chunkseries[iNdEx].Size() + i -= size + if _, err := m.Chunkseries[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintIngester(dAtA, i, uint64(size)) } i-- @@ -4610,11 +4525,11 @@ func (m *ExemplarQueryResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { if len(m.Timeseries) > 0 { for iNdEx := len(m.Timeseries) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.Timeseries[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Timeseries[iNdEx].Size() + i -= size + if _, err := m.Timeseries[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintIngester(dAtA, i, uint64(size)) } i-- @@ -5008,11 +4923,11 @@ func (m 
*MetricsForLabelMatchersResponse) MarshalToSizedBuffer(dAtA []byte) (int if len(m.Metric) > 0 { for iNdEx := len(m.Metric) - 1; iNdEx >= 0; iNdEx-- { { - size, err := m.Metric[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { + size := m.Metric[iNdEx].Size() + i -= size + if _, err := m.Metric[iNdEx].MarshalTo(dAtA[i:]); err != nil { return 0, err } - i -= size i = encodeVarintIngester(dAtA, i, uint64(size)) } i-- @@ -6118,32 +6033,12 @@ func (this *QueryStreamResponse) String() string { if this == nil { return "nil" } - repeatedStringForChunkseries := "[]TimeSeriesChunk{" - for _, f := range this.Chunkseries { - repeatedStringForChunkseries += strings.Replace(strings.Replace(f.String(), "TimeSeriesChunk", "TimeSeriesChunk", 1), `&`, ``, 1) + "," - } - repeatedStringForChunkseries += "}" - repeatedStringForTimeseries := "[]TimeSeries{" - for _, f := range this.Timeseries { - repeatedStringForTimeseries += fmt.Sprintf("%v", f) + "," - } - repeatedStringForTimeseries += "}" - repeatedStringForStreamingSeries := "[]QueryStreamSeries{" - for _, f := range this.StreamingSeries { - repeatedStringForStreamingSeries += strings.Replace(strings.Replace(f.String(), "QueryStreamSeries", "QueryStreamSeries", 1), `&`, ``, 1) + "," - } - repeatedStringForStreamingSeries += "}" - repeatedStringForStreamingSeriesChunks := "[]QueryStreamSeriesChunks{" - for _, f := range this.StreamingSeriesChunks { - repeatedStringForStreamingSeriesChunks += strings.Replace(strings.Replace(f.String(), "QueryStreamSeriesChunks", "QueryStreamSeriesChunks", 1), `&`, ``, 1) + "," - } - repeatedStringForStreamingSeriesChunks += "}" s := strings.Join([]string{`&QueryStreamResponse{`, - `Chunkseries:` + repeatedStringForChunkseries + `,`, - `Timeseries:` + repeatedStringForTimeseries + `,`, - `StreamingSeries:` + repeatedStringForStreamingSeries + `,`, + `Chunkseries:` + fmt.Sprintf("%v", this.Chunkseries) + `,`, + `Timeseries:` + fmt.Sprintf("%v", this.Timeseries) + `,`, + `StreamingSeries:` + fmt.Sprintf("%v", this.StreamingSeries) + `,`, `IsEndOfSeriesStream:` + fmt.Sprintf("%v", this.IsEndOfSeriesStream) + `,`, - `StreamingSeriesChunks:` + repeatedStringForStreamingSeriesChunks + `,`, + `StreamingSeriesChunks:` + fmt.Sprintf("%v", this.StreamingSeriesChunks) + `,`, `}`, }, "") return s @@ -6179,13 +6074,8 @@ func (this *ExemplarQueryResponse) String() string { if this == nil { return "nil" } - repeatedStringForTimeseries := "[]TimeSeries{" - for _, f := range this.Timeseries { - repeatedStringForTimeseries += fmt.Sprintf("%v", f) + "," - } - repeatedStringForTimeseries += "}" s := strings.Join([]string{`&ExemplarQueryResponse{`, - `Timeseries:` + repeatedStringForTimeseries + `,`, + `Timeseries:` + fmt.Sprintf("%v", this.Timeseries) + `,`, `}`, }, "") return s @@ -6305,13 +6195,8 @@ func (this *MetricsForLabelMatchersResponse) String() string { if this == nil { return "nil" } - repeatedStringForMetric := "[]*Metric{" - for _, f := range this.Metric { - repeatedStringForMetric += strings.Replace(fmt.Sprintf("%v", f), "Metric", "mimirpb.Metric", 1) + "," - } - repeatedStringForMetric += "}" s := strings.Join([]string{`&MetricsForLabelMatchersResponse{`, - `Metric:` + repeatedStringForMetric + `,`, + `Metric:` + fmt.Sprintf("%v", this.Metric) + `,`, `}`, }, "") return s @@ -7678,7 +7563,7 @@ func (m *QueryStreamResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Chunkseries = append(m.Chunkseries, TimeSeriesChunk{}) + m.Chunkseries = append(m.Chunkseries, CustomTimeSeriesChunk{}) if 
err := m.Chunkseries[len(m.Chunkseries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7712,7 +7597,7 @@ func (m *QueryStreamResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Timeseries = append(m.Timeseries, mimirpb.TimeSeries{}) + m.Timeseries = append(m.Timeseries, github_com_grafana_mimir_pkg_mimirpb.CustomTimeSeries{}) if err := m.Timeseries[len(m.Timeseries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7746,7 +7631,7 @@ func (m *QueryStreamResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.StreamingSeries = append(m.StreamingSeries, QueryStreamSeries{}) + m.StreamingSeries = append(m.StreamingSeries, CustomQueryStreamSeries{}) if err := m.StreamingSeries[len(m.StreamingSeries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7800,7 +7685,7 @@ func (m *QueryStreamResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.StreamingSeriesChunks = append(m.StreamingSeriesChunks, QueryStreamSeriesChunks{}) + m.StreamingSeriesChunks = append(m.StreamingSeriesChunks, CustomQueryStreamSeriesChunks{}) if err := m.StreamingSeriesChunks[len(m.StreamingSeriesChunks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -8099,7 +7984,7 @@ func (m *ExemplarQueryResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Timeseries = append(m.Timeseries, mimirpb.TimeSeries{}) + m.Timeseries = append(m.Timeseries, github_com_grafana_mimir_pkg_mimirpb.CustomTimeSeries{}) if err := m.Timeseries[len(m.Timeseries)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -9152,7 +9037,7 @@ func (m *MetricsForLabelMatchersResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Metric = append(m.Metric, &mimirpb.Metric{}) + m.Metric = append(m.Metric, github_com_grafana_mimir_pkg_mimirpb.CustomMetric{}) if err := m.Metric[len(m.Metric)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ingester/client/ingester.pb.go.expdiff b/pkg/ingester/client/ingester.pb.go.expdiff deleted file mode 100644 index a192a4d43e8..00000000000 --- a/pkg/ingester/client/ingester.pb.go.expdiff +++ /dev/null @@ -1,54 +0,0 @@ -diff --git a/pkg/ingester/client/ingester.pb.go b/pkg/ingester/client/ingester.pb.go -index 9398a5d80..bbefc14b1 100644 ---- a/pkg/ingester/client/ingester.pb.go -+++ b/pkg/ingester/client/ingester.pb.go -@@ -582,9 +582,6 @@ func (m *ActiveSeriesRequest) GetType() ActiveSeriesRequest_RequestType { - } - - type QueryResponse struct { -- // Keep reference to buffer for unsafe references. -- mimirpb.BufferHolder -- - Timeseries []mimirpb.TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"` - } - -@@ -636,9 +633,6 @@ func (m *QueryResponse) GetTimeseries() []mimirpb.TimeSeries { - // - // Only one of these two options will be populated. - type QueryStreamResponse struct { -- // Keep reference to buffer for unsafe references. 
-- mimirpb.BufferHolder -- - Chunkseries []TimeSeriesChunk `protobuf:"bytes,1,rep,name=chunkseries,proto3" json:"chunkseries"` - Timeseries []mimirpb.TimeSeries `protobuf:"bytes,2,rep,name=timeseries,proto3" json:"timeseries"` - StreamingSeries []QueryStreamSeries `protobuf:"bytes,3,rep,name=streaming_series,json=streamingSeries,proto3" json:"streaming_series"` -@@ -809,9 +803,6 @@ func (m *QueryStreamSeriesChunks) GetChunks() []Chunk { - } - - type ExemplarQueryResponse struct { -- // Keep reference to buffer for unsafe references. -- mimirpb.BufferHolder -- - Timeseries []mimirpb.TimeSeries `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"` - } - -@@ -1330,9 +1321,6 @@ func (m *MetricsForLabelMatchersRequest) GetMatchersSet() []*LabelMatchers { - } - - type MetricsForLabelMatchersResponse struct { -- // Keep reference to buffer for unsafe references. -- mimirpb.BufferHolder -- - Metric []*mimirpb.Metric `protobuf:"bytes,1,rep,name=metric,proto3" json:"metric,omitempty"` - } - -@@ -1478,9 +1466,6 @@ func (m *MetricsMetadataResponse) GetMetadata() []*mimirpb.MetricMetadata { - } - - type ActiveSeriesResponse struct { -- // Keep reference to buffer for unsafe references. -- mimirpb.BufferHolder -- - Metric []*mimirpb.Metric `protobuf:"bytes,1,rep,name=metric,proto3" json:"metric,omitempty"` - // bucket_count is only used when the request type was NATIVE_HISTOGRAM_SERIES. - // bucket_count contains the native histogram active buckets count for each series in "metric" above. diff --git a/pkg/ingester/client/ingester.proto b/pkg/ingester/client/ingester.proto index b2dd18b2a90..0495e7c5199 100644 --- a/pkg/ingester/client/ingester.proto +++ b/pkg/ingester/client/ingester.proto @@ -113,13 +113,13 @@ message QueryResponse { // // Only one of these two options will be populated. 
message QueryStreamResponse { - repeated TimeSeriesChunk chunkseries = 1 [(gogoproto.nullable) = false]; - repeated cortexpb.TimeSeries timeseries = 2 [(gogoproto.nullable) = false]; + repeated TimeSeriesChunk chunkseries = 1 [(gogoproto.nullable) = false, (gogoproto.customtype) = "CustomTimeSeriesChunk"]; + repeated cortexpb.TimeSeries timeseries = 2 [(gogoproto.nullable) = false, (gogoproto.customtype) = "github.com/grafana/mimir/pkg/mimirpb.CustomTimeSeries"]; - repeated QueryStreamSeries streaming_series = 3 [(gogoproto.nullable) = false]; + repeated QueryStreamSeries streaming_series = 3 [(gogoproto.nullable) = false, (gogoproto.customtype) = "CustomQueryStreamSeries"]; bool is_end_of_series_stream = 4; - repeated QueryStreamSeriesChunks streaming_series_chunks = 5 [(gogoproto.nullable) = false]; + repeated QueryStreamSeriesChunks streaming_series_chunks = 5 [(gogoproto.nullable) = false, (gogoproto.customtype) = "CustomQueryStreamSeriesChunks"]; } message QueryStreamSeries { @@ -133,7 +133,7 @@ message QueryStreamSeriesChunks { } message ExemplarQueryResponse { - repeated cortexpb.TimeSeries timeseries = 1 [(gogoproto.nullable) = false]; + repeated cortexpb.TimeSeries timeseries = 1 [(gogoproto.nullable) = false, (gogoproto.customtype) = "github.com/grafana/mimir/pkg/mimirpb.CustomTimeSeries"]; } message LabelValuesRequest { @@ -184,7 +184,7 @@ message MetricsForLabelMatchersRequest { } message MetricsForLabelMatchersResponse { - repeated cortexpb.Metric metric = 1; + repeated cortexpb.Metric metric = 1 [(gogoproto.customtype) = "github.com/grafana/mimir/pkg/mimirpb.CustomMetric"]; } message MetricsMetadataRequest { diff --git a/pkg/ingester/client/streaming.go b/pkg/ingester/client/streaming.go index f38c0cc341d..6175d3f334d 100644 --- a/pkg/ingester/client/streaming.go +++ b/pkg/ingester/client/streaming.go @@ -43,10 +43,10 @@ type SeriesChunksStreamReader struct { cleanup func() log log.Logger - seriesBatchChan chan []QueryStreamSeriesChunks + seriesBatchChan chan []CustomQueryStreamSeriesChunks errorChan chan error err error - seriesBatch []QueryStreamSeriesChunks + seriesBatch []CustomQueryStreamSeriesChunks // Keeping the ingester name for debug logs. ingesterName string @@ -85,7 +85,7 @@ func (s *SeriesChunksStreamReader) Close() { // If an error occurs while streaming, a subsequent call to GetChunks will return an error. // To cancel buffering, cancel the context associated with this SeriesChunksStreamReader's client.Ingester_QueryStreamClient. func (s *SeriesChunksStreamReader) StartBuffering() { - s.seriesBatchChan = make(chan []QueryStreamSeriesChunks, 1) + s.seriesBatchChan = make(chan []CustomQueryStreamSeriesChunks, 1) // Important: to ensure that the goroutine does not become blocked and leak, the goroutine must only ever write to errorChan at most once. s.errorChan = make(chan error, 1) @@ -137,13 +137,13 @@ func (s *SeriesChunksStreamReader) readStream(log *spanlogger.SpanLogger) error } if len(msg.StreamingSeriesChunks) == 0 { - msg.FreeBuffer() + msg.Release() continue } totalSeries += len(msg.StreamingSeriesChunks) if totalSeries > s.expectedSeriesCount { - msg.FreeBuffer() + msg.Release() return fmt.Errorf("expected to receive only %v series, but received at least %v series", s.expectedSeriesCount, totalSeries) } @@ -159,11 +159,11 @@ func (s *SeriesChunksStreamReader) readStream(log *spanlogger.SpanLogger) error // The chunk count limit is enforced earlier, while we're reading series labels, so we don't need to do that here. 
if err := s.queryLimiter.AddChunkBytes(chunkBytes); err != nil { - msg.FreeBuffer() + msg.Release() return err } - rslt := make([]QueryStreamSeriesChunks, 0, len(msg.StreamingSeriesChunks)) + rslt := make([]CustomQueryStreamSeriesChunks, 0, len(msg.StreamingSeriesChunks)) for _, chunks := range msg.StreamingSeriesChunks { safeChunks := make([]Chunk, 0, len(chunks.Chunks)) for _, c := range chunks.Chunks { @@ -175,7 +175,7 @@ func (s *SeriesChunksStreamReader) readStream(log *spanlogger.SpanLogger) error chunks.Chunks = safeChunks rslt = append(rslt, chunks) } - msg.FreeBuffer() + msg.Release() select { case <-s.ctx.Done(): // Why do we abort if the context is done? diff --git a/pkg/ingester/client/streaming_test.go b/pkg/ingester/client/streaming_test.go index c284a4c8b00..6f64100afd0 100644 --- a/pkg/ingester/client/streaming_test.go +++ b/pkg/ingester/client/streaming_test.go @@ -390,7 +390,7 @@ func createTestChunk(t *testing.T, time int64, value float64) Chunk { type mockQueryStreamClient struct { ctx context.Context - batches [][]QueryStreamSeriesChunks + batches [][]CustomQueryStreamSeriesChunks closed atomic.Bool } diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index 5d490d8b56a..bc2e97bf020 100644 --- a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -1664,9 +1664,11 @@ func (i *Ingester) QueryExemplars(ctx context.Context, req *client.ExemplarQuery result := &client.ExemplarQueryResponse{} for _, es := range res { - ts := mimirpb.TimeSeries{ - Labels: mimirpb.FromLabelsToLabelAdapters(es.SeriesLabels), - Exemplars: mimirpb.FromExemplarsToExemplarProtos(es.Exemplars), + ts := mimirpb.CustomTimeSeries{ + TimeSeries: &mimirpb.TimeSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(es.SeriesLabels), + Exemplars: mimirpb.FromExemplarsToExemplarProtos(es.Exemplars), + }, } numExemplars += len(ts.Exemplars) @@ -1842,7 +1844,7 @@ func (i *Ingester) MetricsForLabelMatchers(ctx context.Context, req *client.Metr // Generate the response merging all series sets. 
result := &client.MetricsForLabelMatchersResponse{ - Metric: make([]*mimirpb.Metric, 0), + Metric: make([]mimirpb.CustomMetric, 0), } mergedSet := storage.NewMergeSeriesSet(sets, 0, storage.ChainedSeriesMerge) @@ -1852,8 +1854,10 @@ func (i *Ingester) MetricsForLabelMatchers(ctx context.Context, req *client.Metr return nil, ctx.Err() } - result.Metric = append(result.Metric, &mimirpb.Metric{ - Labels: mimirpb.FromLabelsToLabelAdapters(mergedSet.At().Labels()), + result.Metric = append(result.Metric, mimirpb.CustomMetric{ + Metric: &mimirpb.Metric{ + Labels: mimirpb.FromLabelsToLabelAdapters(mergedSet.At().Labels()), + }, }) } @@ -2166,15 +2170,17 @@ func (i *Ingester) executeSamplesQuery(ctx context.Context, db *userTSDB, from, return 0, 0, ss.Err() } - timeseries := make([]mimirpb.TimeSeries, 0, queryStreamBatchSize) + timeseries := make([]mimirpb.CustomTimeSeries, 0, queryStreamBatchSize) batchSizeBytes := 0 var it chunkenc.Iterator for ss.Next() { series := ss.At() // convert labels to LabelAdapter - ts := mimirpb.TimeSeries{ - Labels: mimirpb.FromLabelsToLabelAdapters(series.Labels()), + ts := mimirpb.CustomTimeSeries{ + TimeSeries: &mimirpb.TimeSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(series.Labels()), + }, } it = series.Iterator(it) @@ -2264,15 +2270,17 @@ func (i *Ingester) executeChunksQuery(ctx context.Context, db *userTSDB, from, t return 0, 0, errors.Wrap(ss.Err(), "selecting series from ChunkQuerier") } - chunkSeries := make([]client.TimeSeriesChunk, 0, queryStreamBatchSize) + chunkSeries := make([]client.CustomTimeSeriesChunk, 0, queryStreamBatchSize) batchSizeBytes := 0 var it chunks.Iterator for ss.Next() { series := ss.At() // convert labels to LabelAdapter - ts := client.TimeSeriesChunk{ - Labels: mimirpb.FromLabelsToLabelAdapters(series.Labels()), + ts := client.CustomTimeSeriesChunk{ + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: mimirpb.FromLabelsToLabelAdapters(series.Labels()), + }, } it = series.Iterator(it) @@ -2416,7 +2424,7 @@ func (i *Ingester) sendStreamingQuerySeries(ctx context.Context, q storage.Chunk return nil, 0, errors.Wrap(ss.Err(), "selecting series from ChunkQuerier") } - seriesInBatch := make([]client.QueryStreamSeries, 0, queryStreamBatchSize) + seriesInBatch := make([]client.CustomQueryStreamSeries, 0, queryStreamBatchSize) // Why retain the storage.ChunkSeries instead of their chunks.Iterator? If we get the iterators here, // we can't re-use them. 
Re-using iterators has a bigger impact on allocations/memory than trying to @@ -2445,9 +2453,11 @@ func (i *Ingester) sendStreamingQuerySeries(ctx context.Context, q storage.Chunk return nil, 0, errors.Wrap(err, "getting ChunkSeries chunk count") } - seriesInBatch = append(seriesInBatch, client.QueryStreamSeries{ - Labels: mimirpb.FromLabelsToLabelAdapters(series.Labels()), - ChunkCount: int64(chunkCount), + seriesInBatch = append(seriesInBatch, client.CustomQueryStreamSeries{ + QueryStreamSeries: &client.QueryStreamSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(series.Labels()), + ChunkCount: int64(chunkCount), + }, }) if len(seriesInBatch) >= queryStreamBatchSize { @@ -2487,15 +2497,17 @@ func (i *Ingester) sendStreamingQueryChunks(allSeries *chunkSeriesNode, stream c numSamples = 0 numChunks = 0 numBatches = 0 - seriesInBatch = make([]client.QueryStreamSeriesChunks, 0, batchSize) + seriesInBatch = make([]client.CustomQueryStreamSeriesChunks, 0, batchSize) batchSizeBytes = 0 ) for currNode != nil { for _, series := range currNode.series { seriesIdx++ - seriesChunks := client.QueryStreamSeriesChunks{ - SeriesIndex: uint64(seriesIdx), + seriesChunks := client.CustomQueryStreamSeriesChunks{ + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: uint64(seriesIdx), + }, } it = series.Iterator(it) diff --git a/pkg/ingester/ingester_test.go b/pkg/ingester/ingester_test.go index a3df3b6d3b1..1741f56afaf 100644 --- a/pkg/ingester/ingester_test.go +++ b/pkg/ingester/ingester_test.go @@ -4920,7 +4920,7 @@ func Test_Ingester_MetricsForLabelMatchers_Deduplication(t *testing.T) { res, err := i.MetricsForLabelMatchers(ctx, req) require.NoError(t, err) - require.Len(t, res.GetMetric(), numSeries) + require.Len(t, res.Metric, numSeries) } func Benchmark_Ingester_MetricsForLabelMatchers(b *testing.B) { @@ -4953,7 +4953,7 @@ func Benchmark_Ingester_MetricsForLabelMatchers(b *testing.B) { res, err := i.MetricsForLabelMatchers(ctx, req) require.NoError(b, err) - require.Len(b, res.GetMetric(), numSeries) + require.Len(b, res.Metric, numSeries) } } @@ -5107,8 +5107,8 @@ func TestIngester_QueryStream(t *testing.T) { streamType = testData.streamType // Query all series. 
- var actualTimeseries []mimirpb.TimeSeries - var actualChunkseries []client.TimeSeriesChunk + var actualTimeseries []mimirpb.CustomTimeSeries + var actualChunkseries []client.CustomTimeSeriesChunk runQueryAndSaveResponse := func(req *client.QueryRequest) (receivedSeries int, err error) { s, err := c.QueryStream(ctx, req) @@ -5524,10 +5524,22 @@ func TestIngester_QueryStream_StreamingWithManySamples(t *testing.T) { require.NoError(t, err) seriesLabelsMsg := client.QueryStreamResponse{ - StreamingSeries: []client.QueryStreamSeries{ - {Labels: mimirpb.FromLabelsToLabelAdapters(labels.FromStrings(labels.MetricName, "foo", "l", "1")), ChunkCount: 834}, - {Labels: mimirpb.FromLabelsToLabelAdapters(labels.FromStrings(labels.MetricName, "foo", "l", "2")), ChunkCount: 8334}, - {Labels: mimirpb.FromLabelsToLabelAdapters(labels.FromStrings(labels.MetricName, "foo", "l", "3")), ChunkCount: 4167}, + StreamingSeries: []client.CustomQueryStreamSeries{ + { + QueryStreamSeries: &client.QueryStreamSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(labels.FromStrings(labels.MetricName, "foo", "l", "1")), ChunkCount: 834, + }, + }, + { + QueryStreamSeries: &client.QueryStreamSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(labels.FromStrings(labels.MetricName, "foo", "l", "2")), ChunkCount: 8334, + }, + }, + { + QueryStreamSeries: &client.QueryStreamSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(labels.FromStrings(labels.MetricName, "foo", "l", "3")), ChunkCount: 4167, + }, + }, }, IsEndOfSeriesStream: true, } @@ -6162,10 +6174,12 @@ func mockWriteRequest(t testing.TB, lbls labels.Labels, value float64, timestamp } expectedQueryStreamResSamples := &client.QueryStreamResponse{ - Timeseries: []mimirpb.TimeSeries{ + Timeseries: []mimirpb.CustomTimeSeries{ { - Labels: mimirpb.FromLabelsToLabelAdapters(lbls), - Samples: samples, + TimeSeries: &mimirpb.TimeSeries{ + Labels: mimirpb.FromLabelsToLabelAdapters(lbls), + Samples: samples, + }, }, }, } @@ -6177,15 +6191,17 @@ func mockWriteRequest(t testing.TB, lbls labels.Labels, value float64, timestamp chk.Compact() expectedQueryStreamResChunks := &client.QueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: mimirpb.FromLabelsToLabelAdapters(lbls), - Chunks: []client.Chunk{ - { - StartTimestampMs: timestampMs, - EndTimestampMs: timestampMs, - Encoding: int32(chunk.PrometheusXorChunk), - Data: chk.Bytes(), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: mimirpb.FromLabelsToLabelAdapters(lbls), + Chunks: []client.Chunk{ + { + StartTimestampMs: timestampMs, + EndTimestampMs: timestampMs, + Encoding: int32(chunk.PrometheusXorChunk), + Data: chk.Bytes(), + }, }, }, }, diff --git a/pkg/mimirpb/custom.go b/pkg/mimirpb/custom.go index d2df5e7449f..bedbedf0c87 100644 --- a/pkg/mimirpb/custom.go +++ b/pkg/mimirpb/custom.go @@ -4,8 +4,11 @@ package mimirpb import ( "bytes" + encoding_binary "encoding/binary" "fmt" + io "io" "math" + "sync" "github.com/prometheus/prometheus/model/histogram" "google.golang.org/grpc/encoding" @@ -15,6 +18,19 @@ import ( "google.golang.org/protobuf/protoadapt" ) +var ( + labelAdaptersPool = sync.Pool{ + New: func() any { + return []LabelAdapter{} + }, + } + metricPool = sync.Pool{ + New: func() any { + return &Metric{} + }, + } +) + func init() { c := encoding.GetCodecV2(proto.Name) encoding.RegisterCodecV2(&codecV2{codec: c}) @@ -324,3 +340,553 @@ func (t *UnsafeByteSlice) Size() int { func (t UnsafeByteSlice) Equal(other UnsafeByteSlice) bool { return bytes.Equal(t, 
other) } + +type CustomTimeSeries struct { + *TimeSeries +} + +// Release back to pool. +func (m *CustomTimeSeries) Release() { + ReuseTimeseries(m.TimeSeries) + m.TimeSeries = nil +} + +func (m *CustomTimeSeries) Unmarshal(data []byte) error { + m.TimeSeries = TimeseriesFromPool() + m.Labels = m.Labels[:0] + m.Samples = m.Samples[:0] + m.Exemplars = m.Exemplars[:0] + m.Histograms = m.Histograms[:0] + + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: TimeSeries: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: TimeSeries: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var la LabelAdapter + if err := unmarshalLabelAdapter(&la, data[index:postIndex]); err != nil { + return err + } + m.Labels = append(m.Labels, la) + index = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Samples", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Samples = append(m.Samples, Sample{}) + if err := m.Samples[len(m.Samples)-1].Unmarshal(data[index:postIndex]); err != nil { + return err + } + index = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Exemplars", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if !m.SkipUnmarshalingExemplars { + var ex Exemplar + if err := unmarshalExemplar(&ex, data[index:postIndex]); err != nil { + return err + } + m.Exemplars = append(m.Exemplars, ex) + } + index = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Histograms", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen 
|= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Histograms = append(m.Histograms, Histogram{}) + if err := m.Histograms[len(m.Histograms)-1].Unmarshal(data[index:postIndex]); err != nil { + return err + } + index = postIndex + default: + index = preIndex + skippy, err := skipMimir(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} + +func unmarshalLabelAdapter(la *LabelAdapter, data []byte) error { + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: LabelPair: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: LabelPair: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + byteLen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + // TODO: Consider using a pool: Get byte slice from pool, copy the data to it, and take a yoloString. + la.Name = string(data[index:postIndex]) + index = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + byteLen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + // TODO: Consider using a pool: Get byte slice from pool, copy the data to it, and take a yoloString. 
+ la.Value = string(data[index:postIndex]) + index = postIndex + default: + index = preIndex + skippy, err := skipMimir(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + if index > l { + return io.ErrUnexpectedEOF + } + + return nil +} + +func unmarshalExemplar(ex *Exemplar, data []byte) error { + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Exemplar: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Exemplar: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var la LabelAdapter + if err := unmarshalLabelAdapter(&la, data[index:postIndex]); err != nil { + return err + } + ex.Labels = append(ex.Labels, la) + index = postIndex + case 2: + if wireType != 1 { + return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) + } + var v uint64 + if (index + 8) > l { + return io.ErrUnexpectedEOF + } + v = uint64(encoding_binary.LittleEndian.Uint64(data[index:])) + index += 8 + ex.Value = float64(math.Float64frombits(v)) + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field TimestampMs", wireType) + } + ex.TimestampMs = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + ex.TimestampMs |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + index = preIndex + skippy, err := skipMimir(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} + +type CustomMetric struct { + *Metric +} + +// Release back to pool. 
+func (m *CustomMetric) Release() { + m.Labels = m.Labels[:0] + metricPool.Put(m.Metric) + m.Metric = nil +} + +func (m *CustomMetric) Unmarshal(data []byte) error { + m.Metric = metricPool.Get().(*Metric) + m.Labels = m.Labels[:0] + + l := len(data) + index := 0 + for index < l { + preIndex := index + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Metric: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Metric: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowMimir + } + if index >= l { + return io.ErrUnexpectedEOF + } + b := data[index] + index++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthMimir + } + postIndex := index + msglen + if postIndex < 0 { + return ErrInvalidLengthMimir + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var la LabelAdapter + if err := unmarshalLabelAdapter(&la, data[index:postIndex]); err != nil { + return err + } + m.Labels = append(m.Labels, la) + index = postIndex + default: + index = preIndex + skippy, err := skipMimir(data[index:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) < 0 { + return ErrInvalidLengthMimir + } + if (index + skippy) > l { + return io.ErrUnexpectedEOF + } + index += skippy + } + } + + if index > l { + return io.ErrUnexpectedEOF + } + return nil +} diff --git a/pkg/querier/distributor_queryable_streaming_test.go b/pkg/querier/distributor_queryable_streaming_test.go index ef62a42b6e9..0c47589200f 100644 --- a/pkg/querier/distributor_queryable_streaming_test.go +++ b/pkg/querier/distributor_queryable_streaming_test.go @@ -34,8 +34,20 @@ func TestStreamingChunkSeries_HappyPath(t *testing.T) { series := streamingChunkSeries{ labels: labels.FromStrings("the-name", "the-value"), sources: []client.StreamingSeriesSource{ - {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.QueryStreamSeriesChunks{{SeriesIndex: 0, Chunks: []client.Chunk{chunkUniqueToFirstSource, chunkPresentInBothSources}}})}, - {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.QueryStreamSeriesChunks{{SeriesIndex: 0, Chunks: []client.Chunk{chunkUniqueToSecondSource, chunkPresentInBothSources}}})}, + {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.CustomQueryStreamSeriesChunks{ + { + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: 0, Chunks: []client.Chunk{chunkUniqueToFirstSource, chunkPresentInBothSources}, + }, + }, + })}, + {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.CustomQueryStreamSeriesChunks{ + { + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: 0, Chunks: []client.Chunk{chunkUniqueToSecondSource, chunkPresentInBothSources}, + }, + }, + })}, }, context: &streamingChunkSeriesContext{ queryMetrics: stats.NewQueryMetrics(reg), @@ -97,7 +109,7 @@ func TestStreamingChunkSeries_StreamReaderReturnsError(t *testing.T) { labels: labels.FromStrings("the-name", 
"the-value"), // Create a stream reader that will always return an error because we'll try to read a series when it has no series to read. sources: []client.StreamingSeriesSource{ - {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.QueryStreamSeriesChunks{})}, + {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.CustomQueryStreamSeriesChunks{})}, }, context: &streamingChunkSeriesContext{ queryMetrics: stats.NewQueryMetrics(reg), @@ -114,7 +126,13 @@ func TestStreamingChunkSeries_CreateIteratorTwice(t *testing.T) { series := streamingChunkSeries{ labels: labels.FromStrings("the-name", "the-value"), sources: []client.StreamingSeriesSource{ - {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.QueryStreamSeriesChunks{{SeriesIndex: 0, Chunks: []client.Chunk{createTestChunk(t, 1500, 1.23)}}})}, + {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.CustomQueryStreamSeriesChunks{ + { + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: 0, Chunks: []client.Chunk{createTestChunk(t, 1500, 1.23)}}, + }, + }, + )}, }, context: &streamingChunkSeriesContext{ queryMetrics: stats.NewQueryMetrics(prometheus.NewPedanticRegistry()), @@ -144,7 +162,7 @@ func createTestChunk(t *testing.T, time int64, value float64) client.Chunk { return chunks[0] } -func createTestStreamReader(batches ...[]client.QueryStreamSeriesChunks) *client.SeriesChunksStreamReader { +func createTestStreamReader(batches ...[]client.CustomQueryStreamSeriesChunks) *client.SeriesChunksStreamReader { seriesCount := 0 for _, batch := range batches { @@ -167,7 +185,7 @@ func createTestStreamReader(batches ...[]client.QueryStreamSeriesChunks) *client type mockQueryStreamClient struct { ctx context.Context - batches [][]client.QueryStreamSeriesChunks + batches [][]client.CustomQueryStreamSeriesChunks } func (m *mockQueryStreamClient) Recv() (*client.QueryStreamResponse, error) { diff --git a/pkg/querier/distributor_queryable_test.go b/pkg/querier/distributor_queryable_test.go index 28a7775190a..4e4e180b7ac 100644 --- a/pkg/querier/distributor_queryable_test.go +++ b/pkg/querier/distributor_queryable_test.go @@ -143,18 +143,22 @@ func TestDistributorQuerier_Select(t *testing.T) { }{ "chunk series": { response: client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: []mimirpb.LabelAdapter{ - {Name: "bar", Value: "baz"}, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{ + {Name: "bar", Value: "baz"}, + }, + Chunks: clientChunks, }, - Chunks: clientChunks, }, { - Labels: []mimirpb.LabelAdapter{ - {Name: "foo", Value: "bar"}, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{ + {Name: "foo", Value: "bar"}, + }, + Chunks: clientChunks, }, - Chunks: clientChunks, }, }, }, @@ -240,33 +244,49 @@ func TestDistributorQuerier_Select_MixedChunkseriesTimeseriesAndStreamingResults mimirpb.Sample{TimestampMs: 7000, Value: 7}, } - streamReader := createTestStreamReader([]client.QueryStreamSeriesChunks{ - {SeriesIndex: 0, Chunks: convertToChunks(t, samplesToInterface(s4), false)}, - {SeriesIndex: 1, Chunks: convertToChunks(t, samplesToInterface(s3), false)}, + streamReader := createTestStreamReader([]client.CustomQueryStreamSeriesChunks{ + { + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: 0, Chunks: convertToChunks(t, samplesToInterface(s4), false), + }, + }, + { + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: 
1, Chunks: convertToChunks(t, samplesToInterface(s3), false), + }, + }, }) d := &mockDistributor{} d.On("QueryStream", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: convertToChunks(t, samplesToInterface(s1), false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: convertToChunks(t, samplesToInterface(s1), false), + }, }, { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, - Chunks: convertToChunks(t, samplesToInterface(s1), false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, + Chunks: convertToChunks(t, samplesToInterface(s1), false), + }, }, }, - Timeseries: []mimirpb.TimeSeries{ + Timeseries: []mimirpb.CustomTimeSeries{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, - Samples: s2, + TimeSeries: &mimirpb.TimeSeries{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, + Samples: s2, + }, }, { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "three"}}, - Samples: s1, + TimeSeries: &mimirpb.TimeSeries{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "three"}}, + Samples: s1, + }, }, }, @@ -374,25 +394,33 @@ func TestDistributorQuerier_Select_MixedFloatAndIntegerHistograms(t *testing.T) d := &mockDistributor{} d.On("QueryStream", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: convertToChunks(t, histogramsToInterface(s1), false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: convertToChunks(t, histogramsToInterface(s1), false), + }, }, { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, - Chunks: convertToChunks(t, histogramsToInterface(s1), false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, + Chunks: convertToChunks(t, histogramsToInterface(s1), false), + }, }, }, - Timeseries: []mimirpb.TimeSeries{ + Timeseries: []mimirpb.CustomTimeSeries{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, - Histograms: s2, + TimeSeries: &mimirpb.TimeSeries{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, + Histograms: s2, + }, }, { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "three"}}, - Histograms: s1, + TimeSeries: &mimirpb.TimeSeries{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "three"}}, + Histograms: s1, + }, }, }, }, @@ -473,27 +501,35 @@ func TestDistributorQuerier_Select_MixedHistogramsAndFloatSamples(t *testing.T) d := &mockDistributor{} d.On("QueryStream", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: convertToChunks(t, 
append(samplesToInterface(s1), histogramsToInterface(h1)...), false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: convertToChunks(t, append(samplesToInterface(s1), histogramsToInterface(h1)...), false), + }, }, { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, - Chunks: convertToChunks(t, append(samplesToInterface(s1), histogramsToInterface(h1)...), false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, + Chunks: convertToChunks(t, append(samplesToInterface(s1), histogramsToInterface(h1)...), false), + }, }, }, - Timeseries: []mimirpb.TimeSeries{ + Timeseries: []mimirpb.CustomTimeSeries{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, - Samples: s2, - Histograms: h2, + TimeSeries: &mimirpb.TimeSeries{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "two"}}, + Samples: s2, + Histograms: h2, + }, }, { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "three"}}, - Samples: s1, - Histograms: h1, + TimeSeries: &mimirpb.TimeSeries{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "three"}}, + Samples: s1, + Histograms: h1, + }, }, }, }, @@ -591,10 +627,12 @@ func TestDistributorQuerier_Select_CounterResets(t *testing.T) { }{ "chunkseries": { combinedResponse: client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: tc.chunks, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: tc.chunks, + }, }, }, }, @@ -605,8 +643,12 @@ func TestDistributorQuerier_Select_CounterResets(t *testing.T) { { Labels: labels.FromStrings(labels.MetricName, "one"), Sources: []client.StreamingSeriesSource{ - {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.QueryStreamSeriesChunks{ - {SeriesIndex: 0, Chunks: tc.chunks}, + {SeriesIndex: 0, StreamReader: createTestStreamReader([]client.CustomQueryStreamSeriesChunks{ + { + QueryStreamSeriesChunks: &client.QueryStreamSeriesChunks{ + SeriesIndex: 0, Chunks: tc.chunks, + }, + }, })}, }, }, @@ -689,14 +731,16 @@ func BenchmarkDistributorQuerier_Select(b *testing.B) { commonLabelsBuilder.Sort() commonLabels := commonLabelsBuilder.Labels() - response := client.CombinedQueryStreamResponse{Chunkseries: make([]client.TimeSeriesChunk, 0, numSeries)} + response := client.CombinedQueryStreamResponse{Chunkseries: make([]client.CustomTimeSeriesChunk, 0, numSeries)} for i := 0; i < numSeries; i++ { lbls := labels.NewBuilder(commonLabels) lbls.Set("series_id", strconv.Itoa(i)) - response.Chunkseries = append(response.Chunkseries, client.TimeSeriesChunk{ - Labels: mimirpb.FromLabelsToLabelAdapters(lbls.Labels()), - Chunks: clientChunks, + response.Chunkseries = append(response.Chunkseries, client.CustomTimeSeriesChunk{ + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: mimirpb.FromLabelsToLabelAdapters(lbls.Labels()), + Chunks: clientChunks, + }, }) } diff --git a/pkg/querier/duplicates_test.go b/pkg/querier/duplicates_test.go index 1b3d29e21aa..416026be578 100644 --- a/pkg/querier/duplicates_test.go +++ b/pkg/querier/duplicates_test.go @@ -91,7 +91,11 @@ func dedupeSorted(samples []mimirpb.Sample) []mimirpb.Sample { } func runPromQLAndGetJSONResult(t *testing.T, query string, ts 
mimirpb.TimeSeries, step time.Duration) string { - tq := &testQueryable{ts: newTimeSeriesSeriesSet([]mimirpb.TimeSeries{ts})} + tq := &testQueryable{ts: newTimeSeriesSeriesSet([]mimirpb.CustomTimeSeries{ + { + TimeSeries: &ts, + }, + })} engine := promql.NewEngine(promql.EngineOpts{ Logger: promslog.NewNopLogger(), diff --git a/pkg/querier/querier_test.go b/pkg/querier/querier_test.go index 35ff3a9b088..a4efba9d028 100644 --- a/pkg/querier/querier_test.go +++ b/pkg/querier/querier_test.go @@ -269,43 +269,49 @@ func TestQuerier_QueryableReturnsChunksOutsideQueriedRange(t *testing.T) { distributor := &mockDistributor{} distributor.On("QueryStream", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ // Series with data points only before queryStart. { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: convertToChunks(t, []interface{}{ - mimirpb.Sample{TimestampMs: queryStart.Add(-9*time.Minute).Unix() * 1000, Value: 1}, - mimirpb.Sample{TimestampMs: queryStart.Add(-8*time.Minute).Unix() * 1000, Value: 1}, - mimirpb.Sample{TimestampMs: queryStart.Add(-7*time.Minute).Unix() * 1000, Value: 1}, - }, false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: convertToChunks(t, []interface{}{ + mimirpb.Sample{TimestampMs: queryStart.Add(-9*time.Minute).Unix() * 1000, Value: 1}, + mimirpb.Sample{TimestampMs: queryStart.Add(-8*time.Minute).Unix() * 1000, Value: 1}, + mimirpb.Sample{TimestampMs: queryStart.Add(-7*time.Minute).Unix() * 1000, Value: 1}, + }, false), + }, }, // Series with data points before and after queryStart, but before queryEnd. 
{ - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: convertToChunks(t, []interface{}{ - mimirpb.Sample{TimestampMs: queryStart.Add(-9*time.Minute).Unix() * 1000, Value: 1}, - mimirpb.Sample{TimestampMs: queryStart.Add(-8*time.Minute).Unix() * 1000, Value: 3}, - mimirpb.Sample{TimestampMs: queryStart.Add(-7*time.Minute).Unix() * 1000, Value: 5}, - mimirpb.Sample{TimestampMs: queryStart.Add(-6*time.Minute).Unix() * 1000, Value: 7}, - mimirpb.Sample{TimestampMs: queryStart.Add(-5*time.Minute).Unix() * 1000, Value: 11}, - mimirpb.Sample{TimestampMs: queryStart.Add(-4*time.Minute).Unix() * 1000, Value: 13}, - mimirpb.Sample{TimestampMs: queryStart.Add(-3*time.Minute).Unix() * 1000, Value: 17}, - mimirpb.Sample{TimestampMs: queryStart.Add(-2*time.Minute).Unix() * 1000, Value: 19}, - mimirpb.Sample{TimestampMs: queryStart.Add(-1*time.Minute).Unix() * 1000, Value: 23}, - mimirpb.Sample{TimestampMs: queryStart.Add(+0*time.Minute).Unix() * 1000, Value: 29}, - mimirpb.Sample{TimestampMs: queryStart.Add(+1*time.Minute).Unix() * 1000, Value: 31}, - mimirpb.Sample{TimestampMs: queryStart.Add(+2*time.Minute).Unix() * 1000, Value: 37}, - }, false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: convertToChunks(t, []interface{}{ + mimirpb.Sample{TimestampMs: queryStart.Add(-9*time.Minute).Unix() * 1000, Value: 1}, + mimirpb.Sample{TimestampMs: queryStart.Add(-8*time.Minute).Unix() * 1000, Value: 3}, + mimirpb.Sample{TimestampMs: queryStart.Add(-7*time.Minute).Unix() * 1000, Value: 5}, + mimirpb.Sample{TimestampMs: queryStart.Add(-6*time.Minute).Unix() * 1000, Value: 7}, + mimirpb.Sample{TimestampMs: queryStart.Add(-5*time.Minute).Unix() * 1000, Value: 11}, + mimirpb.Sample{TimestampMs: queryStart.Add(-4*time.Minute).Unix() * 1000, Value: 13}, + mimirpb.Sample{TimestampMs: queryStart.Add(-3*time.Minute).Unix() * 1000, Value: 17}, + mimirpb.Sample{TimestampMs: queryStart.Add(-2*time.Minute).Unix() * 1000, Value: 19}, + mimirpb.Sample{TimestampMs: queryStart.Add(-1*time.Minute).Unix() * 1000, Value: 23}, + mimirpb.Sample{TimestampMs: queryStart.Add(+0*time.Minute).Unix() * 1000, Value: 29}, + mimirpb.Sample{TimestampMs: queryStart.Add(+1*time.Minute).Unix() * 1000, Value: 31}, + mimirpb.Sample{TimestampMs: queryStart.Add(+2*time.Minute).Unix() * 1000, Value: 37}, + }, false), + }, }, // Series with data points after queryEnd. 
{ - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, - Chunks: convertToChunks(t, []interface{}{ - mimirpb.Sample{TimestampMs: queryStart.Add(+4*time.Minute).Unix() * 1000, Value: 41}, - mimirpb.Sample{TimestampMs: queryStart.Add(+5*time.Minute).Unix() * 1000, Value: 43}, - mimirpb.Sample{TimestampMs: queryStart.Add(+6*time.Minute).Unix() * 1000, Value: 47}, - mimirpb.Sample{TimestampMs: queryStart.Add(+7*time.Minute).Unix() * 1000, Value: 53}, - }, false), + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}}, + Chunks: convertToChunks(t, []interface{}{ + mimirpb.Sample{TimestampMs: queryStart.Add(+4*time.Minute).Unix() * 1000, Value: 41}, + mimirpb.Sample{TimestampMs: queryStart.Add(+5*time.Minute).Unix() * 1000, Value: 43}, + mimirpb.Sample{TimestampMs: queryStart.Add(+6*time.Minute).Unix() * 1000, Value: 47}, + mimirpb.Sample{TimestampMs: queryStart.Add(+7*time.Minute).Unix() * 1000, Value: 53}, + }, false), + }, }, }, }, @@ -388,16 +394,20 @@ func TestBatchMergeChunks(t *testing.T) { distributor := &mockDistributor{} distributor.On("QueryStream", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ // Series with chunks in the 1,2 order, that need merge { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}, {Name: labels.InstanceName, Value: "foo"}}, - Chunks: chunks12, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}, {Name: labels.InstanceName, Value: "foo"}}, + Chunks: chunks12, + }, }, // Series with chunks in the 2,1 order, that need merge { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}, {Name: labels.InstanceName, Value: "bar"}}, - Chunks: chunks21, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}, {Name: labels.InstanceName, Value: "bar"}}, + Chunks: chunks21, + }, }, }, }, @@ -466,10 +476,12 @@ func BenchmarkQueryExecute(b *testing.B) { distributor := &mockDistributor{} distributor.On("QueryStream", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( client.CombinedQueryStreamResponse{ - Chunkseries: []client.TimeSeriesChunk{ + Chunkseries: []client.CustomTimeSeriesChunk{ { - Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}, {Name: labels.InstanceName, Value: "foo"}}, - Chunks: chunks, + TimeSeriesChunk: &client.TimeSeriesChunk{ + Labels: []mimirpb.LabelAdapter{{Name: labels.MetricName, Value: "one"}, {Name: labels.InstanceName, Value: "foo"}}, + Chunks: chunks, + }, }, }, }, diff --git a/pkg/querier/timeseries_series_set.go b/pkg/querier/timeseries_series_set.go index a1c1ea092a5..5d5b044b339 100644 --- a/pkg/querier/timeseries_series_set.go +++ b/pkg/querier/timeseries_series_set.go @@ -20,11 +20,11 @@ import ( // timeSeriesSeriesSet is a wrapper around a mimirpb.TimeSeries slice to implement to SeriesSet interface type timeSeriesSeriesSet struct { - ts []mimirpb.TimeSeries + ts []mimirpb.CustomTimeSeries i int } -func newTimeSeriesSeriesSet(series []mimirpb.TimeSeries) *timeSeriesSeriesSet { +func newTimeSeriesSeriesSet(series []mimirpb.CustomTimeSeries) *timeSeriesSeriesSet { sort.Sort(byTimeSeriesLabels(series)) return &timeSeriesSeriesSet{ ts: series, @@ -51,7 +51,7 @@ func (t *timeSeriesSeriesSet) 
Warnings() annotations.Annotations { return nil } // timeseries is a type wrapper that implements the storage.Series interface type timeseries struct { - series mimirpb.TimeSeries + series mimirpb.CustomTimeSeries } // timeSeriesSeriesIterator is a wrapper around a mimirpb.TimeSeries to implement the chunkenc.Iterator. @@ -62,7 +62,7 @@ type timeSeriesSeriesIterator struct { atH bool } -type byTimeSeriesLabels []mimirpb.TimeSeries +type byTimeSeriesLabels []mimirpb.CustomTimeSeries func (b byTimeSeriesLabels) Len() int { return len(b) } func (b byTimeSeriesLabels) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
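
The test churn above is mechanical: every literal that used to be a plain value type (client.TimeSeriesChunk, mimirpb.TimeSeries, and friends) is now wrapped in a Custom* struct that embeds a pointer to the generated message, and callers release responses with Release() instead of FreeBuffer(). The snippet below is a minimal sketch of that wrapper-plus-pool shape, included only to make the intent of the pattern easier to follow; it is not the code added in this patch, and the names pooledTimeSeries, timeSeriesPool and GetTimeSeries, as well as the sync.Pool lifecycle, are assumptions made purely for illustration.

package pooling

import "sync"

// TimeSeries stands in for a generated protobuf message (hypothetical,
// illustration only).
type TimeSeries struct {
	Labels  []string
	Samples []float64
}

// pooledTimeSeries mirrors the shape the updated tests construct: a value
// type that embeds a pointer to the message, so slices of wrappers stay
// cheap to copy while the underlying message can be reused.
type pooledTimeSeries struct {
	*TimeSeries
}

var timeSeriesPool = sync.Pool{
	New: func() any { return &TimeSeries{} },
}

// GetTimeSeries hands out a wrapper whose embedded message comes from the
// pool (assumed lifecycle; the real wrappers may acquire memory differently).
func GetTimeSeries() pooledTimeSeries {
	return pooledTimeSeries{TimeSeries: timeSeriesPool.Get().(*TimeSeries)}
}

// Release resets the message, keeping slice capacity, and returns it to the
// pool. This is the reason call sites in the patch defer resp.Release() once
// they are done with a response instead of the old FreeBuffer().
func (p pooledTimeSeries) Release() {
	if p.TimeSeries == nil {
		return
	}
	p.Labels = p.Labels[:0]
	p.Samples = p.Samples[:0]
	timeSeriesPool.Put(p.TimeSeries)
}

Usage follows the updated tests: build the literal with an embedded pointer (pooledTimeSeries{TimeSeries: &TimeSeries{...}}) when pooling is not involved, or obtain one from GetTimeSeries() and defer Release() when it is. Embedding the pointer rather than copying the message is what lets the generated protobuf types stay untouched while []Custom* slices gain a place to hang the pooling behaviour.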