From 946eaad5b45fed163a4466d20bdb201f5f7bb0fb Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 27 Jan 2022 11:47:07 +0100 Subject: [PATCH 01/14] Changes the iterator interface to include labelshash. Signed-off-by: Cyril Tovena --- pkg/iter/cache.go | 18 +++++- pkg/iter/cache_test.go | 1 + pkg/iter/entry_iterator.go | 94 ++++++++++++++++++++------------ pkg/iter/entry_iterator_test.go | 3 +- pkg/iter/iterator.go | 28 ++++++++++ pkg/iter/sample_iterator.go | 73 ++++++++++++++++++------- pkg/iter/sample_iterator_test.go | 6 +- 7 files changed, 164 insertions(+), 59 deletions(-) create mode 100644 pkg/iter/iterator.go diff --git a/pkg/iter/cache.go b/pkg/iter/cache.go index 7e8bf5e84056..49bcfafe28c3 100644 --- a/pkg/iter/cache.go +++ b/pkg/iter/cache.go @@ -53,7 +53,7 @@ func (it *cachedIterator) consumeWrapped() bool { return false } // we're caching entries - it.cache = append(it.cache, entryWithLabels{entry: it.Wrapped().Entry(), labels: it.Wrapped().Labels()}) + it.cache = append(it.cache, entryWithLabels{entry: it.Wrapped().Entry(), labels: it.Wrapped().Labels(), labelsHash: it.Wrapped().LabelsHash()}) it.curr++ return true } @@ -87,6 +87,13 @@ func (it *cachedIterator) Labels() string { return it.cache[it.curr].labels } +func (it *cachedIterator) LabelsHash() uint64 { + if len(it.cache) == 0 || it.curr < 0 || it.curr >= len(it.cache) { + return 0 + } + return it.cache[it.curr].labelsHash +} + func (it *cachedIterator) Error() error { return it.iterErr } func (it *cachedIterator) Close() error { @@ -143,7 +150,7 @@ func (it *cachedSampleIterator) consumeWrapped() bool { return false } // we're caching entries - it.cache = append(it.cache, sampleWithLabels{Sample: it.Wrapped().Sample(), labels: it.Wrapped().Labels()}) + it.cache = append(it.cache, sampleWithLabels{Sample: it.Wrapped().Sample(), labels: it.Wrapped().Labels(), labelsHash: it.Wrapped().LabelsHash()}) it.curr++ return true } @@ -176,6 +183,13 @@ func (it *cachedSampleIterator) Labels() string { return it.cache[it.curr].labels } +func (it *cachedSampleIterator) LabelsHash() uint64 { + if len(it.cache) == 0 || it.curr < 0 || it.curr >= len(it.cache) { + return 0 + } + return it.cache[it.curr].labelsHash +} + func (it *cachedSampleIterator) Error() error { return it.iterErr } func (it *cachedSampleIterator) Close() error { diff --git a/pkg/iter/cache_test.go b/pkg/iter/cache_test.go index a786aa5b7a4b..be42a64599d5 100644 --- a/pkg/iter/cache_test.go +++ b/pkg/iter/cache_test.go @@ -251,6 +251,7 @@ type errorIter struct{} func (errorIter) Next() bool { return false } func (errorIter) Error() error { return errors.New("error") } func (errorIter) Labels() string { return "" } +func (errorIter) LabelsHash() uint64 { return 0 } func (errorIter) Entry() logproto.Entry { return logproto.Entry{} } func (errorIter) Sample() logproto.Sample { return logproto.Sample{} } func (errorIter) Close() error { return errors.New("close") } diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 282b39adc056..917478e54786 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -14,24 +14,10 @@ import ( // EntryIterator iterates over entries in time-order. 
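A quick illustration of the ordering change in the heap hunks below: ties on timestamp are now broken by comparing precomputed uint64 hashes rather than label strings. This is a sketch with a hypothetical helper name, not code from the patch:

```go
package example

import "github.com/grafana/loki/pkg/iter"

// entryLess mirrors the forward (min-heap) ordering used in this patch:
// earlier timestamps win, and ties are broken by the cheaper uint64
// labels-hash comparison instead of a label-string comparison.
func entryLess(a, b iter.EntryIterator) bool {
	ta, tb := a.Entry().Timestamp, b.Entry().Timestamp
	if !ta.Equal(tb) {
		return ta.Before(tb)
	}
	return a.LabelsHash() < b.LabelsHash()
}
```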
type EntryIterator interface { - Next() bool + Iterator Entry() logproto.Entry - Labels() string - Error() error - Close() error } -type noOpIterator struct{} - -var NoopIterator = noOpIterator{} - -func (noOpIterator) Next() bool { return false } -func (noOpIterator) Error() error { return nil } -func (noOpIterator) Labels() string { return "" } -func (noOpIterator) Entry() logproto.Entry { return logproto.Entry{} } -func (noOpIterator) Sample() logproto.Sample { return logproto.Sample{} } -func (noOpIterator) Close() error { return nil } - // streamIterator iterates over entries in a stream. type streamIterator struct { i int @@ -57,6 +43,9 @@ func (i *streamIterator) Error() error { return nil } +// todo +func (i *streamIterator) LabelsHash() uint64 { return 0 } + func (i *streamIterator) Labels() string { return i.labels } @@ -102,7 +91,7 @@ func (h iteratorMinHeap) Less(i, j int) bool { case un1 > un2: return false default: // un1 == un2: - return h.iteratorHeap[i].Labels() < h.iteratorHeap[j].Labels() + return h.iteratorHeap[i].LabelsHash() < h.iteratorHeap[j].LabelsHash() } } @@ -122,7 +111,7 @@ func (h iteratorMaxHeap) Less(i, j int) bool { case un1 > un2: return true default: // un1 == un2 - return h.iteratorHeap[i].Labels() < h.iteratorHeap[j].Labels() + return h.iteratorHeap[i].LabelsHash() < h.iteratorHeap[j].LabelsHash() } } @@ -145,10 +134,11 @@ type heapIterator struct { prefetched bool stats *stats.Context - tuples []tuple - currEntry logproto.Entry - currLabels string - errs []error + tuples []tuple + currEntry logproto.Entry + currLabels string + currLabelsHash uint64 + errs []error } // NewHeapIterator returns a new iterator which uses a heap to merge together @@ -224,6 +214,7 @@ func (i *heapIterator) Next() bool { if i.heap.Len() == 1 { i.currEntry = i.heap.Peek().Entry() i.currLabels = i.heap.Peek().Labels() + i.currLabelsHash = i.heap.Peek().LabelsHash() if !i.heap.Peek().Next() { i.heap.Pop() } @@ -237,7 +228,7 @@ func (i *heapIterator) Next() bool { for i.heap.Len() > 0 { next := i.heap.Peek() entry := next.Entry() - if len(i.tuples) > 0 && (i.tuples[0].Labels() != next.Labels() || !i.tuples[0].Timestamp.Equal(entry.Timestamp)) { + if len(i.tuples) > 0 && (i.tuples[0].LabelsHash() != next.LabelsHash() || !i.tuples[0].Timestamp.Equal(entry.Timestamp)) { break } @@ -252,6 +243,7 @@ func (i *heapIterator) Next() bool { if len(i.tuples) == 1 { i.currEntry = i.tuples[0].Entry i.currLabels = i.tuples[0].Labels() + i.currLabelsHash = i.tuples[0].LabelsHash() i.requeue(i.tuples[0].EntryIterator, false) i.tuples = i.tuples[:0] return true @@ -262,6 +254,7 @@ func (i *heapIterator) Next() bool { t := i.tuples[0] i.currEntry = t.Entry i.currLabels = t.Labels() + i.currLabelsHash = t.LabelsHash() // Requeue the iterators, advancing them if they were consumed. 
for j := range i.tuples { @@ -287,6 +280,8 @@ func (i *heapIterator) Labels() string { return i.currLabels } +func (i *heapIterator) LabelsHash() uint64 { return i.currLabelsHash } + func (i *heapIterator) Error() error { switch len(i.errs) { case 0: @@ -379,6 +374,9 @@ func (i *queryClientIterator) Labels() string { return i.curr.Labels() } +// todo +func (i *queryClientIterator) LabelsHash() uint64 { return 0 } + func (i *queryClientIterator) Error() error { return i.err } @@ -388,16 +386,18 @@ func (i *queryClientIterator) Close() error { } type nonOverlappingIterator struct { - labels string - iterators []EntryIterator - curr EntryIterator + labels string + labelsHash uint64 + iterators []EntryIterator + curr EntryIterator } // NewNonOverlappingIterator gives a chained iterator over a list of iterators. -func NewNonOverlappingIterator(iterators []EntryIterator, labels string) EntryIterator { +func NewNonOverlappingIterator(iterators []EntryIterator, labels string, labelsHash uint64) EntryIterator { return &nonOverlappingIterator{ - labels: labels, - iterators: iterators, + labels: labels, + labelsHash: labelsHash, + iterators: iterators, } } @@ -430,6 +430,13 @@ func (i *nonOverlappingIterator) Labels() string { return i.curr.Labels() } +func (i *nonOverlappingIterator) LabelsHash() uint64 { + if i.labelsHash != 0 { + return i.labelsHash + } + return i.curr.LabelsHash() +} + func (i *nonOverlappingIterator) Error() error { if i.curr != nil { return i.curr.Error() @@ -492,8 +499,9 @@ func (i *timeRangedIterator) Next() bool { } type entryWithLabels struct { - entry logproto.Entry - labels string + entry logproto.Entry + labels string + labelsHash uint64 } type reverseIterator struct { @@ -529,7 +537,7 @@ func (i *reverseIterator) load() { if !i.loaded { i.loaded = true for count := uint32(0); (i.limit == 0 || count < i.limit) && i.iter.Next(); count++ { - i.entriesWithLabels = append(i.entriesWithLabels, entryWithLabels{i.iter.Entry(), i.iter.Labels()}) + i.entriesWithLabels = append(i.entriesWithLabels, entryWithLabels{i.iter.Entry(), i.iter.Labels(), i.iter.LabelsHash()}) } i.iter.Close() } @@ -553,6 +561,10 @@ func (i *reverseIterator) Labels() string { return i.cur.labels } +func (i *reverseIterator) LabelsHash() uint64 { + return i.cur.labelsHash +} + func (i *reverseIterator) Error() error { return nil } func (i *reverseIterator) Close() error { @@ -600,7 +612,7 @@ func (i *reverseEntryIterator) load() { if !i.loaded { i.loaded = true for i.iter.Next() { - i.buf.entries = append(i.buf.entries, entryWithLabels{i.iter.Entry(), i.iter.Labels()}) + i.buf.entries = append(i.buf.entries, entryWithLabels{i.iter.Entry(), i.iter.Labels(), i.iter.LabelsHash()}) } i.iter.Close() } @@ -625,6 +637,10 @@ func (i *reverseEntryIterator) Labels() string { return i.cur.labels } +func (i *reverseEntryIterator) LabelsHash() uint64 { + return i.cur.labelsHash +} + func (i *reverseEntryIterator) Error() error { return nil } func (i *reverseEntryIterator) Close() error { @@ -685,8 +701,9 @@ func NewPeekingIterator(iter EntryIterator) PeekingEntryIterator { next := &entryWithLabels{} if iter.Next() { cache = &entryWithLabels{ - entry: iter.Entry(), - labels: iter.Labels(), + entry: iter.Entry(), + labels: iter.Labels(), + labelsHash: iter.LabelsHash(), } next.entry = cache.entry next.labels = cache.labels @@ -703,6 +720,7 @@ func (it *peekingEntryIterator) Next() bool { if it.cache != nil { it.next.entry = it.cache.entry it.next.labels = it.cache.labels + it.next.labelsHash = it.cache.labelsHash 
it.cacheNext() return true } @@ -714,6 +732,7 @@ func (it *peekingEntryIterator) cacheNext() { if it.iter.Next() { it.cache.entry = it.iter.Entry() it.cache.labels = it.iter.Labels() + it.cache.labelsHash = it.iter.LabelsHash() return } // nothing left removes the cached entry @@ -736,6 +755,13 @@ func (it *peekingEntryIterator) Labels() string { return "" } +func (it *peekingEntryIterator) LabelsHash() uint64 { + if it.next != nil { + return it.next.labelsHash + } + return 0 +} + // Entry implements `EntryIterator` func (it *peekingEntryIterator) Entry() logproto.Entry { if it.next != nil { diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index 459c39d6b3e7..f4c351d9d1b1 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -615,6 +615,7 @@ type CloseTestingIterator struct { func (i *CloseTestingIterator) Next() bool { return true } func (i *CloseTestingIterator) Entry() logproto.Entry { return i.e } func (i *CloseTestingIterator) Labels() string { return "" } +func (i *CloseTestingIterator) LabelsHash() uint64 { return 0 } func (i *CloseTestingIterator) Error() error { return nil } func (i *CloseTestingIterator) Close() error { i.closed.Store(true) @@ -623,7 +624,7 @@ func (i *CloseTestingIterator) Close() error { func TestNonOverlappingClose(t *testing.T) { a, b := &CloseTestingIterator{}, &CloseTestingIterator{} - itr := NewNonOverlappingIterator([]EntryIterator{a, b}, "") + itr := NewNonOverlappingIterator([]EntryIterator{a, b}, "", 0) // Ensure both itr.cur and itr.iterators are non nil itr.Next() diff --git a/pkg/iter/iterator.go b/pkg/iter/iterator.go new file mode 100644 index 000000000000..fcfe44851500 --- /dev/null +++ b/pkg/iter/iterator.go @@ -0,0 +1,28 @@ +package iter + +import "github.com/grafana/loki/pkg/logproto" + +// Iterator iterates over data in time-order. +type Iterator interface { + // Returns true if there is more data to iterate. + Next() bool + // Labels returns the labels for the current entry. + // The labels can be mutated by the query engine and not reflect the original stream. + Labels() string + // LabelsHash returns a hash of the original stream labels for the current entry. + LabelsHash() uint64 + Error() error + Close() error +} + +type noOpIterator struct{} + +var NoopIterator = noOpIterator{} + +func (noOpIterator) Next() bool { return false } +func (noOpIterator) Error() error { return nil } +func (noOpIterator) Labels() string { return "" } +func (noOpIterator) LabelsHash() uint64 { return 0 } +func (noOpIterator) Entry() logproto.Entry { return logproto.Entry{} } +func (noOpIterator) Sample() logproto.Sample { return logproto.Sample{} } +func (noOpIterator) Close() error { return nil } diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 6d25610bbea8..1c39df08c685 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -13,13 +13,10 @@ import ( // SampleIterator iterates over samples in time-order. type SampleIterator interface { - Next() bool + Iterator // todo(ctovena) we should add `Seek(t int64) bool` // This way we can skip when ranging over samples. Sample() logproto.Sample - Labels() string - Error() error - Close() error } // PeekingSampleIterator is a sample iterator that can peek sample without moving the current sample. 
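With the shared Iterator interface introduced above, every iterator implementation now has to surface LabelsHash alongside Labels. A minimal conforming SampleIterator over a fixed slice, using hypothetical names (not from the patch), could look like this:

```go
package example

import (
	"github.com/grafana/loki/pkg/iter"
	"github.com/grafana/loki/pkg/logproto"
)

// constSampleIterator is a hypothetical SampleIterator over a prebuilt slice;
// the caller computes the stream hash once and the iterator just returns it.
type constSampleIterator struct {
	i          int
	labels     string
	labelsHash uint64
	samples    []logproto.Sample
}

// Compile-time check that the new Iterator surface is fully implemented.
var _ iter.SampleIterator = (*constSampleIterator)(nil)

func newConstSampleIterator(labels string, hash uint64, samples []logproto.Sample) *constSampleIterator {
	return &constSampleIterator{i: -1, labels: labels, labelsHash: hash, samples: samples}
}

func (it *constSampleIterator) Next() bool              { it.i++; return it.i < len(it.samples) }
func (it *constSampleIterator) Sample() logproto.Sample { return it.samples[it.i] }
func (it *constSampleIterator) Labels() string          { return it.labels }
func (it *constSampleIterator) LabelsHash() uint64      { return it.labelsHash }
func (it *constSampleIterator) Error() error            { return nil }
func (it *constSampleIterator) Close() error            { return nil }
```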
@@ -37,7 +34,8 @@ type peekingSampleIterator struct { type sampleWithLabels struct { logproto.Sample - labels string + labels string + labelsHash uint64 } func NewPeekingSampleIterator(iter SampleIterator) PeekingSampleIterator { @@ -46,8 +44,9 @@ func NewPeekingSampleIterator(iter SampleIterator) PeekingSampleIterator { next := &sampleWithLabels{} if iter.Next() { cache = &sampleWithLabels{ - Sample: iter.Sample(), - labels: iter.Labels(), + Sample: iter.Sample(), + labels: iter.Labels(), + labelsHash: iter.LabelsHash(), } next.Sample = cache.Sample next.labels = cache.labels @@ -70,10 +69,18 @@ func (it *peekingSampleIterator) Labels() string { return "" } +func (it *peekingSampleIterator) LabelsHash() uint64 { + if it.next != nil { + return it.next.labelsHash + } + return 0 +} + func (it *peekingSampleIterator) Next() bool { if it.cache != nil { it.next.Sample = it.cache.Sample it.next.labels = it.cache.labels + it.next.labelsHash = it.cache.labelsHash it.cacheNext() return true } @@ -85,6 +92,7 @@ func (it *peekingSampleIterator) cacheNext() { if it.iter.Next() { it.cache.Sample = it.iter.Sample() it.cache.labels = it.iter.Labels() + it.cache.labelsHash = it.iter.LabelsHash() return } // nothing left removes the cached entry @@ -134,7 +142,7 @@ func (h sampleIteratorHeap) Less(i, j int) bool { case s1.Timestamp > s2.Timestamp: return false default: - return h[i].Labels() < h[j].Labels() + return h[i].LabelsHash() < h[j].LabelsHash() } } @@ -145,10 +153,11 @@ type heapSampleIterator struct { prefetched bool stats *stats.Context - tuples []sampletuple - curr logproto.Sample - currLabels string - errs []error + tuples []sampletuple + curr logproto.Sample + currLabels string + currLabelsHash uint64 + errs []error } // NewHeapSampleIterator returns a new iterator which uses a heap to merge together @@ -215,6 +224,7 @@ func (i *heapSampleIterator) Next() bool { if i.heap.Len() == 1 { i.curr = i.heap.Peek().Sample() i.currLabels = i.heap.Peek().Labels() + i.currLabelsHash = i.heap.Peek().LabelsHash() if !i.heap.Peek().Next() { i.heap.Pop() } @@ -228,7 +238,7 @@ func (i *heapSampleIterator) Next() bool { for i.heap.Len() > 0 { next := i.heap.Peek() sample := next.Sample() - if len(i.tuples) > 0 && (i.tuples[0].Labels() != next.Labels() || i.tuples[0].Timestamp != sample.Timestamp) { + if len(i.tuples) > 0 && (i.tuples[0].LabelsHash() != next.LabelsHash() || i.tuples[0].Timestamp != sample.Timestamp) { break } @@ -241,6 +251,7 @@ func (i *heapSampleIterator) Next() bool { i.curr = i.tuples[0].Sample i.currLabels = i.tuples[0].Labels() + i.currLabelsHash = i.tuples[0].LabelsHash() t := i.tuples[0] if len(i.tuples) == 1 { i.requeue(i.tuples[0].SampleIterator, false) @@ -271,6 +282,10 @@ func (i *heapSampleIterator) Labels() string { return i.currLabels } +func (i *heapSampleIterator) LabelsHash() uint64 { + return i.currLabelsHash +} + func (i *heapSampleIterator) Error() error { switch len(i.errs) { case 0: @@ -336,6 +351,10 @@ func (i *sampleQueryClientIterator) Labels() string { return i.curr.Labels() } +func (i *sampleQueryClientIterator) LabelsHash() uint64 { + return i.curr.LabelsHash() +} + func (i *sampleQueryClientIterator) Error() error { return i.err } @@ -416,6 +435,11 @@ func (i *seriesIterator) Labels() string { return i.labels } +// todo +func (i *seriesIterator) LabelsHash() uint64 { + return 0 +} + func (i *seriesIterator) Sample() logproto.Sample { return i.samples[i.i] } @@ -425,17 +449,19 @@ func (i *seriesIterator) Close() error { } type nonOverlappingSampleIterator struct { 
- labels string - i int - iterators []SampleIterator - curr SampleIterator + labels string + labelsHash uint64 + i int + iterators []SampleIterator + curr SampleIterator } // NewNonOverlappingSampleIterator gives a chained iterator over a list of iterators. -func NewNonOverlappingSampleIterator(iterators []SampleIterator, labels string) SampleIterator { +func NewNonOverlappingSampleIterator(iterators []SampleIterator, labels string, labelsHash uint64) SampleIterator { return &nonOverlappingSampleIterator{ - labels: labels, - iterators: iterators, + labels: labels, + labelsHash: labelsHash, + iterators: iterators, } } @@ -469,6 +495,13 @@ func (i *nonOverlappingSampleIterator) Labels() string { return i.curr.Labels() } +func (i *nonOverlappingSampleIterator) LabelsHash() uint64 { + if i.labels != "" { + return i.labelsHash + } + return i.curr.LabelsHash() +} + func (i *nonOverlappingSampleIterator) Error() error { if i.curr != nil { return i.curr.Error() diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index ed19919e3ff0..00dacf6e2bd6 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -170,13 +170,14 @@ func TestNewSampleQueryClientIterator(t *testing.T) { } func TestNewNonOverlappingSampleIterator(t *testing.T) { + // todo fix this test it := NewNonOverlappingSampleIterator([]SampleIterator{ NewSeriesIterator(varSeries), NewSeriesIterator(logproto.Series{ Labels: varSeries.Labels, Samples: []logproto.Sample{sample(4), sample(5)}, }), - }, varSeries.Labels) + }, varSeries.Labels, 0) for i := 1; i < 6; i++ { require.True(t, it.Next(), i) @@ -207,6 +208,7 @@ type CloseTestingSmplIterator struct { func (i *CloseTestingSmplIterator) Next() bool { return true } func (i *CloseTestingSmplIterator) Sample() logproto.Sample { return i.s } +func (i *CloseTestingSmplIterator) LabelsHash() uint64 { return 0 } func (i *CloseTestingSmplIterator) Labels() string { return "" } func (i *CloseTestingSmplIterator) Error() error { return nil } func (i *CloseTestingSmplIterator) Close() error { @@ -216,7 +218,7 @@ func (i *CloseTestingSmplIterator) Close() error { func TestNonOverlappingSampleClose(t *testing.T) { a, b := &CloseTestingSmplIterator{}, &CloseTestingSmplIterator{} - itr := NewNonOverlappingSampleIterator([]SampleIterator{a, b}, "") + itr := NewNonOverlappingSampleIterator([]SampleIterator{a, b}, "", 0) // Ensure both itr.cur and itr.iterators are non nil itr.Next() From 7ad92382f767a9b5ca95792106dacdf668434328 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 27 Jan 2022 15:26:18 +0100 Subject: [PATCH 02/14] Improve unused args and fixes build Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 4 ++++ pkg/chunkenc/memchunk.go | 6 +++--- pkg/ingester/stream.go | 4 ++-- pkg/iter/entry_iterator.go | 27 +++++++++++---------------- pkg/iter/entry_iterator_test.go | 2 +- pkg/iter/sample_iterator.go | 29 ++++++++++++----------------- pkg/iter/sample_iterator_test.go | 4 ++-- pkg/logql/engine_test.go | 2 ++ pkg/storage/batch.go | 12 ++++++++++-- pkg/storage/lazy_chunk.go | 6 +++--- 10 files changed, 50 insertions(+), 46 deletions(-) diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index a67545de9087..7f9d94400883 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -153,6 +153,10 @@ func (i *dumbChunkIterator) Labels() string { return "" } +func (i *dumbChunkIterator) LabelsHash() uint64 { + return 0 +} + func (i *dumbChunkIterator) Error() error { return nil } diff --git 
a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index aa4239175fd4..171ca175e7bc 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -816,7 +816,7 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi var it iter.EntryIterator if ordered { - it = iter.NewNonOverlappingIterator(blockItrs, "") + it = iter.NewNonOverlappingIterator(blockItrs) } else { it = iter.NewHeapIterator(ctx, blockItrs, direction) } @@ -849,7 +849,7 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi } if ordered { - return iter.NewNonOverlappingIterator(blockItrs, ""), nil + return iter.NewNonOverlappingIterator(blockItrs), nil } return iter.NewHeapIterator(ctx, blockItrs, direction), nil } @@ -884,7 +884,7 @@ func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, var it iter.SampleIterator if ordered { - it = iter.NewNonOverlappingSampleIterator(its, "") + it = iter.NewNonOverlappingSampleIterator(its) } else { it = iter.NewHeapSampleIterator(ctx, its) } diff --git a/pkg/ingester/stream.go b/pkg/ingester/stream.go index 55a881f5685d..bbede4dd72ee 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -468,7 +468,7 @@ func (s *stream) Iterator(ctx context.Context, statsCtx *stats.Context, from, th } if ordered { - return iter.NewNonOverlappingIterator(iterators, ""), nil + return iter.NewNonOverlappingIterator(iterators), nil } return iter.NewHeapIterator(ctx, iterators, direction), nil } @@ -505,7 +505,7 @@ func (s *stream) SampleIterator(ctx context.Context, statsCtx *stats.Context, fr } if ordered { - return iter.NewNonOverlappingSampleIterator(iterators, ""), nil + return iter.NewNonOverlappingSampleIterator(iterators), nil } return iter.NewHeapSampleIterator(ctx, iterators), nil } diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 917478e54786..5546925d99d4 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -386,18 +386,14 @@ func (i *queryClientIterator) Close() error { } type nonOverlappingIterator struct { - labels string - labelsHash uint64 - iterators []EntryIterator - curr EntryIterator + iterators []EntryIterator + curr EntryIterator } // NewNonOverlappingIterator gives a chained iterator over a list of iterators. 
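The call sites above simply drop the placeholder label argument; a chained iterator now reports Labels and LabelsHash from whichever sub-iterator is current. A hedged usage sketch with a hypothetical helper, assuming the post-change signature:

```go
package example

import (
	"github.com/grafana/loki/pkg/iter"
	"github.com/grafana/loki/pkg/logproto"
)

// chainStreams chains already-ordered, non-overlapping pieces of one stream.
// No labels are passed in anymore; they come from the sub-iterators.
func chainStreams(parts []logproto.Stream) iter.EntryIterator {
	its := make([]iter.EntryIterator, 0, len(parts))
	for _, p := range parts {
		its = append(its, iter.NewStreamIterator(p))
	}
	return iter.NewNonOverlappingIterator(its)
}
```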
-func NewNonOverlappingIterator(iterators []EntryIterator, labels string, labelsHash uint64) EntryIterator { +func NewNonOverlappingIterator(iterators []EntryIterator) EntryIterator { return &nonOverlappingIterator{ - labels: labels, - labelsHash: labelsHash, - iterators: iterators, + iterators: iterators, } } @@ -423,25 +419,24 @@ func (i *nonOverlappingIterator) Entry() logproto.Entry { } func (i *nonOverlappingIterator) Labels() string { - if i.labels != "" { - return i.labels + if i.curr == nil { + return "" } - return i.curr.Labels() } func (i *nonOverlappingIterator) LabelsHash() uint64 { - if i.labelsHash != 0 { - return i.labelsHash + if i.curr == nil { + return 0 } return i.curr.LabelsHash() } func (i *nonOverlappingIterator) Error() error { - if i.curr != nil { - return i.curr.Error() + if i.curr == nil { + return nil } - return nil + return i.curr.Error() } func (i *nonOverlappingIterator) Close() error { diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index f4c351d9d1b1..8413ee16784e 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -624,7 +624,7 @@ func (i *CloseTestingIterator) Close() error { func TestNonOverlappingClose(t *testing.T) { a, b := &CloseTestingIterator{}, &CloseTestingIterator{} - itr := NewNonOverlappingIterator([]EntryIterator{a, b}, "", 0) + itr := NewNonOverlappingIterator([]EntryIterator{a, b}) // Ensure both itr.cur and itr.iterators are non nil itr.Next() diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 1c39df08c685..2dcacde5e137 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -449,19 +449,15 @@ func (i *seriesIterator) Close() error { } type nonOverlappingSampleIterator struct { - labels string - labelsHash uint64 - i int - iterators []SampleIterator - curr SampleIterator + i int + iterators []SampleIterator + curr SampleIterator } // NewNonOverlappingSampleIterator gives a chained iterator over a list of iterators. 
-func NewNonOverlappingSampleIterator(iterators []SampleIterator, labels string, labelsHash uint64) SampleIterator { +func NewNonOverlappingSampleIterator(iterators []SampleIterator) SampleIterator { return &nonOverlappingSampleIterator{ - labels: labels, - labelsHash: labelsHash, - iterators: iterators, + iterators: iterators, } } @@ -488,25 +484,24 @@ func (i *nonOverlappingSampleIterator) Sample() logproto.Sample { } func (i *nonOverlappingSampleIterator) Labels() string { - if i.labels != "" { - return i.labels + if i.curr == nil { + return "" } - return i.curr.Labels() } func (i *nonOverlappingSampleIterator) LabelsHash() uint64 { - if i.labels != "" { - return i.labelsHash + if i.curr == nil { + return 0 } return i.curr.LabelsHash() } func (i *nonOverlappingSampleIterator) Error() error { - if i.curr != nil { - return i.curr.Error() + if i.curr == nil { + return nil } - return nil + return i.curr.Error() } func (i *nonOverlappingSampleIterator) Close() error { diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index 00dacf6e2bd6..fa3cba89adf2 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -177,7 +177,7 @@ func TestNewNonOverlappingSampleIterator(t *testing.T) { Labels: varSeries.Labels, Samples: []logproto.Sample{sample(4), sample(5)}, }), - }, varSeries.Labels, 0) + }) for i := 1; i < 6; i++ { require.True(t, it.Next(), i) @@ -218,7 +218,7 @@ func (i *CloseTestingSmplIterator) Close() error { func TestNonOverlappingSampleClose(t *testing.T) { a, b := &CloseTestingSmplIterator{}, &CloseTestingSmplIterator{} - itr := NewNonOverlappingSampleIterator([]SampleIterator{a, b}, "", 0) + itr := NewNonOverlappingSampleIterator([]SampleIterator{a, b}) // Ensure both itr.cur and itr.iterators are non nil itr.Next() diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 544bcac4cde2..7505c7d8394f 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -2551,6 +2551,8 @@ func (errorIterator) Error() error { return ErrMock } func (errorIterator) Labels() string { return "" } +func (errorIterator) LabelsHash() uint64 { return 0 } + func (errorIterator) Entry() logproto.Entry { return logproto.Entry{} } func (errorIterator) Sample() logproto.Sample { return logproto.Sample{} } diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index e3778ba9db33..edaf519bbf8b 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -338,6 +338,10 @@ func (it *logBatchIterator) Labels() string { return it.curr.Labels() } +func (it *logBatchIterator) LabelsHash() uint64 { + return it.curr.LabelsHash() +} + func (it *logBatchIterator) Error() error { if it.err != nil { return it.err @@ -437,7 +441,7 @@ func (it *logBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through iterators[i], iterators[j] = iterators[j], iterators[i] } } - result = append(result, iter.NewNonOverlappingIterator(iterators, "")) + result = append(result, iter.NewNonOverlappingIterator(iterators)) } return iter.NewHeapIterator(it.ctx, result, it.direction), nil @@ -477,6 +481,10 @@ func (it *sampleBatchIterator) Labels() string { return it.curr.Labels() } +func (it *sampleBatchIterator) LabelsHash() uint64 { + return it.curr.LabelsHash() +} + func (it *sampleBatchIterator) Error() error { if it.err != nil { return it.err @@ -571,7 +579,7 @@ func (it *sampleBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, thro } iterators = append(iterators, iterator) } - result = append(result, 
iter.NewNonOverlappingSampleIterator(iterators, "")) + result = append(result, iter.NewNonOverlappingSampleIterator(iterators)) } return iter.NewHeapSampleIterator(it.ctx, result), nil diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go index 436e0274dfe1..8757cc9c136f 100644 --- a/pkg/storage/lazy_chunk.go +++ b/pkg/storage/lazy_chunk.go @@ -85,7 +85,7 @@ func (c *LazyChunk) Iterator( if direction == logproto.FORWARD { return iter.NewTimeRangedIterator( - iter.NewNonOverlappingIterator(its, ""), + iter.NewNonOverlappingIterator(its), from, through, ), nil @@ -106,7 +106,7 @@ func (c *LazyChunk) Iterator( its[i], its[j] = its[j], its[i] } - return iter.NewNonOverlappingIterator(its, ""), nil + return iter.NewNonOverlappingIterator(its), nil } // SampleIterator returns an sample iterator. @@ -166,7 +166,7 @@ func (c *LazyChunk) SampleIterator( // build the final iterator bound to the requested time range. return iter.NewTimeRangedSampleIterator( - iter.NewNonOverlappingSampleIterator(its, ""), + iter.NewNonOverlappingSampleIterator(its), from.UnixNano(), through.UnixNano(), ), nil From 40e5fe14b9a53a35fd5afd039dbdbf92a7457e58 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 28 Jan 2022 10:23:09 +0100 Subject: [PATCH 03/14] Implement the new hash in the memchunk. Signed-off-by: Cyril Tovena --- pkg/chunkenc/memchunk.go | 4 ++++ pkg/chunkenc/memchunk_test.go | 1 + pkg/logql/log/labels.go | 4 ++-- pkg/logql/log/metrics_extraction.go | 5 +++++ pkg/logql/log/pipeline.go | 5 +++++ 5 files changed, 17 insertions(+), 2 deletions(-) diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index 171ca175e7bc..f8e3767eeff9 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -1252,6 +1252,8 @@ func (e *entryBufferedIterator) Entry() logproto.Entry { func (e *entryBufferedIterator) Labels() string { return e.currLabels.String() } +func (e *entryBufferedIterator) LabelsHash() uint64 { return e.pipeline.BaseLabels().Hash() } + func (e *entryBufferedIterator) Next() bool { for e.bufferedIterator.Next() { newLine, lbs, ok := e.pipeline.Process(e.currLine) @@ -1299,6 +1301,8 @@ func (e *sampleBufferedIterator) Next() bool { } func (e *sampleBufferedIterator) Labels() string { return e.currLabels.String() } +func (e *sampleBufferedIterator) LabelsHash() uint64 { return e.extractor.BaseLabels().Hash() } + func (e *sampleBufferedIterator) Sample() logproto.Sample { return e.cur } diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index af4cf2479064..8a4828fb516c 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -677,6 +677,7 @@ func BenchmarkWrite(b *testing.B) { type nomatchPipeline struct{} +func (nomatchPipeline) BaseLabels() log.LabelsResult { return log.EmptyLabelsResult } func (nomatchPipeline) Process(line []byte) ([]byte, log.LabelsResult, bool) { return line, nil, false } func (nomatchPipeline) ProcessString(line string) (string, log.LabelsResult, bool) { return line, nil, false diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index 1471384d6809..633ba595ca01 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -10,7 +10,7 @@ import ( const MaxInternedStrings = 1024 -var emptyLabelsResult = NewLabelsResult(labels.Labels{}, labels.Labels{}.Hash()) +var EmptyLabelsResult = NewLabelsResult(labels.Labels{}, labels.Labels{}.Hash()) // LabelsResult is a computed labels result that contains the labels set with associated string and hash. 
// The is mainly used for caching and returning labels computations out of pipelines and stages. @@ -274,7 +274,7 @@ func (b *LabelsBuilder) GroupedLabels() LabelsResult { return b.LabelsResult() } if b.noLabels { - return emptyLabelsResult + return EmptyLabelsResult } // unchanged path. if len(b.del) == 0 && len(b.add) == 0 { diff --git a/pkg/logql/log/metrics_extraction.go b/pkg/logql/log/metrics_extraction.go index a34719af8c2b..9faed48034d3 100644 --- a/pkg/logql/log/metrics_extraction.go +++ b/pkg/logql/log/metrics_extraction.go @@ -33,6 +33,7 @@ type SampleExtractor interface { // StreamSampleExtractor extracts sample for a log line. // A StreamSampleExtractor never mutate the received line. type StreamSampleExtractor interface { + BaseLabels() LabelsResult Process(line []byte) (float64, LabelsResult, bool) ProcessString(line string) (float64, LabelsResult, bool) } @@ -97,6 +98,8 @@ func (l *streamLineSampleExtractor) ProcessString(line string) (float64, LabelsR return l.Process(unsafeGetBytes(line)) } +func (l *streamLineSampleExtractor) BaseLabels() LabelsResult { return l.builder.currentResult } + type convertionFn func(value string) (float64, error) type labelSampleExtractor struct { @@ -196,6 +199,8 @@ func (l *streamLabelSampleExtractor) ProcessString(line string) (float64, Labels return l.Process(unsafeGetBytes(line)) } +func (l *streamLabelSampleExtractor) BaseLabels() LabelsResult { return l.builder.currentResult } + func convertFloat(v string) (float64, error) { return strconv.ParseFloat(v, 64) } diff --git a/pkg/logql/log/pipeline.go b/pkg/logql/log/pipeline.go index 52dd8ab2a654..8430bac68a01 100644 --- a/pkg/logql/log/pipeline.go +++ b/pkg/logql/log/pipeline.go @@ -18,6 +18,7 @@ type Pipeline interface { // StreamPipeline transform and filter log lines and labels. // A StreamPipeline never mutate the received line. type StreamPipeline interface { + BaseLabels() LabelsResult Process(line []byte) (resultLine []byte, resultLabels LabelsResult, skip bool) ProcessString(line string) (resultLine string, resultLabels LabelsResult, skip bool) } @@ -59,6 +60,8 @@ func (n noopStreamPipeline) ProcessString(line string) (string, LabelsResult, bo return line, n.LabelsResult, true } +func (n noopStreamPipeline) BaseLabels() LabelsResult { return n.LabelsResult } + func (n *noopPipeline) ForStream(labels labels.Labels) StreamPipeline { h := labels.Hash() if cached, ok := n.cache[h]; ok { @@ -153,6 +156,8 @@ func (p *streamPipeline) ProcessString(line string) (string, LabelsResult, bool) return unsafeGetString(lb), lr, ok } +func (p *streamPipeline) BaseLabels() LabelsResult { return p.builder.currentResult } + // ReduceStages reduces multiple stages into one. func ReduceStages(stages []Stage) Stage { if len(stages) == 0 { From 5eaf8c8670262ea4f97cd9d22ace1265c1d74952 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 28 Jan 2022 11:03:56 +0100 Subject: [PATCH 04/14] Add the hash in logproto Signed-off-by: Cyril Tovena --- pkg/iter/entry_iterator.go | 24 ++-- pkg/iter/sample_iterator.go | 19 ++- pkg/logproto/logproto.pb.go | 272 +++++++++++++++++++++++------------- pkg/logproto/logproto.proto | 2 + pkg/logproto/types.go | 34 ++++- 5 files changed, 228 insertions(+), 123 deletions(-) diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 5546925d99d4..f80da1194118 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -20,38 +20,35 @@ type EntryIterator interface { // streamIterator iterates over entries in a stream. 
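In the memchunk hunks above, LabelsHash is derived from the pipeline's BaseLabels — the labels of the stream the pipeline was built for — so the hash stays stable even when a stage rewrites labels per line. A sketch of that contract (hypothetical function, not from the patch):

```go
package example

import "github.com/grafana/loki/pkg/logql/log"

// originalStreamHash returns the hash of the unmodified stream labels.
// Process may return mutated result labels, but BaseLabels does not change.
func originalStreamHash(sp log.StreamPipeline, line []byte) uint64 {
	_, resultLabels, _ := sp.Process(line) // per-line labels may differ
	_ = resultLabels
	return sp.BaseLabels().Hash()
}
```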
type streamIterator struct { - i int - entries []logproto.Entry - labels string + i int + stream logproto.Stream } // NewStreamIterator iterates over entries in a stream. func NewStreamIterator(stream logproto.Stream) EntryIterator { return &streamIterator{ - i: -1, - entries: stream.Entries, - labels: stream.Labels, + i: -1, + stream: stream, } } func (i *streamIterator) Next() bool { i.i++ - return i.i < len(i.entries) + return i.i < len(i.stream.Entries) } func (i *streamIterator) Error() error { return nil } -// todo -func (i *streamIterator) LabelsHash() uint64 { return 0 } - func (i *streamIterator) Labels() string { - return i.labels + return i.stream.Labels } +func (i *streamIterator) LabelsHash() uint64 { return i.stream.LabelsHash } + func (i *streamIterator) Entry() logproto.Entry { - return i.entries[i.i] + return i.stream.Entries[i.i] } func (i *streamIterator) Close() error { @@ -374,8 +371,7 @@ func (i *queryClientIterator) Labels() string { return i.curr.Labels() } -// todo -func (i *queryClientIterator) LabelsHash() uint64 { return 0 } +func (i *queryClientIterator) LabelsHash() uint64 { return i.curr.LabelsHash() } func (i *queryClientIterator) Error() error { return i.err diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 2dcacde5e137..9123aed1706e 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -369,9 +369,8 @@ func NewSampleQueryResponseIterator(ctx context.Context, resp *logproto.SampleQu } type seriesIterator struct { - i int - samples []logproto.Sample - labels string + i int + series logproto.Series } type withCloseSampleIterator struct { @@ -416,15 +415,14 @@ func NewMultiSeriesIterator(ctx context.Context, series []logproto.Series) Sampl // NewSeriesIterator iterates over sample in a series. 
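Since logproto.Stream now carries labelsHash, whoever builds the stream fills the hash once and the streamIterator above just passes it through. A sketch with hypothetical names (not from the patch):

```go
package example

import (
	"time"

	"github.com/grafana/loki/pkg/iter"
	"github.com/grafana/loki/pkg/logproto"
)

// newHashedStreamIterator builds a Stream whose LabelsHash is already set
// (e.g. from the original stream's label hash) and iterates over it; the
// iterator's LabelsHash() returns the stored value unchanged.
func newHashedStreamIterator(lbs string, hash uint64, now time.Time) iter.EntryIterator {
	return iter.NewStreamIterator(logproto.Stream{
		Labels:     lbs,
		LabelsHash: hash,
		Entries:    []logproto.Entry{{Timestamp: now, Line: "example line"}},
	})
}
```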
func NewSeriesIterator(series logproto.Series) SampleIterator { return &seriesIterator{ - i: -1, - samples: series.Samples, - labels: series.Labels, + i: -1, + series: series, } } func (i *seriesIterator) Next() bool { i.i++ - return i.i < len(i.samples) + return i.i < len(i.series.Samples) } func (i *seriesIterator) Error() error { @@ -432,16 +430,15 @@ func (i *seriesIterator) Error() error { } func (i *seriesIterator) Labels() string { - return i.labels + return i.series.Labels } -// todo func (i *seriesIterator) LabelsHash() uint64 { - return 0 + return i.series.LabelsHash } func (i *seriesIterator) Sample() logproto.Sample { - return i.samples[i.i] + return i.series.Samples[i.i] } func (i *seriesIterator) Close() error { diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index e1579cd3f847..0fa3da25ca35 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -479,8 +479,9 @@ func (m *LabelResponse) GetValues() []string { } type StreamAdapter struct { - Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - Entries []EntryAdapter `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries"` + Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Entries []EntryAdapter `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries"` + LabelsHash uint64 `protobuf:"varint,3,opt,name=labelsHash,proto3" json:"labelsHash"` } func (m *StreamAdapter) Reset() { *m = StreamAdapter{} } @@ -529,6 +530,13 @@ func (m *StreamAdapter) GetEntries() []EntryAdapter { return nil } +func (m *StreamAdapter) GetLabelsHash() uint64 { + if m != nil { + return m.LabelsHash + } + return 0 +} + type EntryAdapter struct { Timestamp time.Time `protobuf:"bytes,1,opt,name=timestamp,proto3,stdtime" json:"ts"` Line string `protobuf:"bytes,2,opt,name=line,proto3" json:"line"` @@ -640,8 +648,9 @@ func (m *Sample) GetHash() uint64 { } type Series struct { - Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - Samples []Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"samples"` + Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Samples []Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"samples"` + LabelsHash uint64 `protobuf:"varint,3,opt,name=labelsHash,proto3" json:"labelsHash"` } func (m *Series) Reset() { *m = Series{} } @@ -690,6 +699,13 @@ func (m *Series) GetSamples() []Sample { return nil } +func (m *Series) GetLabelsHash() uint64 { + if m != nil { + return m.LabelsHash + } + return 0 +} + type TailRequest struct { Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` DelayFor uint32 `protobuf:"varint,3,opt,name=delayFor,proto3" json:"delayFor,omitempty"` @@ -1423,95 +1439,97 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 1404 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x49, 0x8f, 0x13, 0x47, - 0x14, 0x76, 0x79, 0xe9, 0xb1, 0x9f, 0x17, 0xac, 0x9a, 0x61, 0xc6, 0x31, 0xd0, 0xb6, 0x4a, 0x08, - 0xac, 0x40, 0xec, 0x30, 0xd9, 0x58, 0xb2, 0x68, 0xcc, 0x84, 0x30, 0x04, 0x05, 0x68, 0x90, 0x90, - 0x90, 0x22, 0xd4, 0x63, 0xd7, 0xd8, 0xad, 0xb1, 0xdd, 0xa6, 0xab, 0x8c, 0x34, 0x52, 0xa4, 0xe4, - 0x07, 0x24, 0x12, 0xb7, 0x1c, 0x72, 0xcd, 0x21, 0xca, 0x21, 0xbf, 0x83, 0xdc, 0x50, 0x4e, 0x28, - 0x07, 0x27, 0x98, 0x4b, 0x34, 0xca, 0x81, 0x9f, 0x10, 0xd5, 0xd2, 0xed, 0xb2, 0x99, 0x49, 
0x30, - 0x97, 0x5c, 0xda, 0xf5, 0xaa, 0x5e, 0xbd, 0xe5, 0x7b, 0x5f, 0xbd, 0x2a, 0xc3, 0xb1, 0xe1, 0x6e, - 0xa7, 0xd1, 0xf3, 0x3b, 0xc3, 0xc0, 0xe7, 0x7e, 0x34, 0xa8, 0xcb, 0x2f, 0x4e, 0x87, 0x72, 0xb9, - 0xd2, 0xf1, 0xfd, 0x4e, 0x8f, 0x36, 0xa4, 0xb4, 0x3d, 0xda, 0x69, 0x70, 0xaf, 0x4f, 0x19, 0x77, - 0xfb, 0x43, 0xa5, 0x5a, 0x7e, 0xab, 0xe3, 0xf1, 0xee, 0x68, 0xbb, 0xde, 0xf2, 0xfb, 0x8d, 0x8e, - 0xdf, 0xf1, 0xa7, 0x9a, 0x42, 0x52, 0xd6, 0xc5, 0x48, 0xab, 0x57, 0xb5, 0xdb, 0x07, 0xbd, 0xbe, - 0xdf, 0xa6, 0xbd, 0x06, 0xe3, 0x2e, 0x67, 0xea, 0xab, 0x34, 0xc8, 0x5d, 0xc8, 0xde, 0x1c, 0xb1, - 0xae, 0x43, 0x1f, 0x8c, 0x28, 0xe3, 0xf8, 0x2a, 0x2c, 0x31, 0x1e, 0x50, 0xb7, 0xcf, 0x4a, 0xa8, - 0x9a, 0xa8, 0x65, 0xd7, 0xd7, 0xea, 0x51, 0xb0, 0xb7, 0xe5, 0xc2, 0x46, 0xdb, 0x1d, 0x72, 0x1a, - 0x34, 0x8f, 0xfe, 0x3e, 0xae, 0x58, 0x6a, 0x6a, 0x7f, 0x5c, 0x09, 0x77, 0x39, 0xe1, 0x80, 0x14, - 0x20, 0xa7, 0x0c, 0xb3, 0xa1, 0x3f, 0x60, 0x94, 0xfc, 0x10, 0x87, 0xdc, 0xad, 0x11, 0x0d, 0xf6, - 0x42, 0x57, 0x65, 0x48, 0x33, 0xda, 0xa3, 0x2d, 0xee, 0x07, 0x25, 0x54, 0x45, 0xb5, 0x8c, 0x13, - 0xc9, 0x78, 0x05, 0x52, 0x3d, 0xaf, 0xef, 0xf1, 0x52, 0xbc, 0x8a, 0x6a, 0x79, 0x47, 0x09, 0xf8, - 0x22, 0xa4, 0x18, 0x77, 0x03, 0x5e, 0x4a, 0x54, 0x51, 0x2d, 0xbb, 0x5e, 0xae, 0x2b, 0xb4, 0xea, - 0x21, 0x06, 0xf5, 0x3b, 0x21, 0x5a, 0xcd, 0xf4, 0xe3, 0x71, 0x25, 0xf6, 0xe8, 0x8f, 0x0a, 0x72, - 0xd4, 0x16, 0xfc, 0x3e, 0x24, 0xe8, 0xa0, 0x5d, 0x4a, 0x2e, 0xb0, 0x53, 0x6c, 0xc0, 0xe7, 0x20, - 0xd3, 0xf6, 0x02, 0xda, 0xe2, 0x9e, 0x3f, 0x28, 0xa5, 0xaa, 0xa8, 0x56, 0x58, 0x5f, 0x9e, 0x42, - 0xb2, 0x19, 0x2e, 0x39, 0x53, 0x2d, 0x7c, 0x16, 0x2c, 0xd6, 0x75, 0x83, 0x36, 0x2b, 0x2d, 0x55, - 0x13, 0xb5, 0x4c, 0x73, 0x65, 0x7f, 0x5c, 0x29, 0xaa, 0x99, 0xb3, 0x7e, 0xdf, 0xe3, 0xb4, 0x3f, - 0xe4, 0x7b, 0x8e, 0xd6, 0xb9, 0x96, 0x4c, 0x5b, 0xc5, 0x25, 0xf2, 0x1b, 0x02, 0x7c, 0xdb, 0xed, - 0x0f, 0x7b, 0xf4, 0x95, 0x31, 0x8a, 0xd0, 0x88, 0xbf, 0x36, 0x1a, 0x89, 0x45, 0xd1, 0x98, 0xa6, - 0x96, 0xfc, 0xef, 0xd4, 0xc8, 0xd7, 0x90, 0xd7, 0xd9, 0x28, 0x0e, 0xe0, 0x8d, 0x57, 0x66, 0x57, - 0xe1, 0xf1, 0xb8, 0x82, 0xa6, 0x0c, 0x8b, 0x68, 0x85, 0xcf, 0xc8, 0xac, 0x39, 0xd3, 0x59, 0x1f, - 0xa9, 0x2b, 0x32, 0x6f, 0x0d, 0x3a, 0x94, 0x89, 0x8d, 0x49, 0x11, 0xb0, 0xa3, 0x74, 0xc8, 0x57, - 0xb0, 0x3c, 0x03, 0xaa, 0x0e, 0xe3, 0x3c, 0x58, 0x8c, 0x06, 0x1e, 0x0d, 0xa3, 0x28, 0x1a, 0x51, - 0xc8, 0x79, 0xc3, 0xbd, 0x94, 0x1d, 0xad, 0xbf, 0x98, 0xf7, 0x5f, 0x10, 0xe4, 0xae, 0xbb, 0xdb, - 0xb4, 0x17, 0x56, 0x13, 0x43, 0x72, 0xe0, 0xf6, 0xa9, 0xae, 0xa4, 0x1c, 0xe3, 0x55, 0xb0, 0x1e, - 0xba, 0xbd, 0x11, 0x55, 0x26, 0xd3, 0x8e, 0x96, 0x16, 0xe5, 0x3a, 0x7a, 0x6d, 0xae, 0xa3, 0xa8, - 0xba, 0xe4, 0x34, 0xe4, 0x75, 0xbc, 0x1a, 0xa8, 0x69, 0x70, 0x02, 0xa8, 0x4c, 0x18, 0x1c, 0x79, - 0x08, 0xf9, 0x99, 0x72, 0x61, 0x02, 0x56, 0x4f, 0xec, 0x64, 0x2a, 0xb7, 0x26, 0xec, 0x8f, 0x2b, - 0x7a, 0xc6, 0xd1, 0xbf, 0xa2, 0xf8, 0x74, 0xc0, 0x25, 0xec, 0x71, 0x09, 0xfb, 0xea, 0x14, 0xf6, - 0x4f, 0x07, 0x3c, 0xd8, 0x0b, 0x6b, 0x7f, 0x44, 0x80, 0x28, 0x7a, 0x8a, 0x56, 0x77, 0xc2, 0x01, - 0x79, 0x08, 0x39, 0x53, 0x13, 0x5f, 0x85, 0x4c, 0xd4, 0x20, 0xa5, 0xe7, 0x7f, 0x4f, 0xb7, 0xa0, - 0x0d, 0xc7, 0x39, 0x93, 0x49, 0x4f, 0x37, 0xe3, 0xe3, 0x90, 0xec, 0x79, 0x03, 0x2a, 0x8b, 0x90, - 0x69, 0xa6, 0xf7, 0xc7, 0x15, 0x29, 0x3b, 0xf2, 0x4b, 0xfa, 0x60, 0x29, 0x1e, 0xe1, 0x93, 0xf3, - 0x1e, 0x13, 0x4d, 0x4b, 0x59, 0x34, 0xad, 0x55, 0x20, 0x25, 0x91, 0x92, 0xe6, 0x50, 0x33, 0xb3, - 0x3f, 0xae, 0xa8, 0x09, 0x47, 0xfd, 0x08, 0x77, 0x5d, 0x97, 0x75, 0x65, 0x71, 0x93, 0xca, 0x9d, - 0x90, 0x1d, 0xf9, 
0x25, 0x1e, 0x68, 0xde, 0xbd, 0x12, 0xae, 0x97, 0x60, 0x89, 0xc9, 0xe0, 0x42, - 0x5c, 0x4d, 0x3a, 0xcb, 0x85, 0x29, 0xa2, 0x5a, 0xd1, 0x09, 0x07, 0xe4, 0x7b, 0x04, 0xd9, 0x3b, - 0xae, 0x17, 0x51, 0x74, 0x05, 0x52, 0x0f, 0xc4, 0x59, 0xd1, 0x1c, 0x55, 0x82, 0x68, 0x43, 0x6d, - 0xda, 0x73, 0xf7, 0xae, 0xf8, 0x81, 0x0c, 0x39, 0xef, 0x44, 0xf2, 0xb4, 0x55, 0x27, 0x0f, 0x6c, - 0xd5, 0xa9, 0x85, 0x9b, 0xd3, 0xb5, 0x64, 0x3a, 0x5e, 0x4c, 0x90, 0x6f, 0x11, 0xe4, 0x54, 0x64, - 0x9a, 0x8c, 0x97, 0xc0, 0x52, 0x4d, 0x40, 0x57, 0xfa, 0xd0, 0xde, 0x01, 0x46, 0xdf, 0xd0, 0x5b, - 0xf0, 0x27, 0x50, 0x68, 0x07, 0xfe, 0x70, 0x48, 0xdb, 0xb7, 0x75, 0x03, 0x8a, 0xcf, 0x37, 0xa0, - 0x4d, 0x73, 0xdd, 0x99, 0x53, 0x27, 0xbf, 0x22, 0xc8, 0xeb, 0x66, 0xa0, 0xa1, 0x8a, 0x52, 0x44, - 0xaf, 0xdd, 0x7f, 0xe3, 0x8b, 0xf6, 0xdf, 0x55, 0xb0, 0x3a, 0x81, 0x3f, 0x1a, 0xb2, 0x52, 0x42, - 0x1d, 0x48, 0x25, 0x2d, 0xd8, 0x97, 0xaf, 0x41, 0x21, 0x4c, 0xe5, 0x90, 0x8e, 0x58, 0x9e, 0xef, - 0x88, 0x5b, 0x6d, 0x3a, 0xe0, 0xde, 0x8e, 0x17, 0xf5, 0x38, 0xad, 0x4f, 0xbe, 0x43, 0x50, 0x9c, - 0x57, 0xc1, 0x1f, 0x1b, 0xb4, 0x15, 0xe6, 0x4e, 0x1d, 0x6e, 0xae, 0x2e, 0x3b, 0x0e, 0x93, 0xc7, - 0x3a, 0xa4, 0x74, 0xf9, 0x02, 0x64, 0x8d, 0x69, 0x5c, 0x84, 0xc4, 0x2e, 0x0d, 0x29, 0x29, 0x86, - 0x82, 0x74, 0xd3, 0x03, 0x96, 0xd1, 0xa7, 0xea, 0x62, 0xfc, 0x3c, 0x12, 0x84, 0xce, 0xcf, 0x54, - 0x12, 0x9f, 0x87, 0xe4, 0x4e, 0xe0, 0xf7, 0x17, 0x2a, 0x93, 0xdc, 0x81, 0xdf, 0x85, 0x38, 0xf7, - 0x17, 0x2a, 0x52, 0x9c, 0xfb, 0xa2, 0x46, 0x3a, 0xf9, 0x84, 0x0c, 0x4e, 0x4b, 0xe4, 0x67, 0x04, - 0x47, 0xc4, 0x1e, 0x85, 0xc0, 0xe5, 0xee, 0x68, 0xb0, 0x8b, 0x6b, 0x50, 0x14, 0x9e, 0xee, 0x7b, - 0xfa, 0x02, 0xb9, 0xef, 0xb5, 0x75, 0x9a, 0x05, 0x31, 0x1f, 0xde, 0x2b, 0x5b, 0x6d, 0xbc, 0x06, - 0x4b, 0x23, 0xa6, 0x14, 0x54, 0xce, 0x96, 0x10, 0xb7, 0xda, 0xf8, 0x8c, 0xe1, 0x4e, 0x60, 0x6d, - 0xbc, 0x4e, 0x24, 0x86, 0x37, 0x5d, 0x2f, 0x88, 0x7a, 0xc5, 0x69, 0xb0, 0x5a, 0xc2, 0xb1, 0xe2, - 0x89, 0xb8, 0xc0, 0x22, 0x65, 0x19, 0x90, 0xa3, 0x97, 0xc9, 0x7b, 0x90, 0x89, 0x76, 0x1f, 0x78, - 0x6f, 0x1d, 0x58, 0x01, 0x72, 0x0c, 0x52, 0x2a, 0x31, 0x0c, 0xc9, 0xb6, 0xcb, 0x5d, 0xb9, 0x25, - 0xe7, 0xc8, 0x31, 0x29, 0xc1, 0xea, 0x9d, 0xc0, 0x1d, 0xb0, 0x1d, 0x1a, 0x48, 0xa5, 0x88, 0x7e, - 0xe4, 0x28, 0x2c, 0x8b, 0xa3, 0x4e, 0x03, 0x76, 0xd9, 0x1f, 0x0d, 0xb8, 0x3e, 0x61, 0xe4, 0x2c, - 0xac, 0xcc, 0x4e, 0x6b, 0xb6, 0xae, 0x40, 0xaa, 0x25, 0x26, 0xa4, 0xf5, 0xbc, 0xa3, 0x04, 0xf2, - 0x23, 0x02, 0xfc, 0x19, 0xe5, 0xd2, 0xf4, 0xd6, 0x26, 0x33, 0x9e, 0x50, 0x7d, 0x97, 0xb7, 0xba, - 0x34, 0x60, 0xe1, 0x13, 0x2a, 0x94, 0xff, 0x8f, 0x27, 0x14, 0x39, 0x07, 0xcb, 0x33, 0x51, 0xea, - 0x9c, 0xca, 0x90, 0x6e, 0xe9, 0x39, 0x7d, 0xd9, 0x46, 0xf2, 0x9b, 0xa7, 0x20, 0x13, 0x3d, 0x34, - 0x71, 0x16, 0x96, 0xae, 0xdc, 0x70, 0xee, 0x6e, 0x38, 0x9b, 0xc5, 0x18, 0xce, 0x41, 0xba, 0xb9, - 0x71, 0xf9, 0x73, 0x29, 0xa1, 0xf5, 0x0d, 0xb0, 0xc4, 0x93, 0x9b, 0x06, 0xf8, 0x03, 0x48, 0x8a, - 0x11, 0x3e, 0x3a, 0xad, 0xaf, 0xf1, 0xca, 0x2f, 0xaf, 0xce, 0x4f, 0xeb, 0x3a, 0xc4, 0xd6, 0xff, - 0x4e, 0xc0, 0x92, 0x78, 0x2c, 0x89, 0x53, 0xfc, 0x21, 0xa4, 0xe4, 0xbb, 0x09, 0x1b, 0xea, 0xe6, - 0xeb, 0xb4, 0xbc, 0xf6, 0xd2, 0x7c, 0x68, 0xe7, 0x6d, 0x84, 0xbf, 0x80, 0xac, 0x9c, 0xd4, 0x17, - 0xe7, 0xf1, 0xf9, 0x4b, 0x69, 0xc6, 0xd2, 0x89, 0x43, 0x56, 0x0d, 0x7b, 0x17, 0x21, 0x25, 0x19, - 0x69, 0x46, 0x63, 0xbe, 0xae, 0xcc, 0x68, 0x66, 0x5e, 0x31, 0x24, 0x86, 0x2f, 0x40, 0x52, 0x10, - 0xc9, 0x84, 0xc3, 0xb8, 0xf4, 0x4c, 0x38, 0xcc, 0x1b, 0x47, 0xba, 0xfd, 0x28, 0xba, 0x8b, 0xd7, - 0xe6, 0x9b, 0x58, 0xb8, 0xbd, 0xf4, 0xf2, 
0x42, 0xe4, 0xf9, 0x86, 0xba, 0xc4, 0x42, 0x0a, 0xe3, - 0x13, 0xb3, 0xae, 0xe6, 0x18, 0x5f, 0xb6, 0x0f, 0x5b, 0x8e, 0x0c, 0x5e, 0x87, 0xac, 0x41, 0x1f, - 0x13, 0xd6, 0x97, 0xb9, 0x6f, 0xc2, 0x7a, 0x00, 0xe7, 0x48, 0x6c, 0xfd, 0x4b, 0x48, 0x87, 0x3d, - 0x06, 0xdf, 0x82, 0xc2, 0xec, 0xf1, 0xc4, 0x6f, 0x18, 0xd1, 0xcc, 0x36, 0xae, 0x72, 0xd5, 0x58, - 0x3a, 0xf8, 0x4c, 0xc7, 0x6a, 0xa8, 0x79, 0xef, 0xc9, 0x33, 0x3b, 0xf6, 0xf4, 0x99, 0x1d, 0x7b, - 0xf1, 0xcc, 0x46, 0xdf, 0x4c, 0x6c, 0xf4, 0xd3, 0xc4, 0x46, 0x8f, 0x27, 0x36, 0x7a, 0x32, 0xb1, - 0xd1, 0x9f, 0x13, 0x1b, 0xfd, 0x35, 0xb1, 0x63, 0x2f, 0x26, 0x36, 0x7a, 0xf4, 0xdc, 0x8e, 0x3d, - 0x79, 0x6e, 0xc7, 0x9e, 0x3e, 0xb7, 0x63, 0xf7, 0x4e, 0x9a, 0xff, 0x71, 0x03, 0x77, 0xc7, 0x1d, - 0xb8, 0x8d, 0x9e, 0xbf, 0xeb, 0x35, 0xcc, 0xff, 0xd0, 0xdb, 0x96, 0xfc, 0x79, 0xe7, 0x9f, 0x00, - 0x00, 0x00, 0xff, 0xff, 0x7b, 0x53, 0xc1, 0x06, 0x5a, 0x0f, 0x00, 0x00, + // 1426 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x4b, 0x8f, 0x13, 0x47, + 0x10, 0x76, 0xfb, 0x31, 0xb6, 0xcb, 0x0f, 0xac, 0xde, 0x65, 0xd7, 0x31, 0x30, 0xb6, 0x46, 0x08, + 0xac, 0x40, 0xec, 0xb0, 0x79, 0xf1, 0xc8, 0x43, 0x6b, 0x36, 0x84, 0x25, 0x28, 0xc0, 0x80, 0x84, + 0x84, 0x14, 0xa1, 0x59, 0xbb, 0xd7, 0x1e, 0xad, 0xed, 0x31, 0xd3, 0x6d, 0xa4, 0x95, 0x22, 0x25, + 0x3f, 0x20, 0x91, 0xb8, 0x45, 0x51, 0xae, 0x51, 0x14, 0xe5, 0x90, 0xdf, 0x41, 0x6e, 0x28, 0x27, + 0x94, 0x83, 0x13, 0xcc, 0x25, 0x5a, 0xe5, 0xc0, 0x4f, 0x88, 0xfa, 0x31, 0x33, 0x6d, 0xb3, 0x9b, + 0xe0, 0xbd, 0xe4, 0x32, 0xee, 0xaa, 0xae, 0xae, 0xae, 0xfa, 0xea, 0xd5, 0x86, 0x63, 0xa3, 0x9d, + 0x6e, 0xb3, 0xef, 0x75, 0x47, 0xbe, 0xc7, 0xbc, 0x70, 0xd1, 0x10, 0x5f, 0x9c, 0x09, 0xe8, 0x4a, + 0xb5, 0xeb, 0x79, 0xdd, 0x3e, 0x69, 0x0a, 0x6a, 0x6b, 0xbc, 0xdd, 0x64, 0xee, 0x80, 0x50, 0xe6, + 0x0c, 0x46, 0x52, 0xb4, 0xf2, 0x46, 0xd7, 0x65, 0xbd, 0xf1, 0x56, 0xa3, 0xed, 0x0d, 0x9a, 0x5d, + 0xaf, 0xeb, 0x45, 0x92, 0x9c, 0x92, 0xda, 0xf9, 0x4a, 0x89, 0xd7, 0xd4, 0xb5, 0x0f, 0xfa, 0x03, + 0xaf, 0x43, 0xfa, 0x4d, 0xca, 0x1c, 0x46, 0xe5, 0x57, 0x4a, 0x58, 0x77, 0x21, 0x77, 0x73, 0x4c, + 0x7b, 0x36, 0x79, 0x30, 0x26, 0x94, 0xe1, 0xab, 0x90, 0xa6, 0xcc, 0x27, 0xce, 0x80, 0x96, 0x51, + 0x2d, 0x51, 0xcf, 0xad, 0xad, 0x36, 0x42, 0x63, 0x6f, 0x8b, 0x8d, 0xf5, 0x8e, 0x33, 0x62, 0xc4, + 0x6f, 0x1d, 0xfd, 0x7d, 0x52, 0x35, 0x24, 0x6b, 0x6f, 0x52, 0x0d, 0x4e, 0xd9, 0xc1, 0xc2, 0x2a, + 0x42, 0x5e, 0x2a, 0xa6, 0x23, 0x6f, 0x48, 0x89, 0xf5, 0x7d, 0x1c, 0xf2, 0xb7, 0xc6, 0xc4, 0xdf, + 0x0d, 0xae, 0xaa, 0x40, 0x86, 0x92, 0x3e, 0x69, 0x33, 0xcf, 0x2f, 0xa3, 0x1a, 0xaa, 0x67, 0xed, + 0x90, 0xc6, 0xcb, 0x90, 0xea, 0xbb, 0x03, 0x97, 0x95, 0xe3, 0x35, 0x54, 0x2f, 0xd8, 0x92, 0xc0, + 0x17, 0x21, 0x45, 0x99, 0xe3, 0xb3, 0x72, 0xa2, 0x86, 0xea, 0xb9, 0xb5, 0x4a, 0x43, 0xa2, 0xd5, + 0x08, 0x30, 0x68, 0xdc, 0x09, 0xd0, 0x6a, 0x65, 0x1e, 0x4f, 0xaa, 0xb1, 0x47, 0x7f, 0x54, 0x91, + 0x2d, 0x8f, 0xe0, 0x77, 0x21, 0x41, 0x86, 0x9d, 0x72, 0x72, 0x81, 0x93, 0xfc, 0x00, 0x3e, 0x07, + 0xd9, 0x8e, 0xeb, 0x93, 0x36, 0x73, 0xbd, 0x61, 0x39, 0x55, 0x43, 0xf5, 0xe2, 0xda, 0x52, 0x04, + 0xc9, 0x46, 0xb0, 0x65, 0x47, 0x52, 0xf8, 0x2c, 0x18, 0xb4, 0xe7, 0xf8, 0x1d, 0x5a, 0x4e, 0xd7, + 0x12, 0xf5, 0x6c, 0x6b, 0x79, 0x6f, 0x52, 0x2d, 0x49, 0xce, 0x59, 0x6f, 0xe0, 0x32, 0x32, 0x18, + 0xb1, 0x5d, 0x5b, 0xc9, 0x5c, 0x4b, 0x66, 0x8c, 0x52, 0xda, 0xfa, 0x0d, 0x01, 0xbe, 0xed, 0x0c, + 0x46, 0x7d, 0xf2, 0xca, 0x18, 0x85, 0x68, 0xc4, 0x0f, 0x8d, 0x46, 0x62, 0x51, 0x34, 0x22, 0xd7, + 0x92, 0xff, 0xed, 0x9a, 0xf5, 0x25, 0x14, 
0x94, 0x37, 0x32, 0x07, 0xf0, 0xfa, 0x2b, 0x67, 0x57, + 0xf1, 0xf1, 0xa4, 0x8a, 0xa2, 0x0c, 0x0b, 0xd3, 0x0a, 0x9f, 0x11, 0x5e, 0x33, 0xaa, 0xbc, 0x3e, + 0xd2, 0x90, 0xc9, 0xbc, 0x39, 0xec, 0x12, 0xca, 0x0f, 0x26, 0xb9, 0xc1, 0xb6, 0x94, 0xb1, 0xbe, + 0x80, 0xa5, 0x19, 0x50, 0x95, 0x19, 0xe7, 0xc1, 0xa0, 0xc4, 0x77, 0x49, 0x60, 0x45, 0x49, 0xb3, + 0x42, 0xf0, 0xb5, 0xeb, 0x05, 0x6d, 0x2b, 0xf9, 0xc5, 0x6e, 0xff, 0x05, 0x41, 0xfe, 0xba, 0xb3, + 0x45, 0xfa, 0x41, 0x34, 0x31, 0x24, 0x87, 0xce, 0x80, 0xa8, 0x48, 0x8a, 0x35, 0x5e, 0x01, 0xe3, + 0xa1, 0xd3, 0x1f, 0x13, 0xa9, 0x32, 0x63, 0x2b, 0x6a, 0xd1, 0x5c, 0x47, 0x87, 0xce, 0x75, 0x14, + 0x46, 0xd7, 0x3a, 0x0d, 0x05, 0x65, 0xaf, 0x02, 0x2a, 0x32, 0x8e, 0x03, 0x95, 0x0d, 0x8c, 0xb3, + 0x7e, 0x44, 0x50, 0x98, 0x89, 0x17, 0xb6, 0xc0, 0xe8, 0xf3, 0xa3, 0x54, 0x3a, 0xd7, 0x82, 0xbd, + 0x49, 0x55, 0x71, 0x6c, 0xf5, 0xcb, 0xa3, 0x4f, 0x86, 0x4c, 0xe0, 0x1e, 0x17, 0xb8, 0xaf, 0x44, + 0xb8, 0x7f, 0x3c, 0x64, 0xfe, 0x6e, 0x10, 0xfc, 0x23, 0x1c, 0x45, 0xde, 0x54, 0x94, 0xb8, 0x1d, + 0x2c, 0x70, 0x03, 0x40, 0x2a, 0xbb, 0xea, 0xd0, 0x9e, 0x80, 0x26, 0xd9, 0x2a, 0xee, 0x4d, 0xaa, + 0x1a, 0xd7, 0xd6, 0xd6, 0xd6, 0x43, 0xc8, 0xeb, 0x9a, 0xf1, 0x55, 0xc8, 0x86, 0x1d, 0x55, 0x58, + 0xfa, 0xef, 0xf8, 0x14, 0x95, 0x21, 0x71, 0x46, 0x05, 0x4a, 0xd1, 0x61, 0x7c, 0x1c, 0x92, 0x7d, + 0x77, 0x48, 0x44, 0xd4, 0xb2, 0xad, 0xcc, 0xde, 0xa4, 0x2a, 0x68, 0x5b, 0x7c, 0xad, 0x01, 0x18, + 0x32, 0xf1, 0xf0, 0xc9, 0xf9, 0x1b, 0x13, 0x2d, 0x43, 0x6a, 0xd4, 0xb5, 0x55, 0x21, 0x25, 0xa0, + 0x15, 0xea, 0x50, 0x2b, 0xbb, 0x37, 0xa9, 0x4a, 0x86, 0x2d, 0x7f, 0xf8, 0x75, 0xbd, 0xc8, 0x65, + 0x71, 0x1d, 0xa7, 0x6d, 0xf1, 0xb5, 0xbe, 0x43, 0xa0, 0x32, 0xf5, 0x95, 0x02, 0x71, 0x09, 0xd2, + 0x54, 0x58, 0x17, 0x04, 0x42, 0x2f, 0x00, 0xb1, 0x11, 0x85, 0x40, 0x09, 0xda, 0xc1, 0x62, 0xe1, + 0x10, 0x7c, 0x8b, 0x20, 0x77, 0xc7, 0x71, 0xc3, 0x22, 0x58, 0x86, 0xd4, 0x03, 0x5e, 0x8d, 0xaa, + 0x0a, 0x24, 0xc1, 0x1b, 0x5d, 0x87, 0xf4, 0x9d, 0xdd, 0x2b, 0x9e, 0x2f, 0x74, 0x16, 0xec, 0x90, + 0x8e, 0x86, 0x41, 0x72, 0xdf, 0x61, 0x90, 0x5a, 0xb8, 0xfd, 0x5d, 0x4b, 0x66, 0xe2, 0xa5, 0x84, + 0xf5, 0x35, 0x82, 0xbc, 0xb4, 0x4c, 0xa5, 0xfb, 0x25, 0x30, 0x64, 0x9b, 0x51, 0xa9, 0x71, 0x60, + 0x77, 0x02, 0xad, 0x33, 0xa9, 0x23, 0xf8, 0x23, 0x28, 0x76, 0x7c, 0x6f, 0x34, 0x22, 0x9d, 0xdb, + 0xaa, 0xc5, 0xc5, 0xe7, 0x5b, 0xdc, 0x86, 0xbe, 0x6f, 0xcf, 0x89, 0x5b, 0xbf, 0xf2, 0xa2, 0x92, + 0xed, 0x46, 0x41, 0x15, 0xba, 0x88, 0x0e, 0xdd, 0xe1, 0xe3, 0x8b, 0x76, 0xf8, 0x15, 0x30, 0xba, + 0xbe, 0x37, 0x1e, 0xd1, 0x72, 0x42, 0x96, 0xbc, 0xa4, 0x16, 0xec, 0xfc, 0xd7, 0xa0, 0x18, 0xb8, + 0x72, 0x40, 0xcf, 0xad, 0xcc, 0xf7, 0xdc, 0xcd, 0x0e, 0x19, 0x32, 0x77, 0xdb, 0x0d, 0xbb, 0xa8, + 0x92, 0xb7, 0xbe, 0x41, 0x50, 0x9a, 0x17, 0xc1, 0x1f, 0x6a, 0x69, 0xce, 0xd5, 0x9d, 0x3a, 0x58, + 0x5d, 0x43, 0xf4, 0x34, 0x2a, 0xfa, 0x40, 0x50, 0x02, 0x95, 0x0b, 0x90, 0xd3, 0xd8, 0xb8, 0x04, + 0x89, 0x1d, 0x12, 0xa4, 0x24, 0x5f, 0xf2, 0xa4, 0x8b, 0x2a, 0x32, 0xab, 0xca, 0xf0, 0x62, 0xfc, + 0x3c, 0xe2, 0x09, 0x5d, 0x98, 0x89, 0x24, 0x3e, 0x0f, 0xc9, 0x6d, 0xdf, 0x1b, 0x2c, 0x14, 0x26, + 0x71, 0x02, 0xbf, 0x0d, 0x71, 0xe6, 0x2d, 0x14, 0xa4, 0x38, 0xf3, 0x78, 0x8c, 0x94, 0xf3, 0x09, + 0x61, 0x9c, 0xa2, 0xac, 0x9f, 0x11, 0x1c, 0xe1, 0x67, 0x24, 0x02, 0x97, 0x7b, 0xe3, 0xe1, 0x0e, + 0xae, 0x43, 0x89, 0xdf, 0x74, 0xdf, 0x55, 0x23, 0xea, 0xbe, 0xdb, 0x51, 0x6e, 0x16, 0x39, 0x3f, + 0x98, 0x5c, 0x9b, 0x1d, 0xbc, 0x0a, 0xe9, 0x31, 0x95, 0x02, 0xd2, 0x67, 0x83, 0x93, 0x9b, 0x1d, + 0x7c, 0x46, 0xbb, 0x8e, 0x63, 0xad, 0xbd, 0x7f, 0x04, 0x86, 0x37, 
0x1d, 0xd7, 0x0f, 0x7b, 0xcb, + 0x69, 0x30, 0xda, 0xfc, 0x62, 0x99, 0x27, 0x7c, 0x44, 0x86, 0xc2, 0xc2, 0x20, 0x5b, 0x6d, 0x5b, + 0xef, 0x40, 0x36, 0x3c, 0xbd, 0xef, 0x64, 0xdc, 0x37, 0x02, 0xd6, 0x31, 0x48, 0x49, 0xc7, 0x30, + 0x24, 0x3b, 0x0e, 0x73, 0xc4, 0x91, 0xbc, 0x2d, 0xd6, 0x56, 0x19, 0x56, 0xee, 0xf8, 0xce, 0x90, + 0x6e, 0x13, 0x5f, 0x08, 0x85, 0xe9, 0x67, 0x1d, 0x85, 0x25, 0x5e, 0xea, 0xc4, 0xa7, 0x97, 0xbd, + 0xf1, 0x90, 0xa9, 0x0a, 0xb3, 0xce, 0xc2, 0xf2, 0x2c, 0x5b, 0x65, 0xeb, 0x32, 0xa4, 0xda, 0x9c, + 0x21, 0xb4, 0x17, 0x6c, 0x49, 0x58, 0x3f, 0x20, 0xc0, 0x9f, 0x10, 0x26, 0x54, 0x6f, 0x6e, 0x50, + 0xed, 0x91, 0x36, 0x70, 0x58, 0xbb, 0x47, 0x7c, 0x1a, 0x3c, 0xd2, 0x02, 0xfa, 0xff, 0x78, 0xa4, + 0x59, 0xe7, 0x60, 0x69, 0xc6, 0x4a, 0xe5, 0x53, 0x05, 0x32, 0x6d, 0xc5, 0x53, 0xe3, 0x3c, 0xa4, + 0x5f, 0x3f, 0x05, 0xd9, 0xf0, 0x29, 0x8b, 0x73, 0x90, 0xbe, 0x72, 0xc3, 0xbe, 0xbb, 0x6e, 0x6f, + 0x94, 0x62, 0x38, 0x0f, 0x99, 0xd6, 0xfa, 0xe5, 0x4f, 0x05, 0x85, 0xd6, 0xd6, 0xc1, 0xe0, 0x8f, + 0x7a, 0xe2, 0xe3, 0xf7, 0x20, 0xc9, 0x57, 0xf8, 0x68, 0x14, 0x5f, 0xed, 0x7f, 0x44, 0x65, 0x65, + 0x9e, 0xad, 0xe2, 0x10, 0x5b, 0xfb, 0x3b, 0x01, 0x69, 0xfe, 0x1c, 0xe3, 0x55, 0xfc, 0x3e, 0xa4, + 0xc4, 0xcb, 0x0c, 0x6b, 0xe2, 0xfa, 0xfb, 0xb7, 0xb2, 0xfa, 0x12, 0x3f, 0xd0, 0xf3, 0x26, 0xc2, + 0x9f, 0x41, 0x4e, 0x30, 0xd5, 0xa4, 0x3d, 0x3e, 0x3f, 0xc4, 0x66, 0x34, 0x9d, 0x38, 0x60, 0x57, + 0xd3, 0x77, 0x11, 0x52, 0x22, 0x23, 0x75, 0x6b, 0xf4, 0xf7, 0x9b, 0x6e, 0xcd, 0xcc, 0x3b, 0xc9, + 0x8a, 0xe1, 0x0b, 0x90, 0xe4, 0x89, 0xa4, 0xc3, 0xa1, 0x0d, 0x3d, 0x1d, 0x0e, 0x7d, 0xe2, 0x88, + 0x6b, 0x3f, 0x08, 0x67, 0xf7, 0xea, 0x7c, 0x13, 0x0b, 0x8e, 0x97, 0x5f, 0xde, 0x08, 0x6f, 0xbe, + 0x21, 0x87, 0x58, 0x90, 0xc2, 0xf8, 0xc4, 0xec, 0x55, 0x73, 0x19, 0x5f, 0x31, 0x0f, 0xda, 0x0e, + 0x15, 0x5e, 0x87, 0x9c, 0x96, 0x3e, 0x3a, 0xac, 0x2f, 0xe7, 0xbe, 0x0e, 0xeb, 0x3e, 0x39, 0x67, + 0xc5, 0xd6, 0x3e, 0x87, 0x4c, 0xd0, 0x63, 0xf0, 0x2d, 0x28, 0xce, 0x96, 0x27, 0x7e, 0x4d, 0xb3, + 0x66, 0xb6, 0x71, 0x55, 0x6a, 0xda, 0xd6, 0xfe, 0x35, 0x1d, 0xab, 0xa3, 0xd6, 0xbd, 0x27, 0xcf, + 0xcc, 0xd8, 0xd3, 0x67, 0x66, 0xec, 0xc5, 0x33, 0x13, 0x7d, 0x35, 0x35, 0xd1, 0x4f, 0x53, 0x13, + 0x3d, 0x9e, 0x9a, 0xe8, 0xc9, 0xd4, 0x44, 0x7f, 0x4e, 0x4d, 0xf4, 0xd7, 0xd4, 0x8c, 0xbd, 0x98, + 0x9a, 0xe8, 0xd1, 0x73, 0x33, 0xf6, 0xe4, 0xb9, 0x19, 0x7b, 0xfa, 0xdc, 0x8c, 0xdd, 0x3b, 0xa9, + 0xff, 0x8b, 0xf6, 0x9d, 0x6d, 0x67, 0xe8, 0x34, 0xfb, 0xde, 0x8e, 0xdb, 0xd4, 0xff, 0xa5, 0x6f, + 0x19, 0xe2, 0xe7, 0xad, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x5d, 0x7b, 0xff, 0xbc, 0x0f, + 0x00, 0x00, } func (x Direction) String() string { @@ -1817,6 +1835,9 @@ func (this *StreamAdapter) Equal(that interface{}) bool { return false } } + if this.LabelsHash != that1.LabelsHash { + return false + } return true } func (this *EntryAdapter) Equal(that interface{}) bool { @@ -1906,6 +1927,9 @@ func (this *Series) Equal(that interface{}) bool { return false } } + if this.LabelsHash != that1.LabelsHash { + return false + } return true } func (this *TailRequest) Equal(that interface{}) bool { @@ -2423,7 +2447,7 @@ func (this *StreamAdapter) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 6) + s := make([]string, 0, 7) s = append(s, "&logproto.StreamAdapter{") s = append(s, "Labels: "+fmt.Sprintf("%#v", this.Labels)+",\n") if this.Entries != nil { @@ -2433,6 +2457,7 @@ func (this *StreamAdapter) GoString() string { } s = append(s, "Entries: "+fmt.Sprintf("%#v", vs)+",\n") } + s = append(s, "LabelsHash: "+fmt.Sprintf("%#v", 
this.LabelsHash)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -2463,7 +2488,7 @@ func (this *Series) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 6) + s := make([]string, 0, 7) s = append(s, "&logproto.Series{") s = append(s, "Labels: "+fmt.Sprintf("%#v", this.Labels)+",\n") if this.Samples != nil { @@ -2473,6 +2498,7 @@ func (this *Series) GoString() string { } s = append(s, "Samples: "+fmt.Sprintf("%#v", vs)+",\n") } + s = append(s, "LabelsHash: "+fmt.Sprintf("%#v", this.LabelsHash)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -3604,6 +3630,11 @@ func (m *StreamAdapter) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.LabelsHash != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.LabelsHash)) + i-- + dAtA[i] = 0x18 + } if len(m.Entries) > 0 { for iNdEx := len(m.Entries) - 1; iNdEx >= 0; iNdEx-- { { @@ -3725,6 +3756,11 @@ func (m *Series) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.LabelsHash != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.LabelsHash)) + i-- + dAtA[i] = 0x18 + } if len(m.Samples) > 0 { for iNdEx := len(m.Samples) - 1; iNdEx >= 0; iNdEx-- { { @@ -4488,6 +4524,9 @@ func (m *StreamAdapter) Size() (n int) { n += 1 + l + sovLogproto(uint64(l)) } } + if m.LabelsHash != 0 { + n += 1 + sovLogproto(uint64(m.LabelsHash)) + } return n } @@ -4540,6 +4579,9 @@ func (m *Series) Size() (n int) { n += 1 + l + sovLogproto(uint64(l)) } } + if m.LabelsHash != 0 { + n += 1 + sovLogproto(uint64(m.LabelsHash)) + } return n } @@ -4888,6 +4930,7 @@ func (this *StreamAdapter) String() string { s := strings.Join([]string{`&StreamAdapter{`, `Labels:` + fmt.Sprintf("%v", this.Labels) + `,`, `Entries:` + repeatedStringForEntries + `,`, + `LabelsHash:` + fmt.Sprintf("%v", this.LabelsHash) + `,`, `}`, }, "") return s @@ -4927,6 +4970,7 @@ func (this *Series) String() string { s := strings.Join([]string{`&Series{`, `Labels:` + fmt.Sprintf("%v", this.Labels) + `,`, `Samples:` + repeatedStringForSamples + `,`, + `LabelsHash:` + fmt.Sprintf("%v", this.LabelsHash) + `,`, `}`, }, "") return s @@ -6263,6 +6307,25 @@ func (m *StreamAdapter) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LabelsHash", wireType) + } + m.LabelsHash = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.LabelsHash |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipLogproto(dAtA[iNdEx:]) @@ -6602,6 +6665,25 @@ func (m *Series) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LabelsHash", wireType) + } + m.LabelsHash = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.LabelsHash |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipLogproto(dAtA[iNdEx:]) diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index a0cb9f102e88..c2e5582c764b 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -82,6 +82,7 @@ message LabelResponse { message StreamAdapter { string labels = 1 [(gogoproto.jsontag) = "labels"]; repeated 
EntryAdapter entries = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "entries"]; + uint64 labelsHash = 3 [(gogoproto.jsontag) = "labelsHash"]; } message EntryAdapter { @@ -98,6 +99,7 @@ message Sample { message Series { string labels = 1 [(gogoproto.jsontag) = "labels"]; repeated Sample samples = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "samples"]; + uint64 labelsHash = 3 [(gogoproto.jsontag) = "labelsHash"]; } message TailRequest { diff --git a/pkg/logproto/types.go b/pkg/logproto/types.go index 655fc0b83ccb..4edae3e1922e 100644 --- a/pkg/logproto/types.go +++ b/pkg/logproto/types.go @@ -10,8 +10,9 @@ import ( // We are not using the proto generated version but this custom one so that we // can improve serialization see benchmark. type Stream struct { - Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - Entries []Entry `protobuf:"bytes,2,rep,name=entries,proto3,customtype=EntryAdapter" json:"entries"` + Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Entries []Entry `protobuf:"bytes,2,rep,name=entries,proto3,customtype=EntryAdapter" json:"entries"` + LabelsHash uint64 `protobuf:"varint,3,opt,name=labelsHash,proto3" json:"labelsHash"` } // Entry is a log entry with a timestamp. @@ -40,6 +41,11 @@ func (m *Stream) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.LabelsHash != 0 { + i = encodeVarintLogproto(dAtA, i, m.LabelsHash) + i-- + dAtA[i] = 0x18 + } if len(m.Entries) > 0 { for iNdEx := len(m.Entries) - 1; iNdEx >= 0; iNdEx-- { { @@ -197,6 +203,25 @@ func (m *Stream) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LabelsHash", wireType) + } + m.LabelsHash = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.LabelsHash |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipLogproto(dAtA[iNdEx:]) @@ -357,6 +382,9 @@ func (m *Stream) Size() (n int) { n += 1 + l + sovLogproto(uint64(l)) } } + if m.LabelsHash != 0 { + n += 1 + sovLogproto(m.LabelsHash) + } return n } @@ -405,7 +433,7 @@ func (m *Stream) Equal(that interface{}) bool { return false } } - return true + return m.LabelsHash == that1.LabelsHash } func (m *Entry) Equal(that interface{}) bool { From 3ddc59a3ce6e3afe0dcf9d38b7173ef750345665 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 28 Jan 2022 11:10:05 +0100 Subject: [PATCH 05/14] Insert the hash from ingester-querier Signed-off-by: Cyril Tovena --- pkg/iter/entry_iterator.go | 5 +++-- pkg/iter/sample_iterator.go | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index f80da1194118..5c72abc165ce 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -651,11 +651,12 @@ func ReadBatch(i EntryIterator, size uint32) (*logproto.QueryResponse, uint32, e streams := map[string]*logproto.Stream{} respSize := uint32(0) for ; respSize < size && i.Next(); respSize++ { - labels, entry := i.Labels(), i.Entry() + labels, hash, entry := i.Labels(), i.LabelsHash(), i.Entry() stream, ok := streams[labels] if !ok { stream = &logproto.Stream{ - Labels: labels, + Labels: labels, + LabelsHash: hash, } streams[labels] = stream } diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 
9123aed1706e..9cf507b17b02 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -559,11 +559,12 @@ func ReadSampleBatch(i SampleIterator, size uint32) (*logproto.SampleQueryRespon series := map[string]*logproto.Series{} respSize := uint32(0) for ; respSize < size && i.Next(); respSize++ { - labels, sample := i.Labels(), i.Sample() + labels, hash, sample := i.Labels(), i.LabelsHash(), i.Sample() s, ok := series[labels] if !ok { s = &logproto.Series{ - Labels: labels, + Labels: labels, + LabelsHash: hash, } series[labels] = s } From ff842ec83984c6851704f81d8c4cc6ed070f0ce9 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 28 Jan 2022 11:57:55 +0100 Subject: [PATCH 06/14] Rename LabelsHash to StreamHash Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 2 +- pkg/chunkenc/memchunk.go | 4 +- pkg/iter/cache.go | 12 +- pkg/iter/cache_test.go | 2 +- pkg/iter/entry_iterator.go | 54 +++---- pkg/iter/entry_iterator_test.go | 2 +- pkg/iter/iterator.go | 6 +- pkg/iter/sample_iterator.go | 42 +++--- pkg/iter/sample_iterator_test.go | 2 +- pkg/logproto/logproto.pb.go | 239 ++++++++++++++++--------------- pkg/logproto/logproto.proto | 5 +- pkg/logproto/types.go | 22 +-- pkg/logproto/types_test.go | 4 +- pkg/logql/engine_test.go | 2 +- pkg/storage/batch.go | 8 +- 15 files changed, 204 insertions(+), 202 deletions(-) diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index 7f9d94400883..3a0e45837bfa 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -153,7 +153,7 @@ func (i *dumbChunkIterator) Labels() string { return "" } -func (i *dumbChunkIterator) LabelsHash() uint64 { +func (i *dumbChunkIterator) StreamHash() uint64 { return 0 } diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index f8e3767eeff9..96afcf553433 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -1252,7 +1252,7 @@ func (e *entryBufferedIterator) Entry() logproto.Entry { func (e *entryBufferedIterator) Labels() string { return e.currLabels.String() } -func (e *entryBufferedIterator) LabelsHash() uint64 { return e.pipeline.BaseLabels().Hash() } +func (e *entryBufferedIterator) StreamHash() uint64 { return e.pipeline.BaseLabels().Hash() } func (e *entryBufferedIterator) Next() bool { for e.bufferedIterator.Next() { @@ -1301,7 +1301,7 @@ func (e *sampleBufferedIterator) Next() bool { } func (e *sampleBufferedIterator) Labels() string { return e.currLabels.String() } -func (e *sampleBufferedIterator) LabelsHash() uint64 { return e.extractor.BaseLabels().Hash() } +func (e *sampleBufferedIterator) StreamHash() uint64 { return e.extractor.BaseLabels().Hash() } func (e *sampleBufferedIterator) Sample() logproto.Sample { return e.cur diff --git a/pkg/iter/cache.go b/pkg/iter/cache.go index 49bcfafe28c3..60e242cbe082 100644 --- a/pkg/iter/cache.go +++ b/pkg/iter/cache.go @@ -53,7 +53,7 @@ func (it *cachedIterator) consumeWrapped() bool { return false } // we're caching entries - it.cache = append(it.cache, entryWithLabels{entry: it.Wrapped().Entry(), labels: it.Wrapped().Labels(), labelsHash: it.Wrapped().LabelsHash()}) + it.cache = append(it.cache, entryWithLabels{entry: it.Wrapped().Entry(), labels: it.Wrapped().Labels(), streamHash: it.Wrapped().StreamHash()}) it.curr++ return true } @@ -87,11 +87,11 @@ func (it *cachedIterator) Labels() string { return it.cache[it.curr].labels } -func (it *cachedIterator) LabelsHash() uint64 { +func (it *cachedIterator) StreamHash() uint64 { if len(it.cache) == 0 || it.curr < 0 || it.curr >= 
len(it.cache) { return 0 } - return it.cache[it.curr].labelsHash + return it.cache[it.curr].streamHash } func (it *cachedIterator) Error() error { return it.iterErr } @@ -150,7 +150,7 @@ func (it *cachedSampleIterator) consumeWrapped() bool { return false } // we're caching entries - it.cache = append(it.cache, sampleWithLabels{Sample: it.Wrapped().Sample(), labels: it.Wrapped().Labels(), labelsHash: it.Wrapped().LabelsHash()}) + it.cache = append(it.cache, sampleWithLabels{Sample: it.Wrapped().Sample(), labels: it.Wrapped().Labels(), streamHash: it.Wrapped().StreamHash()}) it.curr++ return true } @@ -183,11 +183,11 @@ func (it *cachedSampleIterator) Labels() string { return it.cache[it.curr].labels } -func (it *cachedSampleIterator) LabelsHash() uint64 { +func (it *cachedSampleIterator) StreamHash() uint64 { if len(it.cache) == 0 || it.curr < 0 || it.curr >= len(it.cache) { return 0 } - return it.cache[it.curr].labelsHash + return it.cache[it.curr].streamHash } func (it *cachedSampleIterator) Error() error { return it.iterErr } diff --git a/pkg/iter/cache_test.go b/pkg/iter/cache_test.go index be42a64599d5..9befe3438353 100644 --- a/pkg/iter/cache_test.go +++ b/pkg/iter/cache_test.go @@ -251,7 +251,7 @@ type errorIter struct{} func (errorIter) Next() bool { return false } func (errorIter) Error() error { return errors.New("error") } func (errorIter) Labels() string { return "" } -func (errorIter) LabelsHash() uint64 { return 0 } +func (errorIter) StreamHash() uint64 { return 0 } func (errorIter) Entry() logproto.Entry { return logproto.Entry{} } func (errorIter) Sample() logproto.Sample { return logproto.Sample{} } func (errorIter) Close() error { return errors.New("close") } diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 5c72abc165ce..ea7cb35288ef 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -45,7 +45,7 @@ func (i *streamIterator) Labels() string { return i.stream.Labels } -func (i *streamIterator) LabelsHash() uint64 { return i.stream.LabelsHash } +func (i *streamIterator) StreamHash() uint64 { return i.stream.Hash } func (i *streamIterator) Entry() logproto.Entry { return i.stream.Entries[i.i] @@ -88,7 +88,7 @@ func (h iteratorMinHeap) Less(i, j int) bool { case un1 > un2: return false default: // un1 == un2: - return h.iteratorHeap[i].LabelsHash() < h.iteratorHeap[j].LabelsHash() + return h.iteratorHeap[i].StreamHash() < h.iteratorHeap[j].StreamHash() } } @@ -108,7 +108,7 @@ func (h iteratorMaxHeap) Less(i, j int) bool { case un1 > un2: return true default: // un1 == un2 - return h.iteratorHeap[i].LabelsHash() < h.iteratorHeap[j].LabelsHash() + return h.iteratorHeap[i].StreamHash() < h.iteratorHeap[j].StreamHash() } } @@ -134,7 +134,7 @@ type heapIterator struct { tuples []tuple currEntry logproto.Entry currLabels string - currLabelsHash uint64 + currStreamHash uint64 errs []error } @@ -211,7 +211,7 @@ func (i *heapIterator) Next() bool { if i.heap.Len() == 1 { i.currEntry = i.heap.Peek().Entry() i.currLabels = i.heap.Peek().Labels() - i.currLabelsHash = i.heap.Peek().LabelsHash() + i.currStreamHash = i.heap.Peek().StreamHash() if !i.heap.Peek().Next() { i.heap.Pop() } @@ -225,7 +225,7 @@ func (i *heapIterator) Next() bool { for i.heap.Len() > 0 { next := i.heap.Peek() entry := next.Entry() - if len(i.tuples) > 0 && (i.tuples[0].LabelsHash() != next.LabelsHash() || !i.tuples[0].Timestamp.Equal(entry.Timestamp)) { + if len(i.tuples) > 0 && (i.tuples[0].StreamHash() != next.StreamHash() || 
!i.tuples[0].Timestamp.Equal(entry.Timestamp)) { break } @@ -240,7 +240,7 @@ func (i *heapIterator) Next() bool { if len(i.tuples) == 1 { i.currEntry = i.tuples[0].Entry i.currLabels = i.tuples[0].Labels() - i.currLabelsHash = i.tuples[0].LabelsHash() + i.currStreamHash = i.tuples[0].StreamHash() i.requeue(i.tuples[0].EntryIterator, false) i.tuples = i.tuples[:0] return true @@ -251,7 +251,7 @@ func (i *heapIterator) Next() bool { t := i.tuples[0] i.currEntry = t.Entry i.currLabels = t.Labels() - i.currLabelsHash = t.LabelsHash() + i.currStreamHash = t.StreamHash() // Requeue the iterators, advancing them if they were consumed. for j := range i.tuples { @@ -277,7 +277,7 @@ func (i *heapIterator) Labels() string { return i.currLabels } -func (i *heapIterator) LabelsHash() uint64 { return i.currLabelsHash } +func (i *heapIterator) StreamHash() uint64 { return i.currStreamHash } func (i *heapIterator) Error() error { switch len(i.errs) { @@ -371,7 +371,7 @@ func (i *queryClientIterator) Labels() string { return i.curr.Labels() } -func (i *queryClientIterator) LabelsHash() uint64 { return i.curr.LabelsHash() } +func (i *queryClientIterator) StreamHash() uint64 { return i.curr.StreamHash() } func (i *queryClientIterator) Error() error { return i.err @@ -421,11 +421,11 @@ func (i *nonOverlappingIterator) Labels() string { return i.curr.Labels() } -func (i *nonOverlappingIterator) LabelsHash() uint64 { +func (i *nonOverlappingIterator) StreamHash() uint64 { if i.curr == nil { return 0 } - return i.curr.LabelsHash() + return i.curr.StreamHash() } func (i *nonOverlappingIterator) Error() error { @@ -492,7 +492,7 @@ func (i *timeRangedIterator) Next() bool { type entryWithLabels struct { entry logproto.Entry labels string - labelsHash uint64 + streamHash uint64 } type reverseIterator struct { @@ -528,7 +528,7 @@ func (i *reverseIterator) load() { if !i.loaded { i.loaded = true for count := uint32(0); (i.limit == 0 || count < i.limit) && i.iter.Next(); count++ { - i.entriesWithLabels = append(i.entriesWithLabels, entryWithLabels{i.iter.Entry(), i.iter.Labels(), i.iter.LabelsHash()}) + i.entriesWithLabels = append(i.entriesWithLabels, entryWithLabels{i.iter.Entry(), i.iter.Labels(), i.iter.StreamHash()}) } i.iter.Close() } @@ -552,8 +552,8 @@ func (i *reverseIterator) Labels() string { return i.cur.labels } -func (i *reverseIterator) LabelsHash() uint64 { - return i.cur.labelsHash +func (i *reverseIterator) StreamHash() uint64 { + return i.cur.streamHash } func (i *reverseIterator) Error() error { return nil } @@ -603,7 +603,7 @@ func (i *reverseEntryIterator) load() { if !i.loaded { i.loaded = true for i.iter.Next() { - i.buf.entries = append(i.buf.entries, entryWithLabels{i.iter.Entry(), i.iter.Labels(), i.iter.LabelsHash()}) + i.buf.entries = append(i.buf.entries, entryWithLabels{i.iter.Entry(), i.iter.Labels(), i.iter.StreamHash()}) } i.iter.Close() } @@ -628,8 +628,8 @@ func (i *reverseEntryIterator) Labels() string { return i.cur.labels } -func (i *reverseEntryIterator) LabelsHash() uint64 { - return i.cur.labelsHash +func (i *reverseEntryIterator) StreamHash() uint64 { + return i.cur.streamHash } func (i *reverseEntryIterator) Error() error { return nil } @@ -651,12 +651,12 @@ func ReadBatch(i EntryIterator, size uint32) (*logproto.QueryResponse, uint32, e streams := map[string]*logproto.Stream{} respSize := uint32(0) for ; respSize < size && i.Next(); respSize++ { - labels, hash, entry := i.Labels(), i.LabelsHash(), i.Entry() + labels, hash, entry := i.Labels(), i.StreamHash(), i.Entry() 
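		// NOTE (sketch, not part of this change): StreamHash() is the hash of the
		// original stream, so it can be forwarded even when the label string has
		// been mutated by the query pipeline. A consumer of the response could
		// then group results without re-hashing labels, e.g. (assuming an
		// iterator `it` and a map `countByHash` declared by the caller):
		//
		//	resp, _, err := ReadBatch(it, 100)
		//	if err == nil {
		//		for _, s := range resp.Streams {
		//			countByHash[s.Hash]++
		//		}
		//	}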
stream, ok := streams[labels] if !ok { stream = &logproto.Stream{ - Labels: labels, - LabelsHash: hash, + Labels: labels, + Hash: hash, } streams[labels] = stream } @@ -695,7 +695,7 @@ func NewPeekingIterator(iter EntryIterator) PeekingEntryIterator { cache = &entryWithLabels{ entry: iter.Entry(), labels: iter.Labels(), - labelsHash: iter.LabelsHash(), + streamHash: iter.StreamHash(), } next.entry = cache.entry next.labels = cache.labels @@ -712,7 +712,7 @@ func (it *peekingEntryIterator) Next() bool { if it.cache != nil { it.next.entry = it.cache.entry it.next.labels = it.cache.labels - it.next.labelsHash = it.cache.labelsHash + it.next.streamHash = it.cache.streamHash it.cacheNext() return true } @@ -724,7 +724,7 @@ func (it *peekingEntryIterator) cacheNext() { if it.iter.Next() { it.cache.entry = it.iter.Entry() it.cache.labels = it.iter.Labels() - it.cache.labelsHash = it.iter.LabelsHash() + it.cache.streamHash = it.iter.StreamHash() return } // nothing left removes the cached entry @@ -747,9 +747,9 @@ func (it *peekingEntryIterator) Labels() string { return "" } -func (it *peekingEntryIterator) LabelsHash() uint64 { +func (it *peekingEntryIterator) StreamHash() uint64 { if it.next != nil { - return it.next.labelsHash + return it.next.streamHash } return 0 } diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index 8413ee16784e..9fa3c7c42a5f 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -615,7 +615,7 @@ type CloseTestingIterator struct { func (i *CloseTestingIterator) Next() bool { return true } func (i *CloseTestingIterator) Entry() logproto.Entry { return i.e } func (i *CloseTestingIterator) Labels() string { return "" } -func (i *CloseTestingIterator) LabelsHash() uint64 { return 0 } +func (i *CloseTestingIterator) StreamHash() uint64 { return 0 } func (i *CloseTestingIterator) Error() error { return nil } func (i *CloseTestingIterator) Close() error { i.closed.Store(true) diff --git a/pkg/iter/iterator.go b/pkg/iter/iterator.go index fcfe44851500..334e20d2bd9e 100644 --- a/pkg/iter/iterator.go +++ b/pkg/iter/iterator.go @@ -9,8 +9,8 @@ type Iterator interface { // Labels returns the labels for the current entry. // The labels can be mutated by the query engine and not reflect the original stream. Labels() string - // LabelsHash returns a hash of the original stream labels for the current entry. - LabelsHash() uint64 + // StreamHash returns a hash of the original stream for the current entry. 
+ StreamHash() uint64 Error() error Close() error } @@ -22,7 +22,7 @@ var NoopIterator = noOpIterator{} func (noOpIterator) Next() bool { return false } func (noOpIterator) Error() error { return nil } func (noOpIterator) Labels() string { return "" } -func (noOpIterator) LabelsHash() uint64 { return 0 } +func (noOpIterator) StreamHash() uint64 { return 0 } func (noOpIterator) Entry() logproto.Entry { return logproto.Entry{} } func (noOpIterator) Sample() logproto.Sample { return logproto.Sample{} } func (noOpIterator) Close() error { return nil } diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 9cf507b17b02..07eedf285420 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -35,7 +35,7 @@ type peekingSampleIterator struct { type sampleWithLabels struct { logproto.Sample labels string - labelsHash uint64 + streamHash uint64 } func NewPeekingSampleIterator(iter SampleIterator) PeekingSampleIterator { @@ -46,7 +46,7 @@ func NewPeekingSampleIterator(iter SampleIterator) PeekingSampleIterator { cache = &sampleWithLabels{ Sample: iter.Sample(), labels: iter.Labels(), - labelsHash: iter.LabelsHash(), + streamHash: iter.StreamHash(), } next.Sample = cache.Sample next.labels = cache.labels @@ -69,9 +69,9 @@ func (it *peekingSampleIterator) Labels() string { return "" } -func (it *peekingSampleIterator) LabelsHash() uint64 { +func (it *peekingSampleIterator) StreamHash() uint64 { if it.next != nil { - return it.next.labelsHash + return it.next.streamHash } return 0 } @@ -80,7 +80,7 @@ func (it *peekingSampleIterator) Next() bool { if it.cache != nil { it.next.Sample = it.cache.Sample it.next.labels = it.cache.labels - it.next.labelsHash = it.cache.labelsHash + it.next.streamHash = it.cache.streamHash it.cacheNext() return true } @@ -92,7 +92,7 @@ func (it *peekingSampleIterator) cacheNext() { if it.iter.Next() { it.cache.Sample = it.iter.Sample() it.cache.labels = it.iter.Labels() - it.cache.labelsHash = it.iter.LabelsHash() + it.cache.streamHash = it.iter.StreamHash() return } // nothing left removes the cached entry @@ -142,7 +142,7 @@ func (h sampleIteratorHeap) Less(i, j int) bool { case s1.Timestamp > s2.Timestamp: return false default: - return h[i].LabelsHash() < h[j].LabelsHash() + return h[i].StreamHash() < h[j].StreamHash() } } @@ -156,7 +156,7 @@ type heapSampleIterator struct { tuples []sampletuple curr logproto.Sample currLabels string - currLabelsHash uint64 + currStreamHash uint64 errs []error } @@ -224,7 +224,7 @@ func (i *heapSampleIterator) Next() bool { if i.heap.Len() == 1 { i.curr = i.heap.Peek().Sample() i.currLabels = i.heap.Peek().Labels() - i.currLabelsHash = i.heap.Peek().LabelsHash() + i.currStreamHash = i.heap.Peek().StreamHash() if !i.heap.Peek().Next() { i.heap.Pop() } @@ -238,7 +238,7 @@ func (i *heapSampleIterator) Next() bool { for i.heap.Len() > 0 { next := i.heap.Peek() sample := next.Sample() - if len(i.tuples) > 0 && (i.tuples[0].LabelsHash() != next.LabelsHash() || i.tuples[0].Timestamp != sample.Timestamp) { + if len(i.tuples) > 0 && (i.tuples[0].StreamHash() != next.StreamHash() || i.tuples[0].Timestamp != sample.Timestamp) { break } @@ -251,7 +251,7 @@ func (i *heapSampleIterator) Next() bool { i.curr = i.tuples[0].Sample i.currLabels = i.tuples[0].Labels() - i.currLabelsHash = i.tuples[0].LabelsHash() + i.currStreamHash = i.tuples[0].StreamHash() t := i.tuples[0] if len(i.tuples) == 1 { i.requeue(i.tuples[0].SampleIterator, false) @@ -282,8 +282,8 @@ func (i *heapSampleIterator) Labels() string { return 
i.currLabels } -func (i *heapSampleIterator) LabelsHash() uint64 { - return i.currLabelsHash +func (i *heapSampleIterator) StreamHash() uint64 { + return i.currStreamHash } func (i *heapSampleIterator) Error() error { @@ -351,8 +351,8 @@ func (i *sampleQueryClientIterator) Labels() string { return i.curr.Labels() } -func (i *sampleQueryClientIterator) LabelsHash() uint64 { - return i.curr.LabelsHash() +func (i *sampleQueryClientIterator) StreamHash() uint64 { + return i.curr.StreamHash() } func (i *sampleQueryClientIterator) Error() error { @@ -433,8 +433,8 @@ func (i *seriesIterator) Labels() string { return i.series.Labels } -func (i *seriesIterator) LabelsHash() uint64 { - return i.series.LabelsHash +func (i *seriesIterator) StreamHash() uint64 { + return i.series.StreamHash } func (i *seriesIterator) Sample() logproto.Sample { @@ -487,11 +487,11 @@ func (i *nonOverlappingSampleIterator) Labels() string { return i.curr.Labels() } -func (i *nonOverlappingSampleIterator) LabelsHash() uint64 { +func (i *nonOverlappingSampleIterator) StreamHash() uint64 { if i.curr == nil { return 0 } - return i.curr.LabelsHash() + return i.curr.StreamHash() } func (i *nonOverlappingSampleIterator) Error() error { @@ -559,12 +559,12 @@ func ReadSampleBatch(i SampleIterator, size uint32) (*logproto.SampleQueryRespon series := map[string]*logproto.Series{} respSize := uint32(0) for ; respSize < size && i.Next(); respSize++ { - labels, hash, sample := i.Labels(), i.LabelsHash(), i.Sample() + labels, hash, sample := i.Labels(), i.StreamHash(), i.Sample() s, ok := series[labels] if !ok { s = &logproto.Series{ Labels: labels, - LabelsHash: hash, + StreamHash: hash, } series[labels] = s } diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index fa3cba89adf2..d2272714091b 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -208,7 +208,7 @@ type CloseTestingSmplIterator struct { func (i *CloseTestingSmplIterator) Next() bool { return true } func (i *CloseTestingSmplIterator) Sample() logproto.Sample { return i.s } -func (i *CloseTestingSmplIterator) LabelsHash() uint64 { return 0 } +func (i *CloseTestingSmplIterator) StreamHash() uint64 { return 0 } func (i *CloseTestingSmplIterator) Labels() string { return "" } func (i *CloseTestingSmplIterator) Error() error { return nil } func (i *CloseTestingSmplIterator) Close() error { diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 0fa3da25ca35..ea26da9996f4 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -479,9 +479,10 @@ func (m *LabelResponse) GetValues() []string { } type StreamAdapter struct { - Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - Entries []EntryAdapter `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries"` - LabelsHash uint64 `protobuf:"varint,3,opt,name=labelsHash,proto3" json:"labelsHash"` + Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Entries []EntryAdapter `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries"` + // hash contains the original hash of the stream. 
+ Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"hash"` } func (m *StreamAdapter) Reset() { *m = StreamAdapter{} } @@ -530,9 +531,9 @@ func (m *StreamAdapter) GetEntries() []EntryAdapter { return nil } -func (m *StreamAdapter) GetLabelsHash() uint64 { +func (m *StreamAdapter) GetHash() uint64 { if m != nil { - return m.LabelsHash + return m.Hash } return 0 } @@ -650,7 +651,7 @@ func (m *Sample) GetHash() uint64 { type Series struct { Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` Samples []Sample `protobuf:"bytes,2,rep,name=samples,proto3" json:"samples"` - LabelsHash uint64 `protobuf:"varint,3,opt,name=labelsHash,proto3" json:"labelsHash"` + StreamHash uint64 `protobuf:"varint,3,opt,name=streamHash,proto3" json:"streamHash"` } func (m *Series) Reset() { *m = Series{} } @@ -699,9 +700,9 @@ func (m *Series) GetSamples() []Sample { return nil } -func (m *Series) GetLabelsHash() uint64 { +func (m *Series) GetStreamHash() uint64 { if m != nil { - return m.LabelsHash + return m.StreamHash } return 0 } @@ -1439,97 +1440,97 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 1426 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x4b, 0x8f, 0x13, 0x47, - 0x10, 0x76, 0xfb, 0x31, 0xb6, 0xcb, 0x0f, 0xac, 0xde, 0x65, 0xd7, 0x31, 0x30, 0xb6, 0x46, 0x08, - 0xac, 0x40, 0xec, 0xb0, 0x79, 0xf1, 0xc8, 0x43, 0x6b, 0x36, 0x84, 0x25, 0x28, 0xc0, 0x80, 0x84, - 0x84, 0x14, 0xa1, 0x59, 0xbb, 0xd7, 0x1e, 0xad, 0xed, 0x31, 0xd3, 0x6d, 0xa4, 0x95, 0x22, 0x25, - 0x3f, 0x20, 0x91, 0xb8, 0x45, 0x51, 0xae, 0x51, 0x14, 0xe5, 0x90, 0xdf, 0x41, 0x6e, 0x28, 0x27, - 0x94, 0x83, 0x13, 0xcc, 0x25, 0x5a, 0xe5, 0xc0, 0x4f, 0x88, 0xfa, 0x31, 0x33, 0x6d, 0xb3, 0x9b, - 0xe0, 0xbd, 0xe4, 0x32, 0xee, 0xaa, 0xae, 0xae, 0xae, 0xfa, 0xea, 0xd5, 0x86, 0x63, 0xa3, 0x9d, - 0x6e, 0xb3, 0xef, 0x75, 0x47, 0xbe, 0xc7, 0xbc, 0x70, 0xd1, 0x10, 0x5f, 0x9c, 0x09, 0xe8, 0x4a, - 0xb5, 0xeb, 0x79, 0xdd, 0x3e, 0x69, 0x0a, 0x6a, 0x6b, 0xbc, 0xdd, 0x64, 0xee, 0x80, 0x50, 0xe6, - 0x0c, 0x46, 0x52, 0xb4, 0xf2, 0x46, 0xd7, 0x65, 0xbd, 0xf1, 0x56, 0xa3, 0xed, 0x0d, 0x9a, 0x5d, - 0xaf, 0xeb, 0x45, 0x92, 0x9c, 0x92, 0xda, 0xf9, 0x4a, 0x89, 0xd7, 0xd4, 0xb5, 0x0f, 0xfa, 0x03, - 0xaf, 0x43, 0xfa, 0x4d, 0xca, 0x1c, 0x46, 0xe5, 0x57, 0x4a, 0x58, 0x77, 0x21, 0x77, 0x73, 0x4c, - 0x7b, 0x36, 0x79, 0x30, 0x26, 0x94, 0xe1, 0xab, 0x90, 0xa6, 0xcc, 0x27, 0xce, 0x80, 0x96, 0x51, - 0x2d, 0x51, 0xcf, 0xad, 0xad, 0x36, 0x42, 0x63, 0x6f, 0x8b, 0x8d, 0xf5, 0x8e, 0x33, 0x62, 0xc4, - 0x6f, 0x1d, 0xfd, 0x7d, 0x52, 0x35, 0x24, 0x6b, 0x6f, 0x52, 0x0d, 0x4e, 0xd9, 0xc1, 0xc2, 0x2a, - 0x42, 0x5e, 0x2a, 0xa6, 0x23, 0x6f, 0x48, 0x89, 0xf5, 0x7d, 0x1c, 0xf2, 0xb7, 0xc6, 0xc4, 0xdf, - 0x0d, 0xae, 0xaa, 0x40, 0x86, 0x92, 0x3e, 0x69, 0x33, 0xcf, 0x2f, 0xa3, 0x1a, 0xaa, 0x67, 0xed, - 0x90, 0xc6, 0xcb, 0x90, 0xea, 0xbb, 0x03, 0x97, 0x95, 0xe3, 0x35, 0x54, 0x2f, 0xd8, 0x92, 0xc0, - 0x17, 0x21, 0x45, 0x99, 0xe3, 0xb3, 0x72, 0xa2, 0x86, 0xea, 0xb9, 0xb5, 0x4a, 0x43, 0xa2, 0xd5, - 0x08, 0x30, 0x68, 0xdc, 0x09, 0xd0, 0x6a, 0x65, 0x1e, 0x4f, 0xaa, 0xb1, 0x47, 0x7f, 0x54, 0x91, - 0x2d, 0x8f, 0xe0, 0x77, 0x21, 0x41, 0x86, 0x9d, 0x72, 0x72, 0x81, 0x93, 0xfc, 0x00, 0x3e, 0x07, - 0xd9, 0x8e, 0xeb, 0x93, 0x36, 0x73, 0xbd, 0x61, 0x39, 0x55, 0x43, 0xf5, 0xe2, 0xda, 0x52, 0x04, - 0xc9, 0x46, 0xb0, 0x65, 0x47, 0x52, 0xf8, 0x2c, 0x18, 0xb4, 0xe7, 0xf8, 0x1d, 0x5a, 0x4e, 0xd7, - 0x12, 0xf5, 
0x6c, 0x6b, 0x79, 0x6f, 0x52, 0x2d, 0x49, 0xce, 0x59, 0x6f, 0xe0, 0x32, 0x32, 0x18, - 0xb1, 0x5d, 0x5b, 0xc9, 0x5c, 0x4b, 0x66, 0x8c, 0x52, 0xda, 0xfa, 0x0d, 0x01, 0xbe, 0xed, 0x0c, - 0x46, 0x7d, 0xf2, 0xca, 0x18, 0x85, 0x68, 0xc4, 0x0f, 0x8d, 0x46, 0x62, 0x51, 0x34, 0x22, 0xd7, - 0x92, 0xff, 0xed, 0x9a, 0xf5, 0x25, 0x14, 0x94, 0x37, 0x32, 0x07, 0xf0, 0xfa, 0x2b, 0x67, 0x57, - 0xf1, 0xf1, 0xa4, 0x8a, 0xa2, 0x0c, 0x0b, 0xd3, 0x0a, 0x9f, 0x11, 0x5e, 0x33, 0xaa, 0xbc, 0x3e, - 0xd2, 0x90, 0xc9, 0xbc, 0x39, 0xec, 0x12, 0xca, 0x0f, 0x26, 0xb9, 0xc1, 0xb6, 0x94, 0xb1, 0xbe, - 0x80, 0xa5, 0x19, 0x50, 0x95, 0x19, 0xe7, 0xc1, 0xa0, 0xc4, 0x77, 0x49, 0x60, 0x45, 0x49, 0xb3, - 0x42, 0xf0, 0xb5, 0xeb, 0x05, 0x6d, 0x2b, 0xf9, 0xc5, 0x6e, 0xff, 0x05, 0x41, 0xfe, 0xba, 0xb3, - 0x45, 0xfa, 0x41, 0x34, 0x31, 0x24, 0x87, 0xce, 0x80, 0xa8, 0x48, 0x8a, 0x35, 0x5e, 0x01, 0xe3, - 0xa1, 0xd3, 0x1f, 0x13, 0xa9, 0x32, 0x63, 0x2b, 0x6a, 0xd1, 0x5c, 0x47, 0x87, 0xce, 0x75, 0x14, - 0x46, 0xd7, 0x3a, 0x0d, 0x05, 0x65, 0xaf, 0x02, 0x2a, 0x32, 0x8e, 0x03, 0x95, 0x0d, 0x8c, 0xb3, - 0x7e, 0x44, 0x50, 0x98, 0x89, 0x17, 0xb6, 0xc0, 0xe8, 0xf3, 0xa3, 0x54, 0x3a, 0xd7, 0x82, 0xbd, - 0x49, 0x55, 0x71, 0x6c, 0xf5, 0xcb, 0xa3, 0x4f, 0x86, 0x4c, 0xe0, 0x1e, 0x17, 0xb8, 0xaf, 0x44, - 0xb8, 0x7f, 0x3c, 0x64, 0xfe, 0x6e, 0x10, 0xfc, 0x23, 0x1c, 0x45, 0xde, 0x54, 0x94, 0xb8, 0x1d, - 0x2c, 0x70, 0x03, 0x40, 0x2a, 0xbb, 0xea, 0xd0, 0x9e, 0x80, 0x26, 0xd9, 0x2a, 0xee, 0x4d, 0xaa, - 0x1a, 0xd7, 0xd6, 0xd6, 0xd6, 0x43, 0xc8, 0xeb, 0x9a, 0xf1, 0x55, 0xc8, 0x86, 0x1d, 0x55, 0x58, - 0xfa, 0xef, 0xf8, 0x14, 0x95, 0x21, 0x71, 0x46, 0x05, 0x4a, 0xd1, 0x61, 0x7c, 0x1c, 0x92, 0x7d, - 0x77, 0x48, 0x44, 0xd4, 0xb2, 0xad, 0xcc, 0xde, 0xa4, 0x2a, 0x68, 0x5b, 0x7c, 0xad, 0x01, 0x18, - 0x32, 0xf1, 0xf0, 0xc9, 0xf9, 0x1b, 0x13, 0x2d, 0x43, 0x6a, 0xd4, 0xb5, 0x55, 0x21, 0x25, 0xa0, - 0x15, 0xea, 0x50, 0x2b, 0xbb, 0x37, 0xa9, 0x4a, 0x86, 0x2d, 0x7f, 0xf8, 0x75, 0xbd, 0xc8, 0x65, - 0x71, 0x1d, 0xa7, 0x6d, 0xf1, 0xb5, 0xbe, 0x43, 0xa0, 0x32, 0xf5, 0x95, 0x02, 0x71, 0x09, 0xd2, - 0x54, 0x58, 0x17, 0x04, 0x42, 0x2f, 0x00, 0xb1, 0x11, 0x85, 0x40, 0x09, 0xda, 0xc1, 0x62, 0xe1, - 0x10, 0x7c, 0x8b, 0x20, 0x77, 0xc7, 0x71, 0xc3, 0x22, 0x58, 0x86, 0xd4, 0x03, 0x5e, 0x8d, 0xaa, - 0x0a, 0x24, 0xc1, 0x1b, 0x5d, 0x87, 0xf4, 0x9d, 0xdd, 0x2b, 0x9e, 0x2f, 0x74, 0x16, 0xec, 0x90, - 0x8e, 0x86, 0x41, 0x72, 0xdf, 0x61, 0x90, 0x5a, 0xb8, 0xfd, 0x5d, 0x4b, 0x66, 0xe2, 0xa5, 0x84, - 0xf5, 0x35, 0x82, 0xbc, 0xb4, 0x4c, 0xa5, 0xfb, 0x25, 0x30, 0x64, 0x9b, 0x51, 0xa9, 0x71, 0x60, - 0x77, 0x02, 0xad, 0x33, 0xa9, 0x23, 0xf8, 0x23, 0x28, 0x76, 0x7c, 0x6f, 0x34, 0x22, 0x9d, 0xdb, - 0xaa, 0xc5, 0xc5, 0xe7, 0x5b, 0xdc, 0x86, 0xbe, 0x6f, 0xcf, 0x89, 0x5b, 0xbf, 0xf2, 0xa2, 0x92, - 0xed, 0x46, 0x41, 0x15, 0xba, 0x88, 0x0e, 0xdd, 0xe1, 0xe3, 0x8b, 0x76, 0xf8, 0x15, 0x30, 0xba, - 0xbe, 0x37, 0x1e, 0xd1, 0x72, 0x42, 0x96, 0xbc, 0xa4, 0x16, 0xec, 0xfc, 0xd7, 0xa0, 0x18, 0xb8, - 0x72, 0x40, 0xcf, 0xad, 0xcc, 0xf7, 0xdc, 0xcd, 0x0e, 0x19, 0x32, 0x77, 0xdb, 0x0d, 0xbb, 0xa8, - 0x92, 0xb7, 0xbe, 0x41, 0x50, 0x9a, 0x17, 0xc1, 0x1f, 0x6a, 0x69, 0xce, 0xd5, 0x9d, 0x3a, 0x58, - 0x5d, 0x43, 0xf4, 0x34, 0x2a, 0xfa, 0x40, 0x50, 0x02, 0x95, 0x0b, 0x90, 0xd3, 0xd8, 0xb8, 0x04, - 0x89, 0x1d, 0x12, 0xa4, 0x24, 0x5f, 0xf2, 0xa4, 0x8b, 0x2a, 0x32, 0xab, 0xca, 0xf0, 0x62, 0xfc, - 0x3c, 0xe2, 0x09, 0x5d, 0x98, 0x89, 0x24, 0x3e, 0x0f, 0xc9, 0x6d, 0xdf, 0x1b, 0x2c, 0x14, 0x26, - 0x71, 0x02, 0xbf, 0x0d, 0x71, 0xe6, 0x2d, 0x14, 0xa4, 0x38, 0xf3, 0x78, 0x8c, 0x94, 0xf3, 0x09, - 0x61, 0x9c, 0xa2, 0xac, 0x9f, 0x11, 
0x1c, 0xe1, 0x67, 0x24, 0x02, 0x97, 0x7b, 0xe3, 0xe1, 0x0e, - 0xae, 0x43, 0x89, 0xdf, 0x74, 0xdf, 0x55, 0x23, 0xea, 0xbe, 0xdb, 0x51, 0x6e, 0x16, 0x39, 0x3f, - 0x98, 0x5c, 0x9b, 0x1d, 0xbc, 0x0a, 0xe9, 0x31, 0x95, 0x02, 0xd2, 0x67, 0x83, 0x93, 0x9b, 0x1d, - 0x7c, 0x46, 0xbb, 0x8e, 0x63, 0xad, 0xbd, 0x7f, 0x04, 0x86, 0x37, 0x1d, 0xd7, 0x0f, 0x7b, 0xcb, - 0x69, 0x30, 0xda, 0xfc, 0x62, 0x99, 0x27, 0x7c, 0x44, 0x86, 0xc2, 0xc2, 0x20, 0x5b, 0x6d, 0x5b, - 0xef, 0x40, 0x36, 0x3c, 0xbd, 0xef, 0x64, 0xdc, 0x37, 0x02, 0xd6, 0x31, 0x48, 0x49, 0xc7, 0x30, - 0x24, 0x3b, 0x0e, 0x73, 0xc4, 0x91, 0xbc, 0x2d, 0xd6, 0x56, 0x19, 0x56, 0xee, 0xf8, 0xce, 0x90, - 0x6e, 0x13, 0x5f, 0x08, 0x85, 0xe9, 0x67, 0x1d, 0x85, 0x25, 0x5e, 0xea, 0xc4, 0xa7, 0x97, 0xbd, - 0xf1, 0x90, 0xa9, 0x0a, 0xb3, 0xce, 0xc2, 0xf2, 0x2c, 0x5b, 0x65, 0xeb, 0x32, 0xa4, 0xda, 0x9c, - 0x21, 0xb4, 0x17, 0x6c, 0x49, 0x58, 0x3f, 0x20, 0xc0, 0x9f, 0x10, 0x26, 0x54, 0x6f, 0x6e, 0x50, - 0xed, 0x91, 0x36, 0x70, 0x58, 0xbb, 0x47, 0x7c, 0x1a, 0x3c, 0xd2, 0x02, 0xfa, 0xff, 0x78, 0xa4, - 0x59, 0xe7, 0x60, 0x69, 0xc6, 0x4a, 0xe5, 0x53, 0x05, 0x32, 0x6d, 0xc5, 0x53, 0xe3, 0x3c, 0xa4, - 0x5f, 0x3f, 0x05, 0xd9, 0xf0, 0x29, 0x8b, 0x73, 0x90, 0xbe, 0x72, 0xc3, 0xbe, 0xbb, 0x6e, 0x6f, - 0x94, 0x62, 0x38, 0x0f, 0x99, 0xd6, 0xfa, 0xe5, 0x4f, 0x05, 0x85, 0xd6, 0xd6, 0xc1, 0xe0, 0x8f, - 0x7a, 0xe2, 0xe3, 0xf7, 0x20, 0xc9, 0x57, 0xf8, 0x68, 0x14, 0x5f, 0xed, 0x7f, 0x44, 0x65, 0x65, - 0x9e, 0xad, 0xe2, 0x10, 0x5b, 0xfb, 0x3b, 0x01, 0x69, 0xfe, 0x1c, 0xe3, 0x55, 0xfc, 0x3e, 0xa4, - 0xc4, 0xcb, 0x0c, 0x6b, 0xe2, 0xfa, 0xfb, 0xb7, 0xb2, 0xfa, 0x12, 0x3f, 0xd0, 0xf3, 0x26, 0xc2, - 0x9f, 0x41, 0x4e, 0x30, 0xd5, 0xa4, 0x3d, 0x3e, 0x3f, 0xc4, 0x66, 0x34, 0x9d, 0x38, 0x60, 0x57, - 0xd3, 0x77, 0x11, 0x52, 0x22, 0x23, 0x75, 0x6b, 0xf4, 0xf7, 0x9b, 0x6e, 0xcd, 0xcc, 0x3b, 0xc9, - 0x8a, 0xe1, 0x0b, 0x90, 0xe4, 0x89, 0xa4, 0xc3, 0xa1, 0x0d, 0x3d, 0x1d, 0x0e, 0x7d, 0xe2, 0x88, - 0x6b, 0x3f, 0x08, 0x67, 0xf7, 0xea, 0x7c, 0x13, 0x0b, 0x8e, 0x97, 0x5f, 0xde, 0x08, 0x6f, 0xbe, - 0x21, 0x87, 0x58, 0x90, 0xc2, 0xf8, 0xc4, 0xec, 0x55, 0x73, 0x19, 0x5f, 0x31, 0x0f, 0xda, 0x0e, - 0x15, 0x5e, 0x87, 0x9c, 0x96, 0x3e, 0x3a, 0xac, 0x2f, 0xe7, 0xbe, 0x0e, 0xeb, 0x3e, 0x39, 0x67, - 0xc5, 0xd6, 0x3e, 0x87, 0x4c, 0xd0, 0x63, 0xf0, 0x2d, 0x28, 0xce, 0x96, 0x27, 0x7e, 0x4d, 0xb3, - 0x66, 0xb6, 0x71, 0x55, 0x6a, 0xda, 0xd6, 0xfe, 0x35, 0x1d, 0xab, 0xa3, 0xd6, 0xbd, 0x27, 0xcf, - 0xcc, 0xd8, 0xd3, 0x67, 0x66, 0xec, 0xc5, 0x33, 0x13, 0x7d, 0x35, 0x35, 0xd1, 0x4f, 0x53, 0x13, - 0x3d, 0x9e, 0x9a, 0xe8, 0xc9, 0xd4, 0x44, 0x7f, 0x4e, 0x4d, 0xf4, 0xd7, 0xd4, 0x8c, 0xbd, 0x98, - 0x9a, 0xe8, 0xd1, 0x73, 0x33, 0xf6, 0xe4, 0xb9, 0x19, 0x7b, 0xfa, 0xdc, 0x8c, 0xdd, 0x3b, 0xa9, - 0xff, 0x8b, 0xf6, 0x9d, 0x6d, 0x67, 0xe8, 0x34, 0xfb, 0xde, 0x8e, 0xdb, 0xd4, 0xff, 0xa5, 0x6f, - 0x19, 0xe2, 0xe7, 0xad, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x5d, 0x7b, 0xff, 0xbc, 0x0f, - 0x00, 0x00, + // 1427 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x49, 0x8f, 0x13, 0xc7, + 0x17, 0x77, 0x79, 0x69, 0xdb, 0xcf, 0x0b, 0x56, 0xcd, 0x30, 0xe3, 0xbf, 0x81, 0xb6, 0xd5, 0x42, + 0x60, 0xfd, 0x21, 0x76, 0x98, 0x6c, 0x2c, 0x59, 0x34, 0x66, 0x42, 0x18, 0x82, 0x02, 0x34, 0x48, + 0x48, 0x48, 0x11, 0xea, 0xb1, 0x6b, 0xec, 0xd6, 0xd8, 0x6e, 0xd3, 0x55, 0x46, 0x1a, 0x29, 0x52, + 0xf2, 0x01, 0x12, 0x89, 0x43, 0xa4, 0x28, 0xca, 0x35, 0x87, 0x28, 0x87, 0x7c, 0x0e, 0x72, 0x43, + 0x39, 0xa1, 0x1c, 0x9c, 0x60, 0x2e, 0xd1, 0x28, 0x07, 0x3e, 0x42, 0x54, 0x4b, 0x77, 0x97, 0xcd, + 
0x4c, 0xc0, 0x73, 0xc9, 0xa5, 0x5d, 0xaf, 0xea, 0xd5, 0x5b, 0x7e, 0xef, 0x57, 0xaf, 0xca, 0x70, + 0x6c, 0xb4, 0xd3, 0x6d, 0xf6, 0xbd, 0xee, 0xc8, 0xf7, 0x98, 0x17, 0x0e, 0x1a, 0xe2, 0x8b, 0x33, + 0x81, 0x5c, 0xa9, 0x76, 0x3d, 0xaf, 0xdb, 0x27, 0x4d, 0x21, 0x6d, 0x8d, 0xb7, 0x9b, 0xcc, 0x1d, + 0x10, 0xca, 0x9c, 0xc1, 0x48, 0xaa, 0x56, 0xde, 0xe8, 0xba, 0xac, 0x37, 0xde, 0x6a, 0xb4, 0xbd, + 0x41, 0xb3, 0xeb, 0x75, 0xbd, 0x48, 0x93, 0x4b, 0xd2, 0x3a, 0x1f, 0x29, 0xf5, 0x9a, 0x72, 0xfb, + 0xa0, 0x3f, 0xf0, 0x3a, 0xa4, 0xdf, 0xa4, 0xcc, 0x61, 0x54, 0x7e, 0xa5, 0x86, 0x75, 0x17, 0x72, + 0x37, 0xc7, 0xb4, 0x67, 0x93, 0x07, 0x63, 0x42, 0x19, 0xbe, 0x0a, 0x69, 0xca, 0x7c, 0xe2, 0x0c, + 0x68, 0x19, 0xd5, 0x12, 0xf5, 0xdc, 0xda, 0x6a, 0x23, 0x0c, 0xf6, 0xb6, 0x58, 0x58, 0xef, 0x38, + 0x23, 0x46, 0xfc, 0xd6, 0xd1, 0xdf, 0x27, 0x55, 0x43, 0x4e, 0xed, 0x4d, 0xaa, 0xc1, 0x2e, 0x3b, + 0x18, 0x58, 0x45, 0xc8, 0x4b, 0xc3, 0x74, 0xe4, 0x0d, 0x29, 0xb1, 0x7e, 0x88, 0x43, 0xfe, 0xd6, + 0x98, 0xf8, 0xbb, 0x81, 0xab, 0x0a, 0x64, 0x28, 0xe9, 0x93, 0x36, 0xf3, 0xfc, 0x32, 0xaa, 0xa1, + 0x7a, 0xd6, 0x0e, 0x65, 0xbc, 0x0c, 0xa9, 0xbe, 0x3b, 0x70, 0x59, 0x39, 0x5e, 0x43, 0xf5, 0x82, + 0x2d, 0x05, 0x7c, 0x11, 0x52, 0x94, 0x39, 0x3e, 0x2b, 0x27, 0x6a, 0xa8, 0x9e, 0x5b, 0xab, 0x34, + 0x24, 0x5a, 0x8d, 0x00, 0x83, 0xc6, 0x9d, 0x00, 0xad, 0x56, 0xe6, 0xf1, 0xa4, 0x1a, 0x7b, 0xf4, + 0x47, 0x15, 0xd9, 0x72, 0x0b, 0x7e, 0x17, 0x12, 0x64, 0xd8, 0x29, 0x27, 0x17, 0xd8, 0xc9, 0x37, + 0xe0, 0x73, 0x90, 0xed, 0xb8, 0x3e, 0x69, 0x33, 0xd7, 0x1b, 0x96, 0x53, 0x35, 0x54, 0x2f, 0xae, + 0x2d, 0x45, 0x90, 0x6c, 0x04, 0x4b, 0x76, 0xa4, 0x85, 0xcf, 0x82, 0x41, 0x7b, 0x8e, 0xdf, 0xa1, + 0xe5, 0x74, 0x2d, 0x51, 0xcf, 0xb6, 0x96, 0xf7, 0x26, 0xd5, 0x92, 0x9c, 0x39, 0xeb, 0x0d, 0x5c, + 0x46, 0x06, 0x23, 0xb6, 0x6b, 0x2b, 0x9d, 0x6b, 0xc9, 0x8c, 0x51, 0x4a, 0x5b, 0xbf, 0x21, 0xc0, + 0xb7, 0x9d, 0xc1, 0xa8, 0x4f, 0x5e, 0x1b, 0xa3, 0x10, 0x8d, 0xf8, 0xa1, 0xd1, 0x48, 0x2c, 0x8a, + 0x46, 0x94, 0x5a, 0xf2, 0xd5, 0xa9, 0x59, 0x5f, 0x42, 0x41, 0x65, 0x23, 0x39, 0x80, 0xd7, 0x5f, + 0x9b, 0x5d, 0xc5, 0xc7, 0x93, 0x2a, 0x8a, 0x18, 0x16, 0xd2, 0x0a, 0x9f, 0x11, 0x59, 0x33, 0xaa, + 0xb2, 0x3e, 0xd2, 0x90, 0x64, 0xde, 0x1c, 0x76, 0x09, 0xe5, 0x1b, 0x93, 0x3c, 0x60, 0x5b, 0xea, + 0x58, 0x5f, 0xc0, 0xd2, 0x0c, 0xa8, 0x2a, 0x8c, 0xf3, 0x60, 0x50, 0xe2, 0xbb, 0x24, 0x88, 0xa2, + 0xa4, 0x45, 0x21, 0xe6, 0x35, 0xf7, 0x42, 0xb6, 0x95, 0xfe, 0x62, 0xde, 0x7f, 0x41, 0x90, 0xbf, + 0xee, 0x6c, 0x91, 0x7e, 0x50, 0x4d, 0x0c, 0xc9, 0xa1, 0x33, 0x20, 0xaa, 0x92, 0x62, 0x8c, 0x57, + 0xc0, 0x78, 0xe8, 0xf4, 0xc7, 0x44, 0x9a, 0xcc, 0xd8, 0x4a, 0x5a, 0x94, 0xeb, 0xe8, 0xd0, 0x5c, + 0x47, 0x61, 0x75, 0xad, 0xd3, 0x50, 0x50, 0xf1, 0x2a, 0xa0, 0xa2, 0xe0, 0x38, 0x50, 0xd9, 0x20, + 0x38, 0xeb, 0x5b, 0x04, 0x85, 0x99, 0x7a, 0x61, 0x0b, 0x8c, 0x3e, 0xdf, 0x4a, 0x65, 0x72, 0x2d, + 0xd8, 0x9b, 0x54, 0xd5, 0x8c, 0xad, 0x7e, 0x79, 0xf5, 0xc9, 0x90, 0x09, 0xdc, 0xe3, 0x02, 0xf7, + 0x95, 0x08, 0xf7, 0x8f, 0x87, 0xcc, 0xdf, 0x0d, 0x8a, 0x7f, 0x84, 0xa3, 0xc8, 0x9b, 0x8a, 0x52, + 0xb7, 0x83, 0x01, 0x3e, 0x0e, 0xc9, 0x9e, 0x43, 0x7b, 0x02, 0x94, 0x64, 0x2b, 0xb3, 0x37, 0xa9, + 0x0a, 0xd9, 0x16, 0x5f, 0xeb, 0x21, 0xe4, 0x75, 0x3b, 0xf8, 0x2a, 0x64, 0xc3, 0xfe, 0x29, 0xe2, + 0xfa, 0x77, 0x34, 0x8a, 0xca, 0x6d, 0x9c, 0x51, 0x81, 0x49, 0xb4, 0x99, 0xfb, 0xed, 0xbb, 0x43, + 0x22, 0x6a, 0x94, 0x95, 0x7e, 0xb9, 0x6c, 0x8b, 0xaf, 0x35, 0x00, 0x43, 0xd2, 0x0c, 0x9f, 0x9c, + 0xf7, 0x98, 0x68, 0x19, 0xd2, 0xa2, 0x6e, 0xad, 0x0a, 0x29, 0x01, 0xa4, 0x30, 0x87, 0x5a, 0xd9, + 0xbd, 0x49, 0x55, 0x4e, 
0xd8, 0xf2, 0xe7, 0x15, 0x69, 0x7e, 0x8f, 0x40, 0xf1, 0xf2, 0xb5, 0x60, + 0xbf, 0x04, 0x69, 0x2a, 0xa2, 0x0b, 0x60, 0xd7, 0xe9, 0x2e, 0x16, 0x22, 0xc0, 0x95, 0xa2, 0x1d, + 0x0c, 0x70, 0x03, 0x40, 0x9e, 0xbc, 0xab, 0x51, 0x3c, 0xc5, 0xbd, 0x49, 0x55, 0x9b, 0xb5, 0xb5, + 0xb1, 0xf5, 0x1d, 0x82, 0xdc, 0x1d, 0xc7, 0x0d, 0x29, 0xbf, 0x0c, 0xa9, 0x07, 0xfc, 0xec, 0x29, + 0xce, 0x4b, 0x81, 0xb7, 0xb5, 0x0e, 0xe9, 0x3b, 0xbb, 0x57, 0x3c, 0x5f, 0xd8, 0x2c, 0xd8, 0xa1, + 0x1c, 0xb5, 0xfe, 0xe4, 0xbe, 0xad, 0x3f, 0xb5, 0x70, 0xb3, 0xbb, 0x96, 0xcc, 0xc4, 0x4b, 0x09, + 0xeb, 0x6b, 0x04, 0x79, 0x19, 0x99, 0x22, 0xf7, 0x25, 0x30, 0x64, 0xe0, 0x8a, 0x1a, 0x07, 0xf6, + 0x22, 0xd0, 0xfa, 0x90, 0xda, 0x82, 0x3f, 0x82, 0x62, 0xc7, 0xf7, 0x46, 0x23, 0xd2, 0xb9, 0xad, + 0x1a, 0x5a, 0x7c, 0xbe, 0xa1, 0x6d, 0xe8, 0xeb, 0xf6, 0x9c, 0xba, 0xf5, 0x2b, 0x3f, 0x42, 0xb2, + 0xb9, 0x28, 0xa8, 0xc2, 0x14, 0xd1, 0xa1, 0xfb, 0x79, 0x7c, 0xd1, 0x7e, 0xbe, 0x02, 0x46, 0xd7, + 0xf7, 0xc6, 0x23, 0x5a, 0x4e, 0xc8, 0x03, 0x2e, 0xa5, 0x05, 0xfb, 0xfc, 0x35, 0x28, 0x06, 0xa9, + 0x1c, 0xd0, 0x61, 0x2b, 0xf3, 0x1d, 0x76, 0xb3, 0x43, 0x86, 0xcc, 0xdd, 0x76, 0xc3, 0x9e, 0xa9, + 0xf4, 0xad, 0x6f, 0x10, 0x94, 0xe6, 0x55, 0xf0, 0x87, 0x1a, 0xcd, 0xb9, 0xb9, 0x53, 0x07, 0x9b, + 0x6b, 0x88, 0x0e, 0x46, 0x45, 0x1f, 0x08, 0x8e, 0x40, 0xe5, 0x02, 0xe4, 0xb4, 0x69, 0x5c, 0x82, + 0xc4, 0x0e, 0x09, 0x28, 0xc9, 0x87, 0x9c, 0x74, 0xd1, 0x89, 0xcc, 0xaa, 0x63, 0x78, 0x31, 0x7e, + 0x1e, 0x71, 0x42, 0x17, 0x66, 0x2a, 0x89, 0xcf, 0x43, 0x72, 0xdb, 0xf7, 0x06, 0x0b, 0x95, 0x49, + 0xec, 0xc0, 0x6f, 0x43, 0x9c, 0x79, 0x0b, 0x15, 0x29, 0xce, 0x3c, 0x5e, 0x23, 0x95, 0x7c, 0x42, + 0x04, 0xa7, 0x24, 0xeb, 0x67, 0x04, 0x47, 0xf8, 0x1e, 0x89, 0xc0, 0xe5, 0xde, 0x78, 0xb8, 0x83, + 0xeb, 0x50, 0xe2, 0x9e, 0xee, 0xbb, 0xea, 0x42, 0xba, 0xef, 0x76, 0x54, 0x9a, 0x45, 0x3e, 0x1f, + 0xdc, 0x53, 0x9b, 0x1d, 0xbc, 0x0a, 0xe9, 0x31, 0x95, 0x0a, 0x32, 0x67, 0x83, 0x8b, 0x9b, 0x1d, + 0x7c, 0x46, 0x73, 0xc7, 0xb1, 0xd6, 0x5e, 0x3b, 0x02, 0xc3, 0x9b, 0x8e, 0xeb, 0x87, 0xbd, 0xe5, + 0x34, 0x18, 0x6d, 0xee, 0x58, 0xf2, 0x84, 0x5f, 0x88, 0xa1, 0xb2, 0x08, 0xc8, 0x56, 0xcb, 0xd6, + 0x3b, 0x90, 0x0d, 0x77, 0xef, 0x7b, 0x0f, 0xee, 0x5b, 0x01, 0xeb, 0x18, 0xa4, 0x64, 0x62, 0x18, + 0x92, 0x1d, 0x87, 0x39, 0x62, 0x4b, 0xde, 0x16, 0x63, 0xab, 0x0c, 0x2b, 0x77, 0x7c, 0x67, 0x48, + 0xb7, 0x89, 0x2f, 0x94, 0x42, 0xfa, 0x59, 0x47, 0x61, 0x89, 0x1f, 0x75, 0xe2, 0xd3, 0xcb, 0xde, + 0x78, 0xc8, 0xd4, 0x09, 0xb3, 0xce, 0xc2, 0xf2, 0xec, 0xb4, 0x62, 0xeb, 0x32, 0xa4, 0xda, 0x7c, + 0x42, 0x58, 0x2f, 0xd8, 0x52, 0xb0, 0x7e, 0x44, 0x80, 0x3f, 0x21, 0x4c, 0x98, 0xde, 0xdc, 0xa0, + 0xda, 0x93, 0x6c, 0xe0, 0xb0, 0x76, 0x8f, 0xf8, 0x34, 0x78, 0x92, 0x05, 0xf2, 0x7f, 0xf1, 0x24, + 0xb3, 0xce, 0xc1, 0xd2, 0x4c, 0x94, 0x2a, 0xa7, 0x0a, 0x64, 0xda, 0x6a, 0x4e, 0x5d, 0xde, 0xa1, + 0xfc, 0xff, 0x53, 0x90, 0x0d, 0x1f, 0xae, 0x38, 0x07, 0xe9, 0x2b, 0x37, 0xec, 0xbb, 0xeb, 0xf6, + 0x46, 0x29, 0x86, 0xf3, 0x90, 0x69, 0xad, 0x5f, 0xfe, 0x54, 0x48, 0x68, 0x6d, 0x1d, 0x0c, 0xfe, + 0x84, 0x27, 0x3e, 0x7e, 0x0f, 0x92, 0x7c, 0x84, 0x8f, 0x46, 0xf5, 0xd5, 0xfe, 0x35, 0x54, 0x56, + 0xe6, 0xa7, 0x55, 0x1d, 0x62, 0x6b, 0x7f, 0x27, 0x20, 0xcd, 0x1f, 0x5f, 0xfc, 0x14, 0xbf, 0x0f, + 0x29, 0xf1, 0x0e, 0xc3, 0x9a, 0xba, 0xfe, 0xda, 0xad, 0xac, 0xbe, 0x34, 0x1f, 0xd8, 0x79, 0x13, + 0xe1, 0xcf, 0x20, 0x27, 0x26, 0xd5, 0x4d, 0x7b, 0x7c, 0xfe, 0x12, 0x9b, 0xb1, 0x74, 0xe2, 0x80, + 0x55, 0xcd, 0xde, 0x45, 0x48, 0x09, 0x46, 0xea, 0xd1, 0xe8, 0xaf, 0x35, 0x3d, 0x9a, 0x99, 0x57, + 0x91, 0x15, 0xc3, 0x17, 0x20, 0xc9, 0x89, 0xa4, 
0xc3, 0xa1, 0x5d, 0x7a, 0x3a, 0x1c, 0xfa, 0x8d, + 0x23, 0xdc, 0x7e, 0x10, 0xde, 0xdd, 0xab, 0xf3, 0x4d, 0x2c, 0xd8, 0x5e, 0x7e, 0x79, 0x21, 0xf4, + 0x7c, 0x43, 0x5e, 0x62, 0x01, 0x85, 0xf1, 0x89, 0x59, 0x57, 0x73, 0x8c, 0xaf, 0x98, 0x07, 0x2d, + 0x87, 0x06, 0xaf, 0x43, 0x4e, 0xa3, 0x8f, 0x0e, 0xeb, 0xcb, 0xdc, 0xd7, 0x61, 0xdd, 0x87, 0x73, + 0x56, 0x6c, 0xed, 0x73, 0xc8, 0x04, 0x3d, 0x06, 0xdf, 0x82, 0xe2, 0xec, 0xf1, 0xc4, 0xff, 0xd3, + 0xa2, 0x99, 0x6d, 0x5c, 0x95, 0x9a, 0xb6, 0xb4, 0xff, 0x99, 0x8e, 0xd5, 0x51, 0xeb, 0xde, 0x93, + 0x67, 0x66, 0xec, 0xe9, 0x33, 0x33, 0xf6, 0xe2, 0x99, 0x89, 0xbe, 0x9a, 0x9a, 0xe8, 0xa7, 0xa9, + 0x89, 0x1e, 0x4f, 0x4d, 0xf4, 0x64, 0x6a, 0xa2, 0x3f, 0xa7, 0x26, 0xfa, 0x6b, 0x6a, 0xc6, 0x5e, + 0x4c, 0x4d, 0xf4, 0xe8, 0xb9, 0x19, 0x7b, 0xf2, 0xdc, 0x8c, 0x3d, 0x7d, 0x6e, 0xc6, 0xee, 0x9d, + 0xd4, 0xff, 0x33, 0xfb, 0xce, 0xb6, 0x33, 0x74, 0x9a, 0x7d, 0x6f, 0xc7, 0x6d, 0xea, 0xff, 0xc9, + 0xb7, 0x0c, 0xf1, 0xf3, 0xd6, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xae, 0x29, 0xb8, 0x0d, 0xaa, + 0x0f, 0x00, 0x00, } func (x Direction) String() string { @@ -1835,7 +1836,7 @@ func (this *StreamAdapter) Equal(that interface{}) bool { return false } } - if this.LabelsHash != that1.LabelsHash { + if this.Hash != that1.Hash { return false } return true @@ -1927,7 +1928,7 @@ func (this *Series) Equal(that interface{}) bool { return false } } - if this.LabelsHash != that1.LabelsHash { + if this.StreamHash != that1.StreamHash { return false } return true @@ -2457,7 +2458,7 @@ func (this *StreamAdapter) GoString() string { } s = append(s, "Entries: "+fmt.Sprintf("%#v", vs)+",\n") } - s = append(s, "LabelsHash: "+fmt.Sprintf("%#v", this.LabelsHash)+",\n") + s = append(s, "Hash: "+fmt.Sprintf("%#v", this.Hash)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -2498,7 +2499,7 @@ func (this *Series) GoString() string { } s = append(s, "Samples: "+fmt.Sprintf("%#v", vs)+",\n") } - s = append(s, "LabelsHash: "+fmt.Sprintf("%#v", this.LabelsHash)+",\n") + s = append(s, "StreamHash: "+fmt.Sprintf("%#v", this.StreamHash)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -3630,8 +3631,8 @@ func (m *StreamAdapter) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.LabelsHash != 0 { - i = encodeVarintLogproto(dAtA, i, uint64(m.LabelsHash)) + if m.Hash != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.Hash)) i-- dAtA[i] = 0x18 } @@ -3756,8 +3757,8 @@ func (m *Series) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.LabelsHash != 0 { - i = encodeVarintLogproto(dAtA, i, uint64(m.LabelsHash)) + if m.StreamHash != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.StreamHash)) i-- dAtA[i] = 0x18 } @@ -4524,8 +4525,8 @@ func (m *StreamAdapter) Size() (n int) { n += 1 + l + sovLogproto(uint64(l)) } } - if m.LabelsHash != 0 { - n += 1 + sovLogproto(uint64(m.LabelsHash)) + if m.Hash != 0 { + n += 1 + sovLogproto(uint64(m.Hash)) } return n } @@ -4579,8 +4580,8 @@ func (m *Series) Size() (n int) { n += 1 + l + sovLogproto(uint64(l)) } } - if m.LabelsHash != 0 { - n += 1 + sovLogproto(uint64(m.LabelsHash)) + if m.StreamHash != 0 { + n += 1 + sovLogproto(uint64(m.StreamHash)) } return n } @@ -4930,7 +4931,7 @@ func (this *StreamAdapter) String() string { s := strings.Join([]string{`&StreamAdapter{`, `Labels:` + fmt.Sprintf("%v", this.Labels) + `,`, `Entries:` + repeatedStringForEntries + `,`, - `LabelsHash:` + fmt.Sprintf("%v", this.LabelsHash) + `,`, + `Hash:` + fmt.Sprintf("%v", this.Hash) + `,`, `}`, }, "") return s @@ -4970,7 
+4971,7 @@ func (this *Series) String() string { s := strings.Join([]string{`&Series{`, `Labels:` + fmt.Sprintf("%v", this.Labels) + `,`, `Samples:` + repeatedStringForSamples + `,`, - `LabelsHash:` + fmt.Sprintf("%v", this.LabelsHash) + `,`, + `StreamHash:` + fmt.Sprintf("%v", this.StreamHash) + `,`, `}`, }, "") return s @@ -6309,9 +6310,9 @@ func (m *StreamAdapter) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 3: if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field LabelsHash", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) } - m.LabelsHash = 0 + m.Hash = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowLogproto @@ -6321,7 +6322,7 @@ func (m *StreamAdapter) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.LabelsHash |= uint64(b&0x7F) << shift + m.Hash |= uint64(b&0x7F) << shift if b < 0x80 { break } @@ -6667,9 +6668,9 @@ func (m *Series) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 3: if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field LabelsHash", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field StreamHash", wireType) } - m.LabelsHash = 0 + m.StreamHash = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowLogproto @@ -6679,7 +6680,7 @@ func (m *Series) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.LabelsHash |= uint64(b&0x7F) << shift + m.StreamHash |= uint64(b&0x7F) << shift if b < 0x80 { break } diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index c2e5582c764b..7d2041116085 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -82,7 +82,8 @@ message LabelResponse { message StreamAdapter { string labels = 1 [(gogoproto.jsontag) = "labels"]; repeated EntryAdapter entries = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "entries"]; - uint64 labelsHash = 3 [(gogoproto.jsontag) = "labelsHash"]; + // hash contains the original hash of the stream. + uint64 hash = 3 [(gogoproto.jsontag) = "hash"]; } message EntryAdapter { @@ -99,7 +100,7 @@ message Sample { message Series { string labels = 1 [(gogoproto.jsontag) = "labels"]; repeated Sample samples = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "samples"]; - uint64 labelsHash = 3 [(gogoproto.jsontag) = "labelsHash"]; + uint64 streamHash = 3 [(gogoproto.jsontag) = "streamHash"]; } message TailRequest { diff --git a/pkg/logproto/types.go b/pkg/logproto/types.go index 4edae3e1922e..1dd1822e35d5 100644 --- a/pkg/logproto/types.go +++ b/pkg/logproto/types.go @@ -10,9 +10,9 @@ import ( // We are not using the proto generated version but this custom one so that we // can improve serialization see benchmark. type Stream struct { - Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` - Entries []Entry `protobuf:"bytes,2,rep,name=entries,proto3,customtype=EntryAdapter" json:"entries"` - LabelsHash uint64 `protobuf:"varint,3,opt,name=labelsHash,proto3" json:"labelsHash"` + Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` + Entries []Entry `protobuf:"bytes,2,rep,name=entries,proto3,customtype=EntryAdapter" json:"entries"` + Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"hash"` } // Entry is a log entry with a timestamp. 
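With the renamed field in place, the producer fills Stream.Hash once from the original stream's labels and the value simply rides along with the serialized stream. Below is a minimal sketch of constructing such a stream, assuming the FNV-1a helper that the tests in this series use; production code derives the hash from the parsed label set via labels.Hash() instead.

package main

import (
	"fmt"
	"hash/fnv"
	"time"

	"github.com/grafana/loki/pkg/logproto"
)

// hashLabels mirrors the FNV-1a test helper used later in this series; the
// exact hash function is up to the producer, this one is only illustrative.
func hashLabels(lbs string) uint64 {
	h := fnv.New64a()
	_, _ = h.Write([]byte(lbs)) // fnv's Write never returns an error
	return h.Sum64()
}

func main() {
	labels := `{app="foo"}`
	s := logproto.Stream{
		Labels: labels,
		Hash:   hashLabels(labels), // original stream hash, new in this change
		Entries: []logproto.Entry{
			{Timestamp: time.Unix(0, 1), Line: "hello"},
		},
	}
	fmt.Println(s.Labels, s.Hash)
}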
@@ -41,8 +41,8 @@ func (m *Stream) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l - if m.LabelsHash != 0 { - i = encodeVarintLogproto(dAtA, i, m.LabelsHash) + if m.Hash != 0 { + i = encodeVarintLogproto(dAtA, i, m.Hash) i-- dAtA[i] = 0x18 } @@ -205,9 +205,9 @@ func (m *Stream) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 3: if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field LabelsHash", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Hash", wireType) } - m.LabelsHash = 0 + m.Hash = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowLogproto @@ -217,7 +217,7 @@ func (m *Stream) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.LabelsHash |= uint64(b&0x7F) << shift + m.Hash |= uint64(b&0x7F) << shift if b < 0x80 { break } @@ -382,8 +382,8 @@ func (m *Stream) Size() (n int) { n += 1 + l + sovLogproto(uint64(l)) } } - if m.LabelsHash != 0 { - n += 1 + sovLogproto(m.LabelsHash) + if m.Hash != 0 { + n += 1 + sovLogproto(m.Hash) } return n } @@ -433,7 +433,7 @@ func (m *Stream) Equal(that interface{}) bool { return false } } - return m.LabelsHash == that1.LabelsHash + return m.Hash == that1.Hash } func (m *Entry) Equal(that interface{}) bool { diff --git a/pkg/logproto/types_test.go b/pkg/logproto/types_test.go index 97517ce72488..0dd87f727e26 100644 --- a/pkg/logproto/types_test.go +++ b/pkg/logproto/types_test.go @@ -12,6 +12,7 @@ var ( line = `level=info ts=2019-12-12T15:00:08.325Z caller=compact.go:441 component=tsdb msg="compact blocks" count=3 mint=1576130400000 maxt=1576152000000 ulid=01DVX9ZHNM71GRCJS7M34Q0EV7 sources="[01DVWNC6NWY1A60AZV3Z6DGS65 01DVWW7XXX75GHA6ZDTD170CSZ 01DVX33N5W86CWJJVRPAVXJRWJ]" duration=2.897213221s` stream = Stream{ Labels: `{job="foobar", cluster="foo-central1", namespace="bar", container_name="buzz"}`, + Hash: 1234*10 ^ 9, Entries: []Entry{ {now, line}, {now.Add(1 * time.Second), line}, @@ -21,6 +22,7 @@ var ( } streamAdapter = StreamAdapter{ Labels: `{job="foobar", cluster="foo-central1", namespace="bar", container_name="buzz"}`, + Hash: 1234*10 ^ 9, Entries: []EntryAdapter{ {now, line}, {now.Add(1 * time.Second), line}, @@ -91,7 +93,6 @@ func BenchmarkStream(b *testing.B) { b.Fatal(err) } } - } func BenchmarkStreamAdapter(b *testing.B) { @@ -107,5 +108,4 @@ func BenchmarkStreamAdapter(b *testing.B) { b.Fatal(err) } } - } diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 7505c7d8394f..5ed956764481 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -2551,7 +2551,7 @@ func (errorIterator) Error() error { return ErrMock } func (errorIterator) Labels() string { return "" } -func (errorIterator) LabelsHash() uint64 { return 0 } +func (errorIterator) StreamHash() uint64 { return 0 } func (errorIterator) Entry() logproto.Entry { return logproto.Entry{} } diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index edaf519bbf8b..435906de9a0d 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -338,8 +338,8 @@ func (it *logBatchIterator) Labels() string { return it.curr.Labels() } -func (it *logBatchIterator) LabelsHash() uint64 { - return it.curr.LabelsHash() +func (it *logBatchIterator) StreamHash() uint64 { + return it.curr.StreamHash() } func (it *logBatchIterator) Error() error { @@ -481,8 +481,8 @@ func (it *sampleBatchIterator) Labels() string { return it.curr.Labels() } -func (it *sampleBatchIterator) LabelsHash() uint64 { - return it.curr.LabelsHash() +func (it *sampleBatchIterator) 
StreamHash() uint64 { + return it.curr.StreamHash() } func (it *sampleBatchIterator) Error() error { From 12e5333acc01209b1416237d6ff52cfef577f890 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 28 Jan 2022 16:45:12 +0100 Subject: [PATCH 07/14] Working through fixing tests. Signed-off-by: Cyril Tovena --- pkg/iter/entry_iterator_test.go | 18 ++- pkg/iter/iterator.go | 2 +- pkg/iter/sample_iterator_test.go | 8 +- pkg/logql/engine_test.go | 20 +++- pkg/logql/range_vector_test.go | 15 +-- pkg/logql/sharding.go | 2 +- pkg/logql/test_utils.go | 5 +- pkg/storage/batch_test.go | 194 +++++++++++++++---------------- pkg/storage/lazy_chunk_test.go | 6 +- pkg/storage/store_test.go | 7 +- pkg/storage/util_test.go | 4 +- 11 files changed, 154 insertions(+), 127 deletions(-) diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index 9fa3c7c42a5f..0c12d56384f8 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -3,6 +3,7 @@ package iter import ( "context" "fmt" + "hash/fnv" "math/rand" "testing" "time" @@ -120,9 +121,9 @@ func TestIteratorMultipleLabels(t *testing.T) { length: testSize * 2, labels: func(i int64) string { if i%2 == 0 { - return "{foobar: \"baz1\"}" + return "{foobar: \"baz2\"}" } - return "{foobar: \"baz2\"}" + return "{foobar: \"baz1\"}" }, }, @@ -138,9 +139,9 @@ func TestIteratorMultipleLabels(t *testing.T) { length: testSize * 2, labels: func(i int64) string { if i/testSize == 0 { - return "{foobar: \"baz1\"}" + return "{foobar: \"baz2\"}" } - return "{foobar: \"baz2\"}" + return "{foobar: \"baz1\"}" }, }, } { @@ -202,9 +203,16 @@ func mkStreamIterator(f generator, labels string) EntryIterator { return NewStreamIterator(logproto.Stream{ Entries: entries, Labels: labels, + Hash: hashLabels(labels), }) } +func hashLabels(lbs string) uint64 { + h := fnv.New64a() + h.Write([]byte(lbs)) + return h.Sum64() +} + func identity(i int64) logproto.Entry { return logproto.Entry{ Timestamp: time.Unix(i, 0), @@ -237,6 +245,7 @@ func inverse(g generator) generator { func TestHeapIteratorDeduplication(t *testing.T) { foo := logproto.Stream{ Labels: `{app="foo"}`, + Hash: hashLabels(`{app="foo"}`), Entries: []logproto.Entry{ {Timestamp: time.Unix(0, 1), Line: "1"}, {Timestamp: time.Unix(0, 2), Line: "2"}, @@ -245,6 +254,7 @@ func TestHeapIteratorDeduplication(t *testing.T) { } bar := logproto.Stream{ Labels: `{app="bar"}`, + Hash: hashLabels(`{app="bar"}`), Entries: []logproto.Entry{ {Timestamp: time.Unix(0, 1), Line: "1"}, {Timestamp: time.Unix(0, 2), Line: "2"}, diff --git a/pkg/iter/iterator.go b/pkg/iter/iterator.go index 334e20d2bd9e..8d0bde08ec65 100644 --- a/pkg/iter/iterator.go +++ b/pkg/iter/iterator.go @@ -9,7 +9,7 @@ type Iterator interface { // Labels returns the labels for the current entry. // The labels can be mutated by the query engine and not reflect the original stream. Labels() string - // StreamHash returns a hash of the original stream for the current entry. + // StreamHash returns the hash of the original stream for the current entry. 
StreamHash() uint64 Error() error Close() error diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index d2272714091b..1599a7a44981 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -91,14 +91,16 @@ func sample(i int) logproto.Sample { } var varSeries = logproto.Series{ - Labels: `{foo="var"}`, + Labels: `{foo="var"}`, + StreamHash: hashLabels(`{foo="var"}`), Samples: []logproto.Sample{ sample(1), sample(2), sample(3), }, } var carSeries = logproto.Series{ - Labels: `{foo="car"}`, + Labels: `{foo="car"}`, + StreamHash: hashLabels(`{foo="car"}`), Samples: []logproto.Sample{ sample(1), sample(2), sample(3), }, @@ -191,7 +193,7 @@ func TestNewNonOverlappingSampleIterator(t *testing.T) { func TestReadSampleBatch(t *testing.T) { res, size, err := ReadSampleBatch(NewSeriesIterator(carSeries), 1) - require.Equal(t, &logproto.SampleQueryResponse{Series: []logproto.Series{{Labels: carSeries.Labels, Samples: []logproto.Sample{sample(1)}}}}, res) + require.Equal(t, &logproto.SampleQueryResponse{Series: []logproto.Series{{Labels: carSeries.Labels, StreamHash: carSeries.StreamHash, Samples: []logproto.Sample{sample(1)}}}}, res) require.Equal(t, uint32(1), size) require.NoError(t, err) diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 5ed956764481..14a945cec975 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -2383,25 +2383,35 @@ type logData struct { type generator func(i int64) logData -func newStream(n int64, f generator, labels string) logproto.Stream { +func newStream(n int64, f generator, lbsString string) logproto.Stream { + labels, err := ParseLabels(lbsString) + if err != nil { + panic(err) + } entries := []logproto.Entry{} for i := int64(0); i < n; i++ { entries = append(entries, f(i).Entry) } return logproto.Stream{ Entries: entries, - Labels: labels, + Labels: labels.String(), + Hash: labels.Hash(), } } -func newSeries(n int64, f generator, labels string) logproto.Series { +func newSeries(n int64, f generator, lbsString string) logproto.Series { + labels, err := ParseLabels(lbsString) + if err != nil { + panic(err) + } samples := []logproto.Sample{} for i := int64(0); i < n; i++ { samples = append(samples, f(i).Sample) } return logproto.Series{ - Samples: samples, - Labels: labels, + Samples: samples, + Labels: labels.String(), + StreamHash: labels.Hash(), } } diff --git a/pkg/logql/range_vector_test.go b/pkg/logql/range_vector_test.go index fca777db876e..fa2d2d03e5a4 100644 --- a/pkg/logql/range_vector_test.go +++ b/pkg/logql/range_vector_test.go @@ -7,7 +7,6 @@ import ( "time" "github.com/prometheus/prometheus/promql" - promql_parser "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/iter" @@ -29,19 +28,21 @@ var samples = []logproto.Sample{ } var ( - labelFoo, _ = promql_parser.ParseMetric("{app=\"foo\"}") - labelBar, _ = promql_parser.ParseMetric("{app=\"bar\"}") + labelFoo, _ = ParseLabels("{app=\"foo\"}") + labelBar, _ = ParseLabels("{app=\"bar\"}") ) func newSampleIterator() iter.SampleIterator { return iter.NewHeapSampleIterator(context.Background(), []iter.SampleIterator{ iter.NewSeriesIterator(logproto.Series{ - Labels: labelFoo.String(), - Samples: samples, + Labels: labelFoo.String(), + Samples: samples, + StreamHash: labelFoo.Hash(), }), iter.NewSeriesIterator(logproto.Series{ - Labels: labelBar.String(), - Samples: samples, + Labels: labelBar.String(), + Samples: samples, + StreamHash: labelBar.Hash(), 
}), }) } diff --git a/pkg/logql/sharding.go b/pkg/logql/sharding.go index aca924b9a953..4bdd7ca783a2 100644 --- a/pkg/logql/sharding.go +++ b/pkg/logql/sharding.go @@ -323,7 +323,7 @@ func (ev *DownstreamEvaluator) Iterator( } xs = append(xs, iter) } - + // todo we can't use this anymore ! return iter.NewHeapIterator(ctx, xs, params.Direction()), nil default: diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index e5a0e7f57fbd..2e2a43b68e09 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -104,7 +104,7 @@ func processStream(in []logproto.Stream, pipeline log.Pipeline) []logproto.Strea var found bool s, found = resByStream[out.String()] if !found { - s = &logproto.Stream{Labels: out.String()} + s = &logproto.Stream{Labels: out.String(), Hash: sp.BaseLabels().Hash()} resByStream[out.String()] = s } s.Entries = append(s.Entries, logproto.Entry{ @@ -132,7 +132,7 @@ func processSeries(in []logproto.Stream, ex log.SampleExtractor) []logproto.Seri var found bool s, found = resBySeries[lbs.String()] if !found { - s = &logproto.Series{Labels: lbs.String()} + s = &logproto.Series{Labels: lbs.String(), StreamHash: exs.BaseLabels().Hash()} resBySeries[lbs.String()] = s } s.Samples = append(s.Samples, logproto.Sample{ @@ -264,6 +264,7 @@ func randomStreams(nStreams, nEntries, nShards int, labelNames []string) (stream } stream.Labels = ls.String() + stream.Hash = ls.Hash() streams = append(streams, stream) } return streams diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 4d1a620a931d..b1d9efc3eb38 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -26,7 +26,7 @@ var NilMetrics = NewChunkMetrics(nil, 0) func Test_batchIterSafeStart(t *testing.T) { stream := logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -76,7 +76,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "forward with overlap": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -89,7 +89,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -102,7 +102,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -115,7 +115,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -128,7 +128,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -141,7 +141,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -156,7 +156,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ 
-177,7 +177,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(4 * time.Millisecond), logproto.FORWARD, 2, @@ -185,7 +185,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "forward all overlap and all chunks have a from time less than query from time": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -198,7 +198,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -215,7 +215,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -232,7 +232,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -249,7 +249,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -266,7 +266,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -285,7 +285,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -306,7 +306,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from.Add(1 * time.Millisecond), from.Add(5 * time.Millisecond), logproto.FORWARD, 2, @@ -314,7 +314,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "forward with overlapping non-continuous entries": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -331,7 +331,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -344,7 +344,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -357,7 +357,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -372,7 +372,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -389,7 +389,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(3 * time.Millisecond), logproto.FORWARD, 2, @@ -397,7 +397,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "backward with overlap": { []*LazyChunk{ 
newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -410,7 +410,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -423,7 +423,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -436,7 +436,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -449,7 +449,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -462,7 +462,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -477,7 +477,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -498,7 +498,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(4 * time.Millisecond), logproto.BACKWARD, 2, @@ -506,7 +506,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "backward all overlap and all chunks have a through time greater than query through time": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -523,7 +523,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -540,7 +540,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -557,7 +557,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -574,7 +574,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -591,7 +591,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -606,7 +606,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -627,7 
+627,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(4 * time.Millisecond), logproto.BACKWARD, 2, @@ -635,7 +635,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "backward with overlapping non-continuous entries": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(0 * time.Millisecond), @@ -648,7 +648,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(1 * time.Millisecond), @@ -661,7 +661,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -674,7 +674,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(4 * time.Millisecond), @@ -689,7 +689,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(7 * time.Millisecond), @@ -726,7 +726,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(8 * time.Millisecond), logproto.BACKWARD, 2, @@ -734,7 +734,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "forward without overlap": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -747,7 +747,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -756,7 +756,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -767,7 +767,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -784,7 +784,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(3 * time.Millisecond), logproto.FORWARD, 2, @@ -792,7 +792,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "backward without overlap": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -805,7 +805,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -814,7 +814,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -825,7 +825,7 @@ func 
Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -842,7 +842,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(3 * time.Millisecond), logproto.BACKWARD, 2, @@ -858,7 +858,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { "forward identicals": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -867,7 +867,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -876,7 +876,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -889,7 +889,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -898,7 +898,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -907,7 +907,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -916,7 +916,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -927,7 +927,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -944,7 +944,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(4 * time.Millisecond), logproto.FORWARD, 1, @@ -988,7 +988,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { "forward with overlap": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -1001,7 +1001,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -1014,7 +1014,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -1027,7 +1027,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -1040,7 +1040,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ 
- Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -1053,7 +1053,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -1068,7 +1068,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, []logproto.Series{ { - Labels: fooLabels, + Labels: fooLabels.String(), Samples: []logproto.Sample{ { Timestamp: from.UnixNano(), @@ -1093,14 +1093,14 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(4 * time.Millisecond), 2, }, "forward with overlapping non-continuous entries": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -1117,7 +1117,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -1130,7 +1130,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(time.Millisecond), @@ -1143,7 +1143,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -1158,7 +1158,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, []logproto.Series{ { - Labels: fooLabels, + Labels: fooLabels.String(), Samples: []logproto.Sample{ { Timestamp: from.UnixNano(), @@ -1178,14 +1178,14 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(3 * time.Millisecond), 2, }, "forward last chunk boundaries equal to end": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: time.Unix(1, 0), @@ -1198,7 +1198,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { @@ -1212,7 +1212,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: time.Unix(3, 0), @@ -1227,7 +1227,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, []logproto.Series{ { - Labels: fooLabels, + Labels: fooLabels.String(), Samples: []logproto.Sample{ { Timestamp: time.Unix(1, 0).UnixNano(), @@ -1242,14 +1242,14 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), time.Unix(1, 0), time.Unix(3, 0), 2, }, "forward last chunk boundaries equal to end and start": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: time.Unix(1, 0), @@ -1262,7 +1262,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: 
fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { @@ -1278,7 +1278,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, []logproto.Series{ { - Labels: fooLabels, + Labels: fooLabels.String(), Samples: []logproto.Sample{ { Timestamp: time.Unix(1, 0).UnixNano(), @@ -1293,14 +1293,14 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), time.Unix(1, 0), time.Unix(1, 0), 2, }, "forward without overlap": { []*LazyChunk{ newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -1313,7 +1313,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -1322,7 +1322,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }), newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(3 * time.Millisecond), @@ -1333,7 +1333,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, []logproto.Series{ { - Labels: fooLabels, + Labels: fooLabels.String(), Samples: []logproto.Sample{ { Timestamp: from.UnixNano(), @@ -1353,7 +1353,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { }, }, }, - fooLabelsWithName, + fooLabelsWithName.String(), from, from.Add(3 * time.Millisecond), 2, }, @@ -1391,7 +1391,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { func TestPartitionOverlappingchunks(t *testing.T) { var ( oneThroughFour = newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -1404,7 +1404,7 @@ func TestPartitionOverlappingchunks(t *testing.T) { }, }) two = newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(1 * time.Millisecond), @@ -1413,7 +1413,7 @@ func TestPartitionOverlappingchunks(t *testing.T) { }, }) three = newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from.Add(2 * time.Millisecond), @@ -1636,7 +1636,7 @@ func Test_IsInvalidChunkError(t *testing.T) { func TestBatchCancel(t *testing.T) { createChunk := func(from time.Time) *LazyChunk { return newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), Entries: []logproto.Entry{ { Timestamp: from, @@ -1665,7 +1665,7 @@ func TestBatchCancel(t *testing.T) { }, } - it, err := newLogBatchIterator(ctx, s, NilMetrics, chunks, 1, newMatchers(fooLabels), log.NewNoopPipeline(), logproto.FORWARD, from, time.Now(), nil) + it, err := newLogBatchIterator(ctx, s, NilMetrics, chunks, 1, newMatchers(fooLabels.String()), log.NewNoopPipeline(), logproto.FORWARD, from, time.Now(), nil) require.NoError(t, err) defer require.NoError(t, it.Close()) for it.Next() { diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 8d8fc18ab000..c29e37ef6f8c 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -24,7 +24,8 @@ func TestLazyChunkIterator(t *testing.T) { }{ { newLazyChunk(logproto.Stream{ - Labels: fooLabelsWithName, + Labels: fooLabelsWithName.String(), + Hash: fooLabelsWithName.Hash(), Entries: []logproto.Entry{ { Timestamp: 
from, @@ -34,7 +35,8 @@ func TestLazyChunkIterator(t *testing.T) { }), []logproto.Stream{ { - Labels: fooLabels, + Labels: fooLabels.String(), + Hash: fooLabels.Hash(), Entries: []logproto.Entry{ { Timestamp: from, diff --git a/pkg/storage/store_test.go b/pkg/storage/store_test.go index 0c0047194e34..b7a68d25fdb0 100644 --- a/pkg/storage/store_test.go +++ b/pkg/storage/store_test.go @@ -910,7 +910,7 @@ func TestStore_MultipleBoltDBShippersInConfig(t *testing.T) { defer store.Stop() // get all the chunks from both the stores - chunks, err := store.Get(ctx, "fake", timeToModelTime(firstStoreDate), timeToModelTime(secondStoreDate.Add(24*time.Hour)), newMatchers(fooLabelsWithName)...) + chunks, err := store.Get(ctx, "fake", timeToModelTime(firstStoreDate), timeToModelTime(secondStoreDate.Add(24*time.Hour)), newMatchers(fooLabelsWithName.String())...) require.NoError(t, err) // we get common chunk twice because it is indexed in both the stores @@ -940,9 +940,10 @@ func parseDate(in string) time.Time { return t } -func buildTestStreams(labels string, tr timeRange) logproto.Stream { +func buildTestStreams(labels labels.Labels, tr timeRange) logproto.Stream { stream := logproto.Stream{ - Labels: labels, + Labels: labels.String(), + Hash: labels.Hash(), Entries: []logproto.Entry{}, } diff --git a/pkg/storage/util_test.go b/pkg/storage/util_test.go index 2844b9dc21a9..fb54e53b6ac7 100644 --- a/pkg/storage/util_test.go +++ b/pkg/storage/util_test.go @@ -23,8 +23,8 @@ import ( ) var ( - fooLabelsWithName = "{foo=\"bar\", __name__=\"logs\"}" - fooLabels = "{foo=\"bar\"}" + fooLabelsWithName = labels.Labels{{Name: "foo", Value: "bar"}, {Name: "__name__", Value: "logs"}} + fooLabels = labels.Labels{{Name: "foo", Value: "bar"}} ) var from = time.Unix(0, time.Millisecond.Nanoseconds()) From 398f6c8dacc7ab753891915f7754b47b84fc277d Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Mon, 31 Jan 2022 16:56:42 +0100 Subject: [PATCH 08/14] Refactor HeapIterator into merge and sort Iterator. 
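This refactor splits the old heap iterator into two constructors with different guarantees: NewMergeEntryIterator keeps the heap plus tuple-collection machinery and deduplicates identical entries that can arrive from several replicas, while NewSortEntryIterator only k-way orders entries across its inputs and skips the dedup bookkeeping (NewMergeSampleIterator / NewSortSampleIterator mirror this for samples). A rough, self-contained usage sketch, assuming only the constructors introduced in this series and the github.com/grafana/loki import paths used by the tests below (illustrative, not part of the patch):

    package main

    import (
        "context"
        "fmt"
        "time"

        "github.com/grafana/loki/pkg/iter"
        "github.com/grafana/loki/pkg/logproto"
    )

    func main() {
        foo := logproto.Stream{
            Labels: `{app="foo"}`,
            Entries: []logproto.Entry{
                {Timestamp: time.Unix(0, 1), Line: "1"},
                {Timestamp: time.Unix(0, 2), Line: "2"},
            },
        }
        bar := logproto.Stream{
            Labels: `{app="bar"}`,
            Entries: []logproto.Entry{
                {Timestamp: time.Unix(0, 1), Line: "1"},
            },
        }

        // Same stream fed twice, e.g. from two ingester replicas: the merge
        // iterator collapses the duplicated entries.
        merged := iter.NewMergeEntryIterator(context.Background(),
            []iter.EntryIterator{iter.NewStreamIterator(foo), iter.NewStreamIterator(foo)},
            logproto.FORWARD)
        defer merged.Close()
        for merged.Next() {
            fmt.Println(merged.Labels(), merged.Entry().Line) // "1" and "2", once each
        }

        // Two distinct streams: no duplicates are possible, so the cheaper
        // sort-only iterator is enough to interleave them by timestamp.
        sorted := iter.NewSortEntryIterator(
            []iter.EntryIterator{iter.NewStreamIterator(foo), iter.NewStreamIterator(bar)},
            logproto.FORWARD)
        defer sorted.Close()
        for sorted.Next() {
            fmt.Println(sorted.Labels(), sorted.Entry().Line)
        }
    }

In the rest of the series, per-chunk and per-stream call sites switch to the sort variant, while the ingester keeps the merge variant where it combines its in-memory data with the store.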
Signed-off-by: Cyril Tovena --- pkg/chunkenc/memchunk.go | 10 +- pkg/chunkenc/unordered.go | 4 +- pkg/ingester/ingester.go | 23 ++-- pkg/ingester/instance.go | 8 +- pkg/ingester/instance_test.go | 9 +- pkg/ingester/stream.go | 4 +- pkg/iter/entry_iterator.go | 185 ++++++++++++++++++++++++------- pkg/iter/entry_iterator_test.go | 166 ++++++++++++++++++++++----- pkg/iter/sample_iterator.go | 162 ++++++++++++++++++++++----- pkg/iter/sample_iterator_test.go | 114 ++++++++++++++++--- pkg/logcli/client/file.go | 6 +- pkg/logql/engine_test.go | 32 ++---- pkg/logql/range_vector_test.go | 2 +- pkg/logql/sharding.go | 4 +- pkg/logql/test_utils.go | 4 +- pkg/querier/querier.go | 8 +- pkg/querier/tail.go | 2 +- pkg/storage/batch.go | 12 +- 18 files changed, 571 insertions(+), 184 deletions(-) diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index aa4239175fd4..97405365713c 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -818,7 +818,7 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi if ordered { it = iter.NewNonOverlappingIterator(blockItrs, "") } else { - it = iter.NewHeapIterator(ctx, blockItrs, direction) + it = iter.NewSortEntryIterator(blockItrs, direction) } return iter.NewTimeRangedIterator( @@ -851,7 +851,7 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi if ordered { return iter.NewNonOverlappingIterator(blockItrs, ""), nil } - return iter.NewHeapIterator(ctx, blockItrs, direction), nil + return iter.NewSortEntryIterator(blockItrs, direction), nil } // Iterator implements Chunk. @@ -886,7 +886,7 @@ func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, if ordered { it = iter.NewNonOverlappingSampleIterator(its, "") } else { - it = iter.NewHeapSampleIterator(ctx, its) + it = iter.NewSortSampleIterator(its) } return iter.NewTimeRangedSampleIterator( @@ -1041,7 +1041,7 @@ func (hb *headBlock) Iterator(ctx context.Context, direction logproto.Direction, for _, stream := range streams { streamsResult = append(streamsResult, *stream) } - return iter.NewStreamsIterator(ctx, streamsResult, direction) + return iter.NewStreamsIterator(streamsResult, direction) } func (hb *headBlock) SampleIterator(ctx context.Context, mint, maxt int64, extractor log.StreamSampleExtractor) iter.SampleIterator { @@ -1082,7 +1082,7 @@ func (hb *headBlock) SampleIterator(ctx context.Context, mint, maxt int64, extra for _, s := range series { seriesRes = append(seriesRes, *s) } - return iter.SampleIteratorWithClose(iter.NewMultiSeriesIterator(ctx, seriesRes), func() error { + return iter.SampleIteratorWithClose(iter.NewMultiSeriesIterator(seriesRes), func() error { for _, s := range series { SamplesPool.Put(s.Samples) } diff --git a/pkg/chunkenc/unordered.go b/pkg/chunkenc/unordered.go index 4c5a416ab491..f7bf4ae6a088 100644 --- a/pkg/chunkenc/unordered.go +++ b/pkg/chunkenc/unordered.go @@ -257,7 +257,7 @@ func (hb *unorderedHeadBlock) Iterator( for _, stream := range streams { streamsResult = append(streamsResult, *stream) } - return iter.NewStreamsIterator(ctx, streamsResult, direction) + return iter.NewStreamsIterator(streamsResult, direction) } // nolint:unused @@ -308,7 +308,7 @@ func (hb *unorderedHeadBlock) SampleIterator( for _, s := range series { seriesRes = append(seriesRes, *s) } - return iter.SampleIteratorWithClose(iter.NewMultiSeriesIterator(ctx, seriesRes), func() error { + return iter.SampleIteratorWithClose(iter.NewMultiSeriesIterator(seriesRes), func() error { for _, s := 
range series { SamplesPool.Put(s.Samples) } diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index 87226d357590..def6e16f7421 100644 --- a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -561,7 +561,7 @@ func (i *Ingester) Query(req *logproto.QueryRequest, queryServer logproto.Querie } instance := i.GetOrCreateInstance(instanceID) - itrs, err := instance.Query(ctx, logql.SelectLogParams{QueryRequest: req}) + it, err := instance.Query(ctx, logql.SelectLogParams{QueryRequest: req}) if err != nil { return err } @@ -577,17 +577,15 @@ func (i *Ingester) Query(req *logproto.QueryRequest, queryServer logproto.Querie }} storeItr, err := i.store.SelectLogs(ctx, storeReq) if err != nil { + errUtil.LogErrorWithContext(ctx, "closing iterator", it.Close) return err } - - itrs = append(itrs, storeItr) + it = iter.NewMergeEntryIterator(ctx, []iter.EntryIterator{it, storeItr}, req.Direction) } - heapItr := iter.NewHeapIterator(ctx, itrs, req.Direction) - - defer errUtil.LogErrorWithContext(ctx, "closing iterator", heapItr.Close) + defer errUtil.LogErrorWithContext(ctx, "closing iterator", it.Close) - return sendBatches(ctx, heapItr, queryServer, req.Limit) + return sendBatches(ctx, it, queryServer, req.Limit) } // QuerySample the ingesters for series from logs matching a set of matchers. @@ -601,7 +599,7 @@ func (i *Ingester) QuerySample(req *logproto.SampleQueryRequest, queryServer log } instance := i.GetOrCreateInstance(instanceID) - itrs, err := instance.QuerySample(ctx, logql.SelectSampleParams{SampleQueryRequest: req}) + it, err := instance.QuerySample(ctx, logql.SelectSampleParams{SampleQueryRequest: req}) if err != nil { return err } @@ -615,17 +613,16 @@ func (i *Ingester) QuerySample(req *logproto.SampleQueryRequest, queryServer log }} storeItr, err := i.store.SelectSamples(ctx, storeReq) if err != nil { + errUtil.LogErrorWithContext(ctx, "closing iterator", it.Close) return err } - itrs = append(itrs, storeItr) + it = iter.NewMergeSampleIterator(ctx, []iter.SampleIterator{it, storeItr}) } - heapItr := iter.NewHeapSampleIterator(ctx, itrs) - - defer errUtil.LogErrorWithContext(ctx, "closing iterator", heapItr.Close) + defer errUtil.LogErrorWithContext(ctx, "closing iterator", it.Close) - return sendSampleBatches(ctx, heapItr, queryServer) + return sendSampleBatches(ctx, it, queryServer) } // boltdbShipperMaxLookBack returns a max look back period only if active index type is boltdb-shipper. 
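One consequence of the ingester change just above: Query and QuerySample now receive a single iterator from the instance, so when the follow-up store lookup fails that iterator has to be closed before returning, otherwise its resources leak. A minimal sketch of that shape, where the helper name queryWithStore and the package name are hypothetical, and plain error handling stands in for errUtil.LogErrorWithContext:

    package example

    import (
        "context"

        "github.com/grafana/loki/pkg/iter"
        "github.com/grafana/loki/pkg/logproto"
    )

    // queryWithStore mirrors the shape of the new Ingester.Query path: open the
    // store iterator, close the in-memory one on failure, otherwise merge both.
    func queryWithStore(
        ctx context.Context,
        instanceIt iter.EntryIterator,
        openStore func(context.Context) (iter.EntryIterator, error),
        direction logproto.Direction,
    ) (iter.EntryIterator, error) {
        storeIt, err := openStore(ctx)
        if err != nil {
            // Best-effort cleanup; the real code logs a failed Close rather
            // than returning it.
            _ = instanceIt.Close()
            return nil, err
        }
        // The deduplicating merge iterator is used at this boundary, as in the
        // patch, since the same entries may exist both in memory and in
        // already-flushed chunks.
        return iter.NewMergeEntryIterator(ctx,
            []iter.EntryIterator{instanceIt, storeIt}, direction), nil
    }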
diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index b21f896690b2..6bf66c347217 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -306,7 +306,7 @@ func (i *instance) getLabelsFromFingerprint(fp model.Fingerprint) labels.Labels return s.labels } -func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) ([]iter.EntryIterator, error) { +func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) (iter.EntryIterator, error) { expr, err := req.LogSelector() if err != nil { return nil, err @@ -341,10 +341,10 @@ func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) ([]iter return nil, err } - return iters, nil + return iter.NewSortEntryIterator(iters, req.Direction), nil } -func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams) ([]iter.SampleIterator, error) { +func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams) (iter.SampleIterator, error) { expr, err := req.Expr() if err != nil { return nil, err @@ -386,7 +386,7 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams return nil, err } - return iters, nil + return iter.NewSortSampleIterator(iters), nil } // Label returns the label names or values depending on the given request diff --git a/pkg/ingester/instance_test.go b/pkg/ingester/instance_test.go index 5a21111023b4..5918e443cc99 100644 --- a/pkg/ingester/instance_test.go +++ b/pkg/ingester/instance_test.go @@ -16,7 +16,6 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" loki_runtime "github.com/grafana/loki/pkg/runtime" @@ -497,7 +496,7 @@ func Test_Iterator(t *testing.T) { } // prepare iterators. - itrs, err := instance.Query(ctx, + it, err := instance.Query(ctx, logql.SelectLogParams{ QueryRequest: &logproto.QueryRequest{ Selector: `{job="3"} | logfmt`, @@ -509,12 +508,11 @@ func Test_Iterator(t *testing.T) { }, ) require.NoError(t, err) - heapItr := iter.NewHeapIterator(ctx, itrs, direction) // assert the order is preserved. var res *logproto.QueryResponse require.NoError(t, - sendBatches(ctx, heapItr, + sendBatches(ctx, it, fakeQueryServer( func(qr *logproto.QueryResponse) error { res = qr @@ -578,7 +576,7 @@ func Test_ChunkFilter(t *testing.T) { } // prepare iterators. - itrs, err := instance.Query(ctx, + it, err := instance.Query(ctx, logql.SelectLogParams{ QueryRequest: &logproto.QueryRequest{ Selector: `{job="3"}`, @@ -590,7 +588,6 @@ func Test_ChunkFilter(t *testing.T) { }, ) require.NoError(t, err) - it := iter.NewHeapIterator(ctx, itrs, direction) defer it.Close() for it.Next() { diff --git a/pkg/ingester/stream.go b/pkg/ingester/stream.go index 55a881f5685d..87f1d13bfdb2 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -470,7 +470,7 @@ func (s *stream) Iterator(ctx context.Context, statsCtx *stats.Context, from, th if ordered { return iter.NewNonOverlappingIterator(iterators, ""), nil } - return iter.NewHeapIterator(ctx, iterators, direction), nil + return iter.NewSortEntryIterator(iterators, direction), nil } // Returns an SampleIterator. 
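The stream.Iterator hunk above, and the SampleIterator hunk that follows, make the same decision at the chunk level: iterators known to be ordered and non-overlapping are simply concatenated with NewNonOverlappingIterator, and only the unordered case goes through the new sort-only iterator (per its doc comment, the merge variant is reserved for inputs that may contain duplicates). A tiny illustrative sketch of that branch, with a hypothetical wrapper and package name:

    package example

    import (
        "github.com/grafana/loki/pkg/iter"
        "github.com/grafana/loki/pkg/logproto"
    )

    // pickEntryIterator shows the branch stream.Iterator takes after this patch:
    // ordered, non-overlapping chunk iterators are concatenated; anything else
    // is k-way sorted by timestamp.
    func pickEntryIterator(ordered bool, its []iter.EntryIterator, dir logproto.Direction) iter.EntryIterator {
        if ordered {
            return iter.NewNonOverlappingIterator(its, "")
        }
        return iter.NewSortEntryIterator(its, dir)
    }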
@@ -507,7 +507,7 @@ func (s *stream) SampleIterator(ctx context.Context, statsCtx *stats.Context, fr if ordered { return iter.NewNonOverlappingSampleIterator(iterators, ""), nil } - return iter.NewHeapSampleIterator(ctx, iterators), nil + return iter.NewSortSampleIterator(iterators), nil } func (s *stream) addTailer(t *tailer) { diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 282b39adc056..3b8ec7d704c0 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -135,8 +135,8 @@ type HeapIterator interface { Push(EntryIterator) } -// heapIterator iterates over a heap of iterators. -type heapIterator struct { +// mergeEntryIterator iterates over a heap of iterators and merge duplicate entries. +type mergeEntryIterator struct { heap interface { heap.Interface Peek() EntryIterator @@ -145,16 +145,17 @@ type heapIterator struct { prefetched bool stats *stats.Context - tuples []tuple - currEntry logproto.Entry - currLabels string - errs []error + tuples []tuple + currEntry entryWithLabels + errs []error } -// NewHeapIterator returns a new iterator which uses a heap to merge together -// entries for multiple interators. -func NewHeapIterator(ctx context.Context, is []EntryIterator, direction logproto.Direction) HeapIterator { - result := &heapIterator{is: is, stats: stats.FromContext(ctx)} +// NewMergeEntryIterator returns a new iterator which uses a heap to merge together entries for multiple iterators and deduplicate entries if any. +// The iterator only order and merge entries across given `is` iterators, it does not merge entries within individual iterator. +// This means using this iterator with a single iterator will result in the same result as the input iterator. +// If you don't need to deduplicate entries, use `NewSortEntryIterator` instead. +func NewMergeEntryIterator(ctx context.Context, is []EntryIterator, direction logproto.Direction) HeapIterator { + result := &mergeEntryIterator{is: is, stats: stats.FromContext(ctx)} switch direction { case logproto.BACKWARD: result.heap = &iteratorMaxHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} @@ -171,7 +172,7 @@ func NewHeapIterator(ctx context.Context, is []EntryIterator, direction logproto // prefetch iterates over all inner iterators to merge together, calls Next() on // each of them to prefetch the first entry and pushes of them - who are not // empty - to the heap -func (i *heapIterator) prefetch() { +func (i *mergeEntryIterator) prefetch() { if i.prefetched { return } @@ -192,7 +193,7 @@ func (i *heapIterator) prefetch() { // // If the iterator has no more entries or an error occur while advancing it, the iterator // is not pushed to the heap and any possible error captured, so that can be get via Error(). -func (i *heapIterator) requeue(ei EntryIterator, advanced bool) { +func (i *mergeEntryIterator) requeue(ei EntryIterator, advanced bool) { if advanced || ei.Next() { heap.Push(i.heap, ei) return @@ -204,7 +205,7 @@ func (i *heapIterator) requeue(ei EntryIterator, advanced bool) { util.LogError("closing iterator", ei.Close) } -func (i *heapIterator) Push(ei EntryIterator) { +func (i *mergeEntryIterator) Push(ei EntryIterator) { i.requeue(ei, false) } @@ -213,7 +214,7 @@ type tuple struct { EntryIterator } -func (i *heapIterator) Next() bool { +func (i *mergeEntryIterator) Next() bool { i.prefetch() if i.heap.Len() == 0 { @@ -222,8 +223,8 @@ func (i *heapIterator) Next() bool { // shortcut for the last iterator. 
if i.heap.Len() == 1 { - i.currEntry = i.heap.Peek().Entry() - i.currLabels = i.heap.Peek().Labels() + i.currEntry.entry = i.heap.Peek().Entry() + i.currEntry.labels = i.heap.Peek().Labels() if !i.heap.Peek().Next() { i.heap.Pop() } @@ -250,8 +251,8 @@ func (i *heapIterator) Next() bool { // shortcut if we have a single tuple. if len(i.tuples) == 1 { - i.currEntry = i.tuples[0].Entry - i.currLabels = i.tuples[0].Labels() + i.currEntry.entry = i.tuples[0].Entry + i.currEntry.labels = i.tuples[0].Labels() i.requeue(i.tuples[0].EntryIterator, false) i.tuples = i.tuples[:0] return true @@ -260,12 +261,12 @@ func (i *heapIterator) Next() bool { // Find in tuples which entry occurs most often which, due to quorum based // replication, is guaranteed to be the correct next entry. t := i.tuples[0] - i.currEntry = t.Entry - i.currLabels = t.Labels() + i.currEntry.entry = t.Entry + i.currEntry.labels = t.Labels() // Requeue the iterators, advancing them if they were consumed. for j := range i.tuples { - if i.tuples[j].Line != i.currEntry.Line { + if i.tuples[j].Line != i.currEntry.entry.Line { i.requeue(i.tuples[j].EntryIterator, true) continue } @@ -279,15 +280,15 @@ func (i *heapIterator) Next() bool { return true } -func (i *heapIterator) Entry() logproto.Entry { - return i.currEntry +func (i *mergeEntryIterator) Entry() logproto.Entry { + return i.currEntry.entry } -func (i *heapIterator) Labels() string { - return i.currLabels +func (i *mergeEntryIterator) Labels() string { + return i.currEntry.labels } -func (i *heapIterator) Error() error { +func (i *mergeEntryIterator) Error() error { switch len(i.errs) { case 0: return nil @@ -298,7 +299,7 @@ func (i *heapIterator) Error() error { } } -func (i *heapIterator) Close() error { +func (i *mergeEntryIterator) Close() error { for i.heap.Len() > 0 { if err := i.heap.Pop().(EntryIterator).Close(); err != nil { return err @@ -308,35 +309,143 @@ func (i *heapIterator) Close() error { return nil } -func (i *heapIterator) Peek() time.Time { +func (i *mergeEntryIterator) Peek() time.Time { i.prefetch() return i.heap.Peek().Entry().Timestamp } // Len returns the number of inner iterators on the heap, still having entries -func (i *heapIterator) Len() int { +func (i *mergeEntryIterator) Len() int { i.prefetch() return i.heap.Len() } +type entrySortIterator struct { + heap interface { + heap.Interface + Peek() EntryIterator + } + is []EntryIterator + prefetched bool + + currEntry entryWithLabels + errs []error +} + +// NewSortEntryIterator returns a new EntryIterator that sorts entries by timestamp (depending on the direction) the input iterators. +// The iterator only order entries across given `is` iterators, it does not sort entries within individual iterator. +// This means using this iterator with a single iterator will result in the same result as the input iterator. 
+func NewSortEntryIterator(is []EntryIterator, direction logproto.Direction) EntryIterator { + if len(is) == 0 { + return NoopIterator + } + if len(is) == 1 { + return is[0] + } + result := &entrySortIterator{is: is} + switch direction { + case logproto.BACKWARD: + result.heap = &iteratorMaxHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} + case logproto.FORWARD: + result.heap = &iteratorMinHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} + default: + panic("bad direction") + } + return result +} + +// init initialize the underlaying heap +func (i *entrySortIterator) init() { + if i.prefetched { + return + } + + i.prefetched = true + for _, it := range i.is { + if it.Next() { + i.heap.Push(it) + continue + } + + if err := it.Error(); err != nil { + i.errs = append(i.errs, err) + } + util.LogError("closing iterator", it.Close) + } + heap.Init(i.heap) + + // We can now clear the list of input iterators to merge, given they have all + // been processed and the non empty ones have been pushed to the heap + i.is = nil +} + +func (i *entrySortIterator) Next() bool { + i.init() + + if i.heap.Len() == 0 { + return false + } + + next := i.heap.Peek() + i.currEntry.entry = next.Entry() + i.currEntry.labels = next.Labels() + // if the top iterator is empty, we remove it. + if !next.Next() { + heap.Pop(i.heap) + if err := next.Error(); err != nil { + i.errs = append(i.errs, err) + } + util.LogError("closing iterator", next.Close) + return true + } + if i.heap.Len() > 1 { + heap.Fix(i.heap, 0) + } + return true +} + +func (i *entrySortIterator) Entry() logproto.Entry { + return i.currEntry.entry +} + +func (i *entrySortIterator) Labels() string { + return i.currEntry.labels +} + +func (i *entrySortIterator) Error() error { + switch len(i.errs) { + case 0: + return nil + case 1: + return i.errs[0] + default: + return util.MultiError(i.errs) + } +} + +func (i *entrySortIterator) Close() error { + for i.heap.Len() > 0 { + if err := i.heap.Pop().(EntryIterator).Close(); err != nil { + return err + } + } + return nil +} + // NewStreamsIterator returns an iterator over logproto.Stream -func NewStreamsIterator(ctx context.Context, streams []logproto.Stream, direction logproto.Direction) EntryIterator { +func NewStreamsIterator(streams []logproto.Stream, direction logproto.Direction) EntryIterator { is := make([]EntryIterator, 0, len(streams)) for i := range streams { is = append(is, NewStreamIterator(streams[i])) } - return NewHeapIterator(ctx, is, direction) + return NewSortEntryIterator(is, direction) } // NewQueryResponseIterator returns an iterator over a QueryResponse. 
-func NewQueryResponseIterator(ctx context.Context, resp *logproto.QueryResponse, direction logproto.Direction) EntryIterator { - is := make([]EntryIterator, 0, len(resp.Streams)) - for i := range resp.Streams { - is = append(is, NewStreamIterator(resp.Streams[i])) - } - return NewHeapIterator(ctx, is, direction) +func NewQueryResponseIterator(resp *logproto.QueryResponse, direction logproto.Direction) EntryIterator { + return NewStreamsIterator(resp.Streams, direction) } type queryClientIterator struct { @@ -365,7 +474,7 @@ func (i *queryClientIterator) Next() bool { return false } stats.JoinIngesters(ctx, batch.Stats) - i.curr = NewQueryResponseIterator(ctx, batch, i.direction) + i.curr = NewQueryResponseIterator(batch, i.direction) } return true diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index 459c39d6b3e7..3a563432d1cc 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -45,7 +45,7 @@ func TestIterator(t *testing.T) { // Test dedupe of overlapping iterators with the heap iterator. { - iterator: NewHeapIterator(context.Background(), []EntryIterator{ + iterator: NewMergeEntryIterator(context.Background(), []EntryIterator{ mkStreamIterator(offset(0, identity), defaultLabels), mkStreamIterator(offset(testSize/2, identity), defaultLabels), mkStreamIterator(offset(testSize, identity), defaultLabels), @@ -57,7 +57,7 @@ func TestIterator(t *testing.T) { // Test dedupe of overlapping iterators with the heap iterator (backward). { - iterator: NewHeapIterator(context.Background(), []EntryIterator{ + iterator: NewMergeEntryIterator(context.Background(), []EntryIterator{ mkStreamIterator(inverse(offset(0, identity)), defaultLabels), mkStreamIterator(inverse(offset(-testSize/2, identity)), defaultLabels), mkStreamIterator(inverse(offset(-testSize, identity)), defaultLabels), @@ -69,7 +69,7 @@ func TestIterator(t *testing.T) { // Test dedupe of entries with the same timestamp but different entries. { - iterator: NewHeapIterator(context.Background(), []EntryIterator{ + iterator: NewMergeEntryIterator(context.Background(), []EntryIterator{ mkStreamIterator(offset(0, constant(0)), defaultLabels), mkStreamIterator(offset(0, constant(0)), defaultLabels), mkStreamIterator(offset(testSize, constant(0)), defaultLabels), @@ -110,7 +110,7 @@ func TestIteratorMultipleLabels(t *testing.T) { }{ // Test merging with differing labels but same timestamps and values. { - iterator: NewHeapIterator(context.Background(), []EntryIterator{ + iterator: NewMergeEntryIterator(context.Background(), []EntryIterator{ mkStreamIterator(identity, "{foobar: \"baz1\"}"), mkStreamIterator(identity, "{foobar: \"baz2\"}"), }, logproto.FORWARD), @@ -128,7 +128,7 @@ func TestIteratorMultipleLabels(t *testing.T) { // Test merging with differing labels but all the same timestamps and different values. 
{ - iterator: NewHeapIterator(context.Background(), []EntryIterator{ + iterator: NewMergeEntryIterator(context.Background(), []EntryIterator{ mkStreamIterator(constant(0), "{foobar: \"baz1\"}"), mkStreamIterator(constant(0), "{foobar: \"baz2\"}"), }, logproto.FORWARD), @@ -158,7 +158,7 @@ func TestIteratorMultipleLabels(t *testing.T) { } } -func TestHeapIteratorPrefetch(t *testing.T) { +func TestMergeIteratorPrefetch(t *testing.T) { t.Parallel() type tester func(t *testing.T, i HeapIterator) @@ -182,7 +182,7 @@ func TestHeapIteratorPrefetch(t *testing.T) { t.Run(testName, func(t *testing.T) { t.Parallel() - i := NewHeapIterator(context.Background(), []EntryIterator{ + i := NewMergeEntryIterator(context.Background(), []EntryIterator{ mkStreamIterator(identity, "{foobar: \"baz1\"}"), mkStreamIterator(identity, "{foobar: \"baz2\"}"), }, logproto.FORWARD) @@ -234,7 +234,7 @@ func inverse(g generator) generator { } } -func TestHeapIteratorDeduplication(t *testing.T) { +func TestMergeIteratorDeduplication(t *testing.T) { foo := logproto.Stream{ Labels: `{app="foo"}`, Entries: []logproto.Entry{ @@ -272,7 +272,7 @@ func TestHeapIteratorDeduplication(t *testing.T) { require.NoError(t, it.Error()) } // forward iteration - it := NewHeapIterator(context.Background(), []EntryIterator{ + it := NewMergeEntryIterator(context.Background(), []EntryIterator{ NewStreamIterator(foo), NewStreamIterator(bar), NewStreamIterator(foo), @@ -284,7 +284,7 @@ func TestHeapIteratorDeduplication(t *testing.T) { assertIt(it, false, len(foo.Entries)) // backward iteration - it = NewHeapIterator(context.Background(), []EntryIterator{ + it = NewMergeEntryIterator(context.Background(), []EntryIterator{ mustReverseStreamIterator(NewStreamIterator(foo)), mustReverseStreamIterator(NewStreamIterator(bar)), mustReverseStreamIterator(NewStreamIterator(foo)), @@ -308,8 +308,8 @@ func TestReverseIterator(t *testing.T) { itr1 := mkStreamIterator(inverse(offset(testSize, identity)), defaultLabels) itr2 := mkStreamIterator(inverse(offset(testSize, identity)), "{foobar: \"bazbar\"}") - heapIterator := NewHeapIterator(context.Background(), []EntryIterator{itr1, itr2}, logproto.BACKWARD) - reversedIter, err := NewReversedIter(heapIterator, testSize, false) + mergeIterator := NewMergeEntryIterator(context.Background(), []EntryIterator{itr1, itr2}, logproto.BACKWARD) + reversedIter, err := NewReversedIter(mergeIterator, testSize, false) require.NoError(t, err) for i := int64((testSize / 2) + 1); i <= testSize; i++ { @@ -347,8 +347,8 @@ func TestReverseEntryIteratorUnlimited(t *testing.T) { itr1 := mkStreamIterator(offset(testSize, identity), defaultLabels) itr2 := mkStreamIterator(offset(testSize, identity), "{foobar: \"bazbar\"}") - heapIterator := NewHeapIterator(context.Background(), []EntryIterator{itr1, itr2}, logproto.BACKWARD) - reversedIter, err := NewReversedIter(heapIterator, 0, false) + mergeIterator := NewMergeEntryIterator(context.Background(), []EntryIterator{itr1, itr2}, logproto.BACKWARD) + reversedIter, err := NewReversedIter(mergeIterator, 0, false) require.NoError(t, err) var ct int @@ -546,7 +546,7 @@ func Test_DuplicateCount(t *testing.T) { } { t.Run(test.name, func(t *testing.T) { _, ctx := stats.NewContext(context.Background()) - it := NewHeapIterator(ctx, test.iters, test.direction) + it := NewMergeEntryIterator(ctx, test.iters, test.direction) defer it.Close() for it.Next() { } @@ -636,7 +636,7 @@ func TestNonOverlappingClose(t *testing.T) { require.Equal(t, true, b.closed.Load()) } -func BenchmarkHeapIterator(b 
*testing.B) { +func BenchmarkSortIterator(b *testing.B) { var ( ctx = context.Background() streams []logproto.Stream @@ -658,18 +658,130 @@ func BenchmarkHeapIterator(b *testing.B) { streams[i], streams[j] = streams[j], streams[i] }) - b.ResetTimer() - for i := 0; i < b.N; i++ { - b.StopTimer() - var itrs []EntryIterator - for i := 0; i < streamsCount; i++ { - itrs = append(itrs, NewStreamIterator(streams[i])) + b.Run("merge sort", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + b.StopTimer() + var itrs []EntryIterator + for i := 0; i < streamsCount; i++ { + itrs = append(itrs, NewStreamIterator(streams[i])) + } + b.StartTimer() + it := NewMergeEntryIterator(ctx, itrs, logproto.BACKWARD) + for it.Next() { + it.Entry() + } + it.Close() } - b.StartTimer() - it := NewHeapIterator(ctx, itrs, logproto.BACKWARD) + }) + + b.Run("sort", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + b.StopTimer() + var itrs []EntryIterator + for i := 0; i < streamsCount; i++ { + itrs = append(itrs, NewStreamIterator(streams[i])) + } + b.StartTimer() + it := NewSortEntryIterator(itrs, logproto.BACKWARD) + for it.Next() { + it.Entry() + } + it.Close() + } + }) +} + +func Test_EntrySortIterator(t *testing.T) { + t.Run("backward", func(t *testing.T) { + t.Parallel() + it := NewSortEntryIterator( + []EntryIterator{ + NewStreamIterator(logproto.Stream{ + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 5)}, + {Timestamp: time.Unix(0, 3)}, + {Timestamp: time.Unix(0, 0)}, + }, + Labels: `{foo="bar"}`, + }), + NewStreamIterator(logproto.Stream{ + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 4)}, + {Timestamp: time.Unix(0, 2)}, + {Timestamp: time.Unix(0, 1)}, + }, + Labels: `{foo="buzz"}`, + }), + }, logproto.BACKWARD) + var i int64 = 5 + defer it.Close() for it.Next() { - it.Entry() + require.Equal(t, time.Unix(0, i), it.Entry().Timestamp) + i-- } - it.Close() - } + }) + t.Run("forward", func(t *testing.T) { + t.Parallel() + it := NewSortEntryIterator( + []EntryIterator{ + NewStreamIterator(logproto.Stream{ + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 0)}, + {Timestamp: time.Unix(0, 3)}, + {Timestamp: time.Unix(0, 5)}, + }, + Labels: `{foo="bar"}`, + }), + NewStreamIterator(logproto.Stream{ + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 1)}, + {Timestamp: time.Unix(0, 2)}, + {Timestamp: time.Unix(0, 4)}, + }, + Labels: `{foo="buzz"}`, + }), + }, logproto.FORWARD) + var i int64 + defer it.Close() + for it.Next() { + require.Equal(t, time.Unix(0, i), it.Entry().Timestamp) + i++ + } + }) + t.Run("forward sort by stream", func(t *testing.T) { + t.Parallel() + it := NewSortEntryIterator( + []EntryIterator{ + NewStreamIterator(logproto.Stream{ + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 0)}, + {Timestamp: time.Unix(0, 3)}, + {Timestamp: time.Unix(0, 5)}, + }, + Labels: `b`, + }), + NewStreamIterator(logproto.Stream{ + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 0)}, + {Timestamp: time.Unix(0, 1)}, + {Timestamp: time.Unix(0, 2)}, + {Timestamp: time.Unix(0, 4)}, + }, + Labels: `a`, + }), + }, logproto.FORWARD) + // The first entry appears in both so we expect it to be sorted by Labels. 
+ require.True(t, it.Next()) + require.Equal(t, time.Unix(0, 0), it.Entry().Timestamp) + require.Equal(t, `a`, it.Labels()) + + var i int64 + defer it.Close() + for it.Next() { + require.Equal(t, time.Unix(0, i), it.Entry().Timestamp) + i++ + } + }) } diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 6d25610bbea8..70145c23ac99 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -138,24 +138,25 @@ func (h sampleIteratorHeap) Less(i, j int) bool { } } -// heapSampleIterator iterates over a heap of iterators. -type heapSampleIterator struct { +// mergeSampleIterator iterates over a heap of iterators by merging samples. +type mergeSampleIterator struct { heap *sampleIteratorHeap is []SampleIterator prefetched bool stats *stats.Context - tuples []sampletuple - curr logproto.Sample - currLabels string - errs []error + tuples []sampletuple + curr sampleWithLabels + errs []error } -// NewHeapSampleIterator returns a new iterator which uses a heap to merge together -// entries for multiple iterators. -func NewHeapSampleIterator(ctx context.Context, is []SampleIterator) SampleIterator { +// NewMergeSampleIterator returns a new iterator which uses a heap to merge together samples for multiple iterators and deduplicate if any. +// The iterator only order and merge entries across given `is` iterators, it does not merge entries within individual iterator. +// This means using this iterator with a single iterator will result in the same result as the input iterator. +// If you don't need to deduplicate sample, use `NewSortSampleIterator` instead. +func NewMergeSampleIterator(ctx context.Context, is []SampleIterator) SampleIterator { h := sampleIteratorHeap(make([]SampleIterator, 0, len(is))) - return &heapSampleIterator{ + return &mergeSampleIterator{ stats: stats.FromContext(ctx), is: is, heap: &h, @@ -166,7 +167,7 @@ func NewHeapSampleIterator(ctx context.Context, is []SampleIterator) SampleItera // prefetch iterates over all inner iterators to merge together, calls Next() on // each of them to prefetch the first entry and pushes of them - who are not // empty - to the heap -func (i *heapSampleIterator) prefetch() { +func (i *mergeSampleIterator) prefetch() { if i.prefetched { return } @@ -187,7 +188,7 @@ func (i *heapSampleIterator) prefetch() { // // If the iterator has no more entries or an error occur while advancing it, the iterator // is not pushed to the heap and any possible error captured, so that can be get via Error(). -func (i *heapSampleIterator) requeue(ei SampleIterator, advanced bool) { +func (i *mergeSampleIterator) requeue(ei SampleIterator, advanced bool) { if advanced || ei.Next() { heap.Push(i.heap, ei) return @@ -204,7 +205,7 @@ type sampletuple struct { SampleIterator } -func (i *heapSampleIterator) Next() bool { +func (i *mergeSampleIterator) Next() bool { i.prefetch() if i.heap.Len() == 0 { @@ -213,8 +214,8 @@ func (i *heapSampleIterator) Next() bool { // shortcut for the last iterator. 
if i.heap.Len() == 1 { - i.curr = i.heap.Peek().Sample() - i.currLabels = i.heap.Peek().Labels() + i.curr.Sample = i.heap.Peek().Sample() + i.curr.labels = i.heap.Peek().Labels() if !i.heap.Peek().Next() { i.heap.Pop() } @@ -239,8 +240,8 @@ func (i *heapSampleIterator) Next() bool { }) } - i.curr = i.tuples[0].Sample - i.currLabels = i.tuples[0].Labels() + i.curr.Sample = i.tuples[0].Sample + i.curr.labels = i.tuples[0].Labels() t := i.tuples[0] if len(i.tuples) == 1 { i.requeue(i.tuples[0].SampleIterator, false) @@ -263,15 +264,15 @@ func (i *heapSampleIterator) Next() bool { return true } -func (i *heapSampleIterator) Sample() logproto.Sample { - return i.curr +func (i *mergeSampleIterator) Sample() logproto.Sample { + return i.curr.Sample } -func (i *heapSampleIterator) Labels() string { - return i.currLabels +func (i *mergeSampleIterator) Labels() string { + return i.curr.labels } -func (i *heapSampleIterator) Error() error { +func (i *mergeSampleIterator) Error() error { switch len(i.errs) { case 0: return nil @@ -282,7 +283,7 @@ func (i *heapSampleIterator) Error() error { } } -func (i *heapSampleIterator) Close() error { +func (i *mergeSampleIterator) Close() error { for i.heap.Len() > 0 { if err := i.heap.Pop().(SampleIterator).Close(); err != nil { return err @@ -292,6 +293,111 @@ func (i *heapSampleIterator) Close() error { return nil } +// sortSampleIterator iterates over a heap of iterators by sorting samples. +type sortSampleIterator struct { + heap *sampleIteratorHeap + is []SampleIterator + prefetched bool + + curr sampleWithLabels + errs []error +} + +// NewSortSampleIterator returns a new SampleIterator that sorts samples by ascending timestamp the input iterators. +// The iterator only order sample across given `is` iterators, it does not sort samples within individual iterator. +// This means using this iterator with a single iterator will result in the same result as the input iterator. +func NewSortSampleIterator(is []SampleIterator) SampleIterator { + if len(is) == 0 { + return NoopIterator + } + if len(is) == 1 { + return is[0] + } + h := sampleIteratorHeap(make([]SampleIterator, 0, len(is))) + return &sortSampleIterator{ + is: is, + heap: &h, + } +} + +// init initialize the underlaying heap +func (i *sortSampleIterator) init() { + if i.prefetched { + return + } + + i.prefetched = true + for _, it := range i.is { + if it.Next() { + i.heap.Push(it) + continue + } + + if err := it.Error(); err != nil { + i.errs = append(i.errs, err) + } + util.LogError("closing iterator", it.Close) + } + heap.Init(i.heap) + + // We can now clear the list of input iterators to merge, given they have all + // been processed and the non empty ones have been pushed to the heap + i.is = nil +} + +func (i *sortSampleIterator) Next() bool { + i.init() + + if i.heap.Len() == 0 { + return false + } + + next := i.heap.Peek() + i.curr.Sample = next.Sample() + i.curr.labels = next.Labels() + // if the top iterator is empty, we remove it. 
+ if !next.Next() { + heap.Pop(i.heap) + if err := next.Error(); err != nil { + i.errs = append(i.errs, err) + } + util.LogError("closing iterator", next.Close) + return true + } + if i.heap.Len() > 1 { + heap.Fix(i.heap, 0) + } + return true +} + +func (i *sortSampleIterator) Sample() logproto.Sample { + return i.curr.Sample +} + +func (i *sortSampleIterator) Labels() string { + return i.curr.labels +} + +func (i *sortSampleIterator) Error() error { + switch len(i.errs) { + case 0: + return nil + case 1: + return i.errs[0] + default: + return util.MultiError(i.errs) + } +} + +func (i *sortSampleIterator) Close() error { + for i.heap.Len() > 0 { + if err := i.heap.Pop().(SampleIterator).Close(); err != nil { + return err + } + } + return nil +} + type sampleQueryClientIterator struct { client QuerySampleClient err error @@ -323,7 +429,7 @@ func (i *sampleQueryClientIterator) Next() bool { return false } stats.JoinIngesters(ctx, batch.Stats) - i.curr = NewSampleQueryResponseIterator(ctx, batch) + i.curr = NewSampleQueryResponseIterator(batch) } return true } @@ -345,8 +451,8 @@ func (i *sampleQueryClientIterator) Close() error { } // NewSampleQueryResponseIterator returns an iterator over a SampleQueryResponse. -func NewSampleQueryResponseIterator(ctx context.Context, resp *logproto.SampleQueryResponse) SampleIterator { - return NewMultiSeriesIterator(ctx, resp.Series) +func NewSampleQueryResponseIterator(resp *logproto.SampleQueryResponse) SampleIterator { + return NewMultiSeriesIterator(resp.Series) } type seriesIterator struct { @@ -386,12 +492,12 @@ func SampleIteratorWithClose(it SampleIterator, closeFn func() error) SampleIter } // NewMultiSeriesIterator returns an iterator over multiple logproto.Series -func NewMultiSeriesIterator(ctx context.Context, series []logproto.Series) SampleIterator { +func NewMultiSeriesIterator(series []logproto.Series) SampleIterator { is := make([]SampleIterator, 0, len(series)) for i := range series { is = append(is, NewSeriesIterator(series[i])) } - return NewHeapSampleIterator(ctx, is) + return NewSortSampleIterator(is) } // NewSeriesIterator iterates over sample in a series. 
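A minimal usage sketch of the two constructors introduced above (illustrative only, not part of the diff): NewMergeSampleIterator collapses identical samples coming from overlapping inputs, while NewSortSampleIterator only k-way sorts the inputs by timestamp and keeps duplicates. The import path github.com/grafana/loki/pkg/iter and the printed counts are assumptions based on the doc comments in this patch, not verified output.

package main

import (
	"context"
	"fmt"

	"github.com/grafana/loki/pkg/iter"
	"github.com/grafana/loki/pkg/logproto"
)

// newInputs returns two iterators over the same single-sample series,
// i.e. fully overlapping inputs.
func newInputs() []iter.SampleIterator {
	series := logproto.Series{
		Labels:  `{app="foo"}`,
		Samples: []logproto.Sample{{Timestamp: 1, Value: 1, Hash: 1}},
	}
	return []iter.SampleIterator{
		iter.NewSeriesIterator(series),
		iter.NewSeriesIterator(series),
	}
}

// count drains an iterator and returns how many samples it yielded.
func count(it iter.SampleIterator) int {
	defer it.Close()
	n := 0
	for it.Next() {
		n++
	}
	return n
}

func main() {
	// The merge variant deduplicates the repeated sample: expected to print 1.
	fmt.Println(count(iter.NewMergeSampleIterator(context.Background(), newInputs())))
	// The sort variant only orders by timestamp and keeps both copies: expected to print 2.
	fmt.Println(count(iter.NewSortSampleIterator(newInputs())))
}

Presumably this difference is why call sites whose inputs are already deduplicated (for example newChunksIterator in pkg/storage/batch.go below) switch to the cheaper sort variant, while the querier keeps the merge variant to collapse overlapping data from ingesters and the store.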
diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index ed19919e3ff0..b1952f6376a9 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -104,8 +104,8 @@ var carSeries = logproto.Series{ }, } -func TestNewHeapSampleIterator(t *testing.T) { - it := NewHeapSampleIterator(context.Background(), +func TestNewMergeSampleIterator(t *testing.T) { + it := NewMergeSampleIterator(context.Background(), []SampleIterator{ NewSeriesIterator(varSeries), NewSeriesIterator(carSeries), @@ -194,7 +194,7 @@ func TestReadSampleBatch(t *testing.T) { require.Equal(t, uint32(1), size) require.NoError(t, err) - res, size, err = ReadSampleBatch(NewMultiSeriesIterator(context.Background(), []logproto.Series{carSeries, varSeries}), 100) + res, size, err = ReadSampleBatch(NewMultiSeriesIterator([]logproto.Series{carSeries, varSeries}), 100) require.ElementsMatch(t, []logproto.Series{carSeries, varSeries}, res.Series) require.Equal(t, uint32(6), size) require.NoError(t, err) @@ -277,7 +277,7 @@ func TestSampleIteratorWithClose_ReturnsError(t *testing.T) { assert.Equal(t, err, err2) } -func BenchmarkHeapSampleIterator(b *testing.B) { +func BenchmarkSortSampleIterator(b *testing.B) { var ( ctx = context.Background() series []logproto.Series @@ -299,18 +299,102 @@ func BenchmarkHeapSampleIterator(b *testing.B) { series[i], series[j] = series[j], series[i] }) - b.ResetTimer() - for i := 0; i < b.N; i++ { - b.StopTimer() - var itrs []SampleIterator - for i := 0; i < seriesCount; i++ { - itrs = append(itrs, NewSeriesIterator(series[i])) + b.Run("merge", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + b.StopTimer() + var itrs []SampleIterator + for i := 0; i < seriesCount; i++ { + itrs = append(itrs, NewSeriesIterator(series[i])) + } + b.StartTimer() + it := NewMergeSampleIterator(ctx, itrs) + for it.Next() { + it.Sample() + } + it.Close() } - b.StartTimer() - it := NewHeapSampleIterator(ctx, itrs) + }) + b.Run("sort", func(b *testing.B) { + b.ResetTimer() + for i := 0; i < b.N; i++ { + b.StopTimer() + var itrs []SampleIterator + for i := 0; i < seriesCount; i++ { + itrs = append(itrs, NewSeriesIterator(series[i])) + } + b.StartTimer() + it := NewSortSampleIterator(itrs) + for it.Next() { + it.Sample() + } + it.Close() + } + }) +} + +func Test_SampleSortIterator(t *testing.T) { + t.Run("forward", func(t *testing.T) { + t.Parallel() + it := NewSortSampleIterator( + []SampleIterator{ + NewSeriesIterator(logproto.Series{ + Samples: []logproto.Sample{ + {Timestamp: 0}, + {Timestamp: 3}, + {Timestamp: 5}, + }, + Labels: `{foo="bar"}`, + }), + NewSeriesIterator(logproto.Series{ + Samples: []logproto.Sample{ + {Timestamp: 1}, + {Timestamp: 2}, + {Timestamp: 4}, + }, + Labels: `{foo="bar"}`, + }), + }) + var i int64 + defer it.Close() for it.Next() { - it.Sample() + require.Equal(t, i, it.Sample().Timestamp) + i++ } - it.Close() - } + }) + t.Run("forward sort by stream", func(t *testing.T) { + t.Parallel() + it := NewSortSampleIterator( + []SampleIterator{ + NewSeriesIterator(logproto.Series{ + Samples: []logproto.Sample{ + {Timestamp: 0}, + {Timestamp: 3}, + {Timestamp: 5}, + }, + Labels: `b`, + }), + NewSeriesIterator(logproto.Series{ + Samples: []logproto.Sample{ + {Timestamp: 0}, + {Timestamp: 1}, + {Timestamp: 2}, + {Timestamp: 4}, + }, + Labels: `a`, + }), + }) + + // The first entry appears in both so we expect it to be sorted by Labels. 
+ require.True(t, it.Next()) + require.Equal(t, int64(0), it.Sample().Timestamp) + require.Equal(t, `a`, it.Labels()) + + var i int64 + defer it.Close() + for it.Next() { + require.Equal(t, i, it.Sample().Timestamp) + i++ + } + }) } diff --git a/pkg/logcli/client/file.go b/pkg/logcli/client/file.go index 8debdf58e16e..a12c0981e77f 100644 --- a/pkg/logcli/client/file.go +++ b/pkg/logcli/client/file.go @@ -32,9 +32,7 @@ const ( defaultMaxFileSize = 20 * (1 << 20) // 20MB ) -var ( - ErrNotSupported = errors.New("not supported") -) +var ErrNotSupported = errors.New("not supported") // FileClient is a type of LogCLI client that do LogQL on log lines from // the given file directly, instead get log lines from Loki servers. @@ -63,7 +61,6 @@ func NewFileClient(r io.ReadCloser) *FileClient { labels: []string{defaultLabelKey}, labelValues: []string{defaultLabelValue}, } - } func (f *FileClient) Query(q string, limit int, t time.Time, direction logproto.Direction, quiet bool) (*loghttp.QueryResponse, error) { @@ -278,7 +275,6 @@ func newFileIterator( } return iter.NewStreamsIterator( - ctx, streamResult, params.Direction, ), nil diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 544bcac4cde2..5b6935af11fb 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -1340,7 +1340,7 @@ func TestEngine_RangeQuery(t *testing.T) { `topk(1,rate(({app=~"foo|bar"} |~".+bar")[1m])) by (app)`, time.Unix(60, 0), time.Unix(180, 0), 30 * time.Second, 0, logproto.FORWARD, 100, [][]logproto.Series{ { - newSeries(testSize, factor(10, identity), `{app="foo"}`), newSeries(testSize, factor(15, identity), `{app="fuzz"}`), + newSeries(testSize, factor(10, identity), `{app="foo"}`), newSeries(testSize, factor(5, identity), `{app="fuzz"}`), newSeries(testSize, identity, `{app="buzz"}`), }, }, @@ -2085,11 +2085,11 @@ type errorIteratorQuerier struct { } func (e errorIteratorQuerier) SelectLogs(ctx context.Context, p SelectLogParams) (iter.EntryIterator, error) { - return iter.NewHeapIterator(ctx, e.entries, p.Direction), nil + return iter.NewSortEntryIterator(e.entries, p.Direction), nil } func (e errorIteratorQuerier) SelectSamples(ctx context.Context, p SelectSampleParams) (iter.SampleIterator, error) { - return iter.NewHeapSampleIterator(ctx, e.samples), nil + return iter.NewSortSampleIterator(e.samples), nil } func TestStepEvaluator_Error(t *testing.T) { @@ -2269,11 +2269,6 @@ func getLocalQuerier(size int64) Querier { newSeries(size, identity, `{app="bar",bar="foo"}`), newSeries(size, identity, `{app="bar",bar="bazz"}`), newSeries(size, identity, `{app="bar",bar="fuzz"}`), - // some duplicates - newSeries(size, identity, `{app="foo"}`), - newSeries(size, identity, `{app="bar"}`), - newSeries(size, identity, `{app="bar",bar="bazz"}`), - newSeries(size, identity, `{app="bar"}`), }, }, streams: map[string][]logproto.Stream{ @@ -2286,11 +2281,6 @@ func getLocalQuerier(size int64) Querier { newStream(size, identity, `{app="bar",bar="foo"}`), newStream(size, identity, `{app="bar",bar="bazz"}`), newStream(size, identity, `{app="bar",bar="fuzz"}`), - // some duplicates - newStream(size, identity, `{app="foo"}`), - newStream(size, identity, `{app="bar"}`), - newStream(size, identity, `{app="bar",bar="bazz"}`), - newStream(size, identity, `{app="bar"}`), }, }, } @@ -2331,7 +2321,7 @@ func newQuerierRecorder(t *testing.T, data interface{}, params interface{}) *que func (q *querierRecorder) SelectLogs(ctx context.Context, p SelectLogParams) (iter.EntryIterator, error) { if !q.match { for _, s := 
range q.streams { - return iter.NewStreamsIterator(ctx, s, p.Direction), nil + return iter.NewStreamsIterator(s, p.Direction), nil } } recordID := paramsID(p) @@ -2339,17 +2329,13 @@ func (q *querierRecorder) SelectLogs(ctx context.Context, p SelectLogParams) (it if !ok { return nil, fmt.Errorf("no streams found for id: %s has: %+v", recordID, q.streams) } - iters := make([]iter.EntryIterator, 0, len(streams)) - for _, s := range streams { - iters = append(iters, iter.NewStreamIterator(s)) - } - return iter.NewHeapIterator(ctx, iters, p.Direction), nil + return iter.NewStreamsIterator(streams, p.Direction), nil } func (q *querierRecorder) SelectSamples(ctx context.Context, p SelectSampleParams) (iter.SampleIterator, error) { if !q.match { for _, s := range q.series { - return iter.NewMultiSeriesIterator(ctx, s), nil + return iter.NewMultiSeriesIterator(s), nil } } recordID := paramsID(p) @@ -2360,11 +2346,7 @@ func (q *querierRecorder) SelectSamples(ctx context.Context, p SelectSampleParam if !ok { return nil, fmt.Errorf("no series found for id: %s has: %+v", recordID, q.series) } - iters := make([]iter.SampleIterator, 0, len(series)) - for _, s := range series { - iters = append(iters, iter.NewSeriesIterator(s)) - } - return iter.NewHeapSampleIterator(ctx, iters), nil + return iter.NewMultiSeriesIterator(series), nil } func paramsID(p interface{}) string { diff --git a/pkg/logql/range_vector_test.go b/pkg/logql/range_vector_test.go index fca777db876e..74583781c9a4 100644 --- a/pkg/logql/range_vector_test.go +++ b/pkg/logql/range_vector_test.go @@ -34,7 +34,7 @@ var ( ) func newSampleIterator() iter.SampleIterator { - return iter.NewHeapSampleIterator(context.Background(), []iter.SampleIterator{ + return iter.NewSortSampleIterator([]iter.SampleIterator{ iter.NewSeriesIterator(logproto.Series{ Labels: labelFoo.String(), Samples: samples, diff --git a/pkg/logql/sharding.go b/pkg/logql/sharding.go index aca924b9a953..0e4ad4f72f30 100644 --- a/pkg/logql/sharding.go +++ b/pkg/logql/sharding.go @@ -324,7 +324,7 @@ func (ev *DownstreamEvaluator) Iterator( xs = append(xs, iter) } - return iter.NewHeapIterator(ctx, xs, params.Direction()), nil + return iter.NewSortEntryIterator(xs, params.Direction()), nil default: return nil, EvaluatorUnsupportedType(expr, ev) @@ -401,5 +401,5 @@ func ResultIterator(res logqlmodel.Result, params Params) (iter.EntryIterator, e if !ok { return nil, fmt.Errorf("unexpected type (%s) for ResultIterator; expected %s", res.Data.Type(), logqlmodel.ValueTypeStreams) } - return iter.NewStreamsIterator(context.Background(), streams, params.Direction()), nil + return iter.NewStreamsIterator(streams, params.Direction()), nil } diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index e5a0e7f57fbd..6f0cd94f1360 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -90,7 +90,7 @@ outer: } } - return iter.NewHeapIterator(ctx, streamIters, req.Direction), nil + return iter.NewSortEntryIterator(streamIters, req.Direction), nil } func processStream(in []logproto.Stream, pipeline log.Pipeline) []logproto.Stream { @@ -200,7 +200,7 @@ outer: filtered := processSeries(matched, extractor) return iter.NewTimeRangedSampleIterator( - iter.NewMultiSeriesIterator(ctx, filtered), + iter.NewMultiSeriesIterator(filtered), req.Start.UnixNano(), req.End.UnixNano()+1, ), nil diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index ee066c556ec9..052c192b5130 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -142,8 +142,10 @@ func (q *Querier) 
SelectLogs(ctx context.Context, params logql.SelectLogParams) iters = append(iters, storeIter) } - - return iter.NewHeapIterator(ctx, iters, params.Direction), nil + if len(iters) == 1 { + return iters[0], nil + } + return iter.NewMergeEntryIterator(ctx, iters, params.Direction), nil } func (q *Querier) SelectSamples(ctx context.Context, params logql.SelectSampleParams) (iter.SampleIterator, error) { @@ -185,7 +187,7 @@ func (q *Querier) SelectSamples(ctx context.Context, params logql.SelectSamplePa iters = append(iters, storeIter) } - return iter.NewHeapSampleIterator(ctx, iters), nil + return iter.NewMergeSampleIterator(ctx, iters), nil } func (q *Querier) buildQueryIntervals(queryStart, queryEnd time.Time) (*interval, *interval) { diff --git a/pkg/querier/tail.go b/pkg/querier/tail.go index 93938c44ec4a..a6454c05354c 100644 --- a/pkg/querier/tail.go +++ b/pkg/querier/tail.go @@ -272,7 +272,7 @@ func newTailer( waitEntryThrottle time.Duration, ) *Tailer { t := Tailer{ - openStreamIterator: iter.NewHeapIterator(context.Background(), []iter.EntryIterator{historicEntries}, logproto.FORWARD), + openStreamIterator: iter.NewMergeEntryIterator(context.Background(), []iter.EntryIterator{historicEntries}, logproto.FORWARD), querierTailClients: querierTailClients, delayFor: delayFor, responseChan: make(chan *loghttp.TailResponse, maxBufferedTailResponses), diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index e3778ba9db33..ddc8f5fa8040 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -396,8 +396,10 @@ func (it *logBatchIterator) newChunksIterator(b *chunkBatch) (iter.EntryIterator if err != nil { return nil, err } - - return iter.NewHeapIterator(it.ctx, iters, it.direction), nil + if len(iters) == 1 { + return iters[0], nil + } + return iter.NewSortEntryIterator(iters, it.direction), nil } func (it *logBatchIterator) buildIterators(chks map[model.Fingerprint][][]*LazyChunk, from, through time.Time, nextChunk *LazyChunk) ([]iter.EntryIterator, error) { @@ -440,7 +442,7 @@ func (it *logBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through result = append(result, iter.NewNonOverlappingIterator(iterators, "")) } - return iter.NewHeapIterator(it.ctx, result, it.direction), nil + return iter.NewMergeEntryIterator(it.ctx, result, it.direction), nil } type sampleBatchIterator struct { @@ -537,7 +539,7 @@ func (it *sampleBatchIterator) newChunksIterator(b *chunkBatch) (iter.SampleIter return nil, err } - return iter.NewHeapSampleIterator(it.ctx, iters), nil + return iter.NewSortSampleIterator(iters), nil } func (it *sampleBatchIterator) buildIterators(chks map[model.Fingerprint][][]*LazyChunk, from, through time.Time, nextChunk *LazyChunk) ([]iter.SampleIterator, error) { @@ -574,7 +576,7 @@ func (it *sampleBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, thro result = append(result, iter.NewNonOverlappingSampleIterator(iterators, "")) } - return iter.NewHeapSampleIterator(it.ctx, result), nil + return iter.NewMergeSampleIterator(it.ctx, result), nil } func removeMatchersByName(matchers []*labels.Matcher, names ...string) []*labels.Matcher { From 450e002399a9182d8b795d8677326dca2a702318 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Mon, 31 Jan 2022 17:17:45 +0100 Subject: [PATCH 09/14] lint Signed-off-by: Cyril Tovena --- pkg/logcli/client/file.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pkg/logcli/client/file.go b/pkg/logcli/client/file.go index a12c0981e77f..f8493d78dbaf 100644 --- a/pkg/logcli/client/file.go +++ 
b/pkg/logcli/client/file.go @@ -195,7 +195,7 @@ type querier struct { labels labels.Labels } -func (q *querier) SelectLogs(ctx context.Context, params logql.SelectLogParams) (iter.EntryIterator, error) { +func (q *querier) SelectLogs(_ context.Context, params logql.SelectLogParams) (iter.EntryIterator, error) { expr, err := params.LogSelector() if err != nil { return nil, fmt.Errorf("failed to extract selector for logs: %w", err) @@ -204,7 +204,7 @@ func (q *querier) SelectLogs(ctx context.Context, params logql.SelectLogParams) if err != nil { return nil, fmt.Errorf("failed to extract pipeline for logs: %w", err) } - return newFileIterator(ctx, q.r, params, pipeline.ForStream(q.labels)) + return newFileIterator(q.r, params, pipeline.ForStream(q.labels)) } func (q *querier) SelectSamples(ctx context.Context, params logql.SelectSampleParams) (iter.SampleIterator, error) { @@ -212,7 +212,6 @@ func (q *querier) SelectSamples(ctx context.Context, params logql.SelectSamplePa } func newFileIterator( - ctx context.Context, r io.Reader, params logql.SelectLogParams, pipeline logqllog.StreamPipeline, From 8fb36e32463873c789e76addeca34f9e3550ab25 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Mon, 31 Jan 2022 21:37:37 +0100 Subject: [PATCH 10/14] Order alphabetically when ordering samples. Signed-off-by: Cyril Tovena --- pkg/iter/entry_iterator.go | 53 ++++++++++++------------------------- pkg/iter/sample_iterator.go | 45 ++++++++++++++++++------------- pkg/logql/engine_test.go | 6 ++--- 3 files changed, 45 insertions(+), 59 deletions(-) diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index e14c8478ef08..8907ab5f4ff8 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -72,44 +72,24 @@ func (h *iteratorHeap) Pop() interface{} { return x } -type iteratorMinHeap struct { +type iteratorSortHeap struct { iteratorHeap + byAlphabetical bool + ascendingTime bool } -func (h iteratorMinHeap) Less(i, j int) bool { - t1, t2 := h.iteratorHeap[i].Entry().Timestamp, h.iteratorHeap[j].Entry().Timestamp - - un1 := t1.UnixNano() - un2 := t2.UnixNano() - - switch { - case un1 < un2: - return true - case un1 > un2: - return false - default: // un1 == un2: +func (h iteratorSortHeap) Less(i, j int) bool { + t1, t2 := h.iteratorHeap[i].Entry().Timestamp.UnixNano(), h.iteratorHeap[j].Entry().Timestamp.UnixNano() + if t1 == t2 { + if h.byAlphabetical { + return h.iteratorHeap[i].Labels() < h.iteratorHeap[j].Labels() + } return h.iteratorHeap[i].StreamHash() < h.iteratorHeap[j].StreamHash() } -} - -type iteratorMaxHeap struct { - iteratorHeap -} - -func (h iteratorMaxHeap) Less(i, j int) bool { - t1, t2 := h.iteratorHeap[i].Entry().Timestamp, h.iteratorHeap[j].Entry().Timestamp - - un1 := t1.UnixNano() - un2 := t2.UnixNano() - - switch { - case un1 < un2: - return false - case un1 > un2: - return true - default: // un1 == un2 - return h.iteratorHeap[i].StreamHash() < h.iteratorHeap[j].StreamHash() + if h.ascendingTime { + return t1 < t2 } + return t1 > t2 } // HeapIterator iterates over a heap of iterators with ability to push new iterators and get some properties like time of entry at peek and len @@ -144,9 +124,9 @@ func NewMergeEntryIterator(ctx context.Context, is []EntryIterator, direction lo result := &mergeEntryIterator{is: is, stats: stats.FromContext(ctx)} switch direction { case logproto.BACKWARD: - result.heap = &iteratorMaxHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), 
ascendingTime: false} case logproto.FORWARD: - result.heap = &iteratorMinHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: true} default: panic("bad direction") } @@ -329,6 +309,7 @@ type entrySortIterator struct { // NewSortEntryIterator returns a new EntryIterator that sorts entries by timestamp (depending on the direction) the input iterators. // The iterator only order entries across given `is` iterators, it does not sort entries within individual iterator. // This means using this iterator with a single iterator will result in the same result as the input iterator. +// When timestamp is equal, the iterator sorts samples by their label alphabetically. func NewSortEntryIterator(is []EntryIterator, direction logproto.Direction) EntryIterator { if len(is) == 0 { return NoopIterator @@ -339,9 +320,9 @@ func NewSortEntryIterator(is []EntryIterator, direction logproto.Direction) Entr result := &entrySortIterator{is: is} switch direction { case logproto.BACKWARD: - result.heap = &iteratorMaxHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: false, byAlphabetical: true} case logproto.FORWARD: - result.heap = &iteratorMinHeap{iteratorHeap: make([]EntryIterator, 0, len(is))} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: true, byAlphabetical: true} default: panic("bad direction") } diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 3855aaa0fd5c..f21ef5cd21b1 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -117,33 +117,34 @@ func (it *peekingSampleIterator) Error() error { return it.iter.Error() } -type sampleIteratorHeap []SampleIterator +type sampleIteratorHeap struct { + its []SampleIterator + byAlphabetical bool +} -func (h sampleIteratorHeap) Len() int { return len(h) } -func (h sampleIteratorHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] } -func (h sampleIteratorHeap) Peek() SampleIterator { return h[0] } +func (h sampleIteratorHeap) Len() int { return len(h.its) } +func (h sampleIteratorHeap) Swap(i, j int) { h.its[i], h.its[j] = h.its[j], h.its[i] } +func (h sampleIteratorHeap) Peek() SampleIterator { return h.its[0] } func (h *sampleIteratorHeap) Push(x interface{}) { - *h = append(*h, x.(SampleIterator)) + h.its = append(h.its, x.(SampleIterator)) } func (h *sampleIteratorHeap) Pop() interface{} { - old := *h - n := len(old) - x := old[n-1] - *h = old[0 : n-1] + n := len(h.its) + x := h.its[n-1] + h.its = h.its[0 : n-1] return x } func (h sampleIteratorHeap) Less(i, j int) bool { - s1, s2 := h[i].Sample(), h[j].Sample() - switch { - case s1.Timestamp < s2.Timestamp: - return true - case s1.Timestamp > s2.Timestamp: - return false - default: - return h[i].StreamHash() < h[j].StreamHash() + s1, s2 := h.its[i].Sample(), h.its[j].Sample() + if s1.Timestamp == s2.Timestamp { + if h.byAlphabetical { + return h.its[i].Labels() < h.its[j].Labels() + } + return h.its[i].StreamHash() < h.its[j].StreamHash() } + return s1.Timestamp < s2.Timestamp } // mergeSampleIterator iterates over a heap of iterators by merging samples. @@ -163,7 +164,9 @@ type mergeSampleIterator struct { // This means using this iterator with a single iterator will result in the same result as the input iterator. // If you don't need to deduplicate sample, use `NewSortSampleIterator` instead. 
func NewMergeSampleIterator(ctx context.Context, is []SampleIterator) SampleIterator { - h := sampleIteratorHeap(make([]SampleIterator, 0, len(is))) + h := sampleIteratorHeap{ + its: make([]SampleIterator, 0, len(is)), + } return &mergeSampleIterator{ stats: stats.FromContext(ctx), is: is, @@ -320,6 +323,7 @@ type sortSampleIterator struct { // NewSortSampleIterator returns a new SampleIterator that sorts samples by ascending timestamp the input iterators. // The iterator only order sample across given `is` iterators, it does not sort samples within individual iterator. // This means using this iterator with a single iterator will result in the same result as the input iterator. +// When timestamp is equal, the iterator sorts samples by their label alphabetically. func NewSortSampleIterator(is []SampleIterator) SampleIterator { if len(is) == 0 { return NoopIterator @@ -327,7 +331,10 @@ func NewSortSampleIterator(is []SampleIterator) SampleIterator { if len(is) == 1 { return is[0] } - h := sampleIteratorHeap(make([]SampleIterator, 0, len(is))) + h := sampleIteratorHeap{ + its: make([]SampleIterator, 0, len(is)), + byAlphabetical: true, + } return &sortSampleIterator{ is: is, heap: &h, diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 617978631ff5..67a21f15fe79 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -2377,7 +2377,6 @@ func newStream(n int64, f generator, lbsString string) logproto.Stream { return logproto.Stream{ Entries: entries, Labels: labels.String(), - Hash: labels.Hash(), } } @@ -2391,9 +2390,8 @@ func newSeries(n int64, f generator, lbsString string) logproto.Series { samples = append(samples, f(i).Sample) } return logproto.Series{ - Samples: samples, - Labels: labels.String(), - StreamHash: labels.Hash(), + Samples: samples, + Labels: labels.String(), } } From 3aafc9426db116edf9aa11feb2d83f3a1c1e7ecd Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 1 Feb 2022 10:23:09 +0100 Subject: [PATCH 11/14] Do not JSON encode the hash for legacy model. Signed-off-by: Cyril Tovena --- pkg/logproto/logproto.pb.go | 184 ++++++++++++++++++------------------ pkg/logproto/logproto.proto | 2 +- pkg/logproto/types.go | 2 +- 3 files changed, 94 insertions(+), 94 deletions(-) diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index ea26da9996f4..e520c5b21026 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -482,7 +482,7 @@ type StreamAdapter struct { Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` Entries []EntryAdapter `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries"` // hash contains the original hash of the stream. 
- Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"hash"` + Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"-"` } func (m *StreamAdapter) Reset() { *m = StreamAdapter{} } @@ -1440,97 +1440,97 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 1427 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x49, 0x8f, 0x13, 0xc7, - 0x17, 0x77, 0x79, 0x69, 0xdb, 0xcf, 0x0b, 0x56, 0xcd, 0x30, 0xe3, 0xbf, 0x81, 0xb6, 0xd5, 0x42, - 0x60, 0xfd, 0x21, 0x76, 0x98, 0x6c, 0x2c, 0x59, 0x34, 0x66, 0x42, 0x18, 0x82, 0x02, 0x34, 0x48, - 0x48, 0x48, 0x11, 0xea, 0xb1, 0x6b, 0xec, 0xd6, 0xd8, 0x6e, 0xd3, 0x55, 0x46, 0x1a, 0x29, 0x52, - 0xf2, 0x01, 0x12, 0x89, 0x43, 0xa4, 0x28, 0xca, 0x35, 0x87, 0x28, 0x87, 0x7c, 0x0e, 0x72, 0x43, - 0x39, 0xa1, 0x1c, 0x9c, 0x60, 0x2e, 0xd1, 0x28, 0x07, 0x3e, 0x42, 0x54, 0x4b, 0x77, 0x97, 0xcd, - 0x4c, 0xc0, 0x73, 0xc9, 0xa5, 0x5d, 0xaf, 0xea, 0xd5, 0x5b, 0x7e, 0xef, 0x57, 0xaf, 0xca, 0x70, - 0x6c, 0xb4, 0xd3, 0x6d, 0xf6, 0xbd, 0xee, 0xc8, 0xf7, 0x98, 0x17, 0x0e, 0x1a, 0xe2, 0x8b, 0x33, - 0x81, 0x5c, 0xa9, 0x76, 0x3d, 0xaf, 0xdb, 0x27, 0x4d, 0x21, 0x6d, 0x8d, 0xb7, 0x9b, 0xcc, 0x1d, - 0x10, 0xca, 0x9c, 0xc1, 0x48, 0xaa, 0x56, 0xde, 0xe8, 0xba, 0xac, 0x37, 0xde, 0x6a, 0xb4, 0xbd, - 0x41, 0xb3, 0xeb, 0x75, 0xbd, 0x48, 0x93, 0x4b, 0xd2, 0x3a, 0x1f, 0x29, 0xf5, 0x9a, 0x72, 0xfb, - 0xa0, 0x3f, 0xf0, 0x3a, 0xa4, 0xdf, 0xa4, 0xcc, 0x61, 0x54, 0x7e, 0xa5, 0x86, 0x75, 0x17, 0x72, - 0x37, 0xc7, 0xb4, 0x67, 0x93, 0x07, 0x63, 0x42, 0x19, 0xbe, 0x0a, 0x69, 0xca, 0x7c, 0xe2, 0x0c, - 0x68, 0x19, 0xd5, 0x12, 0xf5, 0xdc, 0xda, 0x6a, 0x23, 0x0c, 0xf6, 0xb6, 0x58, 0x58, 0xef, 0x38, - 0x23, 0x46, 0xfc, 0xd6, 0xd1, 0xdf, 0x27, 0x55, 0x43, 0x4e, 0xed, 0x4d, 0xaa, 0xc1, 0x2e, 0x3b, - 0x18, 0x58, 0x45, 0xc8, 0x4b, 0xc3, 0x74, 0xe4, 0x0d, 0x29, 0xb1, 0x7e, 0x88, 0x43, 0xfe, 0xd6, - 0x98, 0xf8, 0xbb, 0x81, 0xab, 0x0a, 0x64, 0x28, 0xe9, 0x93, 0x36, 0xf3, 0xfc, 0x32, 0xaa, 0xa1, - 0x7a, 0xd6, 0x0e, 0x65, 0xbc, 0x0c, 0xa9, 0xbe, 0x3b, 0x70, 0x59, 0x39, 0x5e, 0x43, 0xf5, 0x82, - 0x2d, 0x05, 0x7c, 0x11, 0x52, 0x94, 0x39, 0x3e, 0x2b, 0x27, 0x6a, 0xa8, 0x9e, 0x5b, 0xab, 0x34, - 0x24, 0x5a, 0x8d, 0x00, 0x83, 0xc6, 0x9d, 0x00, 0xad, 0x56, 0xe6, 0xf1, 0xa4, 0x1a, 0x7b, 0xf4, - 0x47, 0x15, 0xd9, 0x72, 0x0b, 0x7e, 0x17, 0x12, 0x64, 0xd8, 0x29, 0x27, 0x17, 0xd8, 0xc9, 0x37, - 0xe0, 0x73, 0x90, 0xed, 0xb8, 0x3e, 0x69, 0x33, 0xd7, 0x1b, 0x96, 0x53, 0x35, 0x54, 0x2f, 0xae, - 0x2d, 0x45, 0x90, 0x6c, 0x04, 0x4b, 0x76, 0xa4, 0x85, 0xcf, 0x82, 0x41, 0x7b, 0x8e, 0xdf, 0xa1, - 0xe5, 0x74, 0x2d, 0x51, 0xcf, 0xb6, 0x96, 0xf7, 0x26, 0xd5, 0x92, 0x9c, 0x39, 0xeb, 0x0d, 0x5c, - 0x46, 0x06, 0x23, 0xb6, 0x6b, 0x2b, 0x9d, 0x6b, 0xc9, 0x8c, 0x51, 0x4a, 0x5b, 0xbf, 0x21, 0xc0, - 0xb7, 0x9d, 0xc1, 0xa8, 0x4f, 0x5e, 0x1b, 0xa3, 0x10, 0x8d, 0xf8, 0xa1, 0xd1, 0x48, 0x2c, 0x8a, - 0x46, 0x94, 0x5a, 0xf2, 0xd5, 0xa9, 0x59, 0x5f, 0x42, 0x41, 0x65, 0x23, 0x39, 0x80, 0xd7, 0x5f, - 0x9b, 0x5d, 0xc5, 0xc7, 0x93, 0x2a, 0x8a, 0x18, 0x16, 0xd2, 0x0a, 0x9f, 0x11, 0x59, 0x33, 0xaa, - 0xb2, 0x3e, 0xd2, 0x90, 0x64, 0xde, 0x1c, 0x76, 0x09, 0xe5, 0x1b, 0x93, 0x3c, 0x60, 0x5b, 0xea, - 0x58, 0x5f, 0xc0, 0xd2, 0x0c, 0xa8, 0x2a, 0x8c, 0xf3, 0x60, 0x50, 0xe2, 0xbb, 0x24, 0x88, 0xa2, - 0xa4, 0x45, 0x21, 0xe6, 0x35, 0xf7, 0x42, 0xb6, 0x95, 0xfe, 0x62, 0xde, 0x7f, 0x41, 0x90, 0xbf, - 0xee, 0x6c, 0x91, 0x7e, 0x50, 0x4d, 0x0c, 0xc9, 0xa1, 0x33, 0x20, 0xaa, 0x92, 0x62, 
0x8c, 0x57, - 0xc0, 0x78, 0xe8, 0xf4, 0xc7, 0x44, 0x9a, 0xcc, 0xd8, 0x4a, 0x5a, 0x94, 0xeb, 0xe8, 0xd0, 0x5c, - 0x47, 0x61, 0x75, 0xad, 0xd3, 0x50, 0x50, 0xf1, 0x2a, 0xa0, 0xa2, 0xe0, 0x38, 0x50, 0xd9, 0x20, - 0x38, 0xeb, 0x5b, 0x04, 0x85, 0x99, 0x7a, 0x61, 0x0b, 0x8c, 0x3e, 0xdf, 0x4a, 0x65, 0x72, 0x2d, - 0xd8, 0x9b, 0x54, 0xd5, 0x8c, 0xad, 0x7e, 0x79, 0xf5, 0xc9, 0x90, 0x09, 0xdc, 0xe3, 0x02, 0xf7, - 0x95, 0x08, 0xf7, 0x8f, 0x87, 0xcc, 0xdf, 0x0d, 0x8a, 0x7f, 0x84, 0xa3, 0xc8, 0x9b, 0x8a, 0x52, - 0xb7, 0x83, 0x01, 0x3e, 0x0e, 0xc9, 0x9e, 0x43, 0x7b, 0x02, 0x94, 0x64, 0x2b, 0xb3, 0x37, 0xa9, - 0x0a, 0xd9, 0x16, 0x5f, 0xeb, 0x21, 0xe4, 0x75, 0x3b, 0xf8, 0x2a, 0x64, 0xc3, 0xfe, 0x29, 0xe2, - 0xfa, 0x77, 0x34, 0x8a, 0xca, 0x6d, 0x9c, 0x51, 0x81, 0x49, 0xb4, 0x99, 0xfb, 0xed, 0xbb, 0x43, - 0x22, 0x6a, 0x94, 0x95, 0x7e, 0xb9, 0x6c, 0x8b, 0xaf, 0x35, 0x00, 0x43, 0xd2, 0x0c, 0x9f, 0x9c, - 0xf7, 0x98, 0x68, 0x19, 0xd2, 0xa2, 0x6e, 0xad, 0x0a, 0x29, 0x01, 0xa4, 0x30, 0x87, 0x5a, 0xd9, - 0xbd, 0x49, 0x55, 0x4e, 0xd8, 0xf2, 0xe7, 0x15, 0x69, 0x7e, 0x8f, 0x40, 0xf1, 0xf2, 0xb5, 0x60, - 0xbf, 0x04, 0x69, 0x2a, 0xa2, 0x0b, 0x60, 0xd7, 0xe9, 0x2e, 0x16, 0x22, 0xc0, 0x95, 0xa2, 0x1d, - 0x0c, 0x70, 0x03, 0x40, 0x9e, 0xbc, 0xab, 0x51, 0x3c, 0xc5, 0xbd, 0x49, 0x55, 0x9b, 0xb5, 0xb5, - 0xb1, 0xf5, 0x1d, 0x82, 0xdc, 0x1d, 0xc7, 0x0d, 0x29, 0xbf, 0x0c, 0xa9, 0x07, 0xfc, 0xec, 0x29, - 0xce, 0x4b, 0x81, 0xb7, 0xb5, 0x0e, 0xe9, 0x3b, 0xbb, 0x57, 0x3c, 0x5f, 0xd8, 0x2c, 0xd8, 0xa1, - 0x1c, 0xb5, 0xfe, 0xe4, 0xbe, 0xad, 0x3f, 0xb5, 0x70, 0xb3, 0xbb, 0x96, 0xcc, 0xc4, 0x4b, 0x09, - 0xeb, 0x6b, 0x04, 0x79, 0x19, 0x99, 0x22, 0xf7, 0x25, 0x30, 0x64, 0xe0, 0x8a, 0x1a, 0x07, 0xf6, - 0x22, 0xd0, 0xfa, 0x90, 0xda, 0x82, 0x3f, 0x82, 0x62, 0xc7, 0xf7, 0x46, 0x23, 0xd2, 0xb9, 0xad, - 0x1a, 0x5a, 0x7c, 0xbe, 0xa1, 0x6d, 0xe8, 0xeb, 0xf6, 0x9c, 0xba, 0xf5, 0x2b, 0x3f, 0x42, 0xb2, - 0xb9, 0x28, 0xa8, 0xc2, 0x14, 0xd1, 0xa1, 0xfb, 0x79, 0x7c, 0xd1, 0x7e, 0xbe, 0x02, 0x46, 0xd7, - 0xf7, 0xc6, 0x23, 0x5a, 0x4e, 0xc8, 0x03, 0x2e, 0xa5, 0x05, 0xfb, 0xfc, 0x35, 0x28, 0x06, 0xa9, - 0x1c, 0xd0, 0x61, 0x2b, 0xf3, 0x1d, 0x76, 0xb3, 0x43, 0x86, 0xcc, 0xdd, 0x76, 0xc3, 0x9e, 0xa9, - 0xf4, 0xad, 0x6f, 0x10, 0x94, 0xe6, 0x55, 0xf0, 0x87, 0x1a, 0xcd, 0xb9, 0xb9, 0x53, 0x07, 0x9b, - 0x6b, 0x88, 0x0e, 0x46, 0x45, 0x1f, 0x08, 0x8e, 0x40, 0xe5, 0x02, 0xe4, 0xb4, 0x69, 0x5c, 0x82, - 0xc4, 0x0e, 0x09, 0x28, 0xc9, 0x87, 0x9c, 0x74, 0xd1, 0x89, 0xcc, 0xaa, 0x63, 0x78, 0x31, 0x7e, - 0x1e, 0x71, 0x42, 0x17, 0x66, 0x2a, 0x89, 0xcf, 0x43, 0x72, 0xdb, 0xf7, 0x06, 0x0b, 0x95, 0x49, - 0xec, 0xc0, 0x6f, 0x43, 0x9c, 0x79, 0x0b, 0x15, 0x29, 0xce, 0x3c, 0x5e, 0x23, 0x95, 0x7c, 0x42, - 0x04, 0xa7, 0x24, 0xeb, 0x67, 0x04, 0x47, 0xf8, 0x1e, 0x89, 0xc0, 0xe5, 0xde, 0x78, 0xb8, 0x83, - 0xeb, 0x50, 0xe2, 0x9e, 0xee, 0xbb, 0xea, 0x42, 0xba, 0xef, 0x76, 0x54, 0x9a, 0x45, 0x3e, 0x1f, - 0xdc, 0x53, 0x9b, 0x1d, 0xbc, 0x0a, 0xe9, 0x31, 0x95, 0x0a, 0x32, 0x67, 0x83, 0x8b, 0x9b, 0x1d, - 0x7c, 0x46, 0x73, 0xc7, 0xb1, 0xd6, 0x5e, 0x3b, 0x02, 0xc3, 0x9b, 0x8e, 0xeb, 0x87, 0xbd, 0xe5, - 0x34, 0x18, 0x6d, 0xee, 0x58, 0xf2, 0x84, 0x5f, 0x88, 0xa1, 0xb2, 0x08, 0xc8, 0x56, 0xcb, 0xd6, - 0x3b, 0x90, 0x0d, 0x77, 0xef, 0x7b, 0x0f, 0xee, 0x5b, 0x01, 0xeb, 0x18, 0xa4, 0x64, 0x62, 0x18, - 0x92, 0x1d, 0x87, 0x39, 0x62, 0x4b, 0xde, 0x16, 0x63, 0xab, 0x0c, 0x2b, 0x77, 0x7c, 0x67, 0x48, - 0xb7, 0x89, 0x2f, 0x94, 0x42, 0xfa, 0x59, 0x47, 0x61, 0x89, 0x1f, 0x75, 0xe2, 0xd3, 0xcb, 0xde, - 0x78, 0xc8, 0xd4, 0x09, 0xb3, 0xce, 0xc2, 0xf2, 0xec, 0xb4, 0x62, 0xeb, 0x32, 0xa4, 0xda, 0x7c, - 0x42, 0x58, 
0x2f, 0xd8, 0x52, 0xb0, 0x7e, 0x44, 0x80, 0x3f, 0x21, 0x4c, 0x98, 0xde, 0xdc, 0xa0, - 0xda, 0x93, 0x6c, 0xe0, 0xb0, 0x76, 0x8f, 0xf8, 0x34, 0x78, 0x92, 0x05, 0xf2, 0x7f, 0xf1, 0x24, - 0xb3, 0xce, 0xc1, 0xd2, 0x4c, 0x94, 0x2a, 0xa7, 0x0a, 0x64, 0xda, 0x6a, 0x4e, 0x5d, 0xde, 0xa1, - 0xfc, 0xff, 0x53, 0x90, 0x0d, 0x1f, 0xae, 0x38, 0x07, 0xe9, 0x2b, 0x37, 0xec, 0xbb, 0xeb, 0xf6, - 0x46, 0x29, 0x86, 0xf3, 0x90, 0x69, 0xad, 0x5f, 0xfe, 0x54, 0x48, 0x68, 0x6d, 0x1d, 0x0c, 0xfe, - 0x84, 0x27, 0x3e, 0x7e, 0x0f, 0x92, 0x7c, 0x84, 0x8f, 0x46, 0xf5, 0xd5, 0xfe, 0x35, 0x54, 0x56, - 0xe6, 0xa7, 0x55, 0x1d, 0x62, 0x6b, 0x7f, 0x27, 0x20, 0xcd, 0x1f, 0x5f, 0xfc, 0x14, 0xbf, 0x0f, - 0x29, 0xf1, 0x0e, 0xc3, 0x9a, 0xba, 0xfe, 0xda, 0xad, 0xac, 0xbe, 0x34, 0x1f, 0xd8, 0x79, 0x13, - 0xe1, 0xcf, 0x20, 0x27, 0x26, 0xd5, 0x4d, 0x7b, 0x7c, 0xfe, 0x12, 0x9b, 0xb1, 0x74, 0xe2, 0x80, - 0x55, 0xcd, 0xde, 0x45, 0x48, 0x09, 0x46, 0xea, 0xd1, 0xe8, 0xaf, 0x35, 0x3d, 0x9a, 0x99, 0x57, - 0x91, 0x15, 0xc3, 0x17, 0x20, 0xc9, 0x89, 0xa4, 0xc3, 0xa1, 0x5d, 0x7a, 0x3a, 0x1c, 0xfa, 0x8d, - 0x23, 0xdc, 0x7e, 0x10, 0xde, 0xdd, 0xab, 0xf3, 0x4d, 0x2c, 0xd8, 0x5e, 0x7e, 0x79, 0x21, 0xf4, - 0x7c, 0x43, 0x5e, 0x62, 0x01, 0x85, 0xf1, 0x89, 0x59, 0x57, 0x73, 0x8c, 0xaf, 0x98, 0x07, 0x2d, - 0x87, 0x06, 0xaf, 0x43, 0x4e, 0xa3, 0x8f, 0x0e, 0xeb, 0xcb, 0xdc, 0xd7, 0x61, 0xdd, 0x87, 0x73, - 0x56, 0x6c, 0xed, 0x73, 0xc8, 0x04, 0x3d, 0x06, 0xdf, 0x82, 0xe2, 0xec, 0xf1, 0xc4, 0xff, 0xd3, - 0xa2, 0x99, 0x6d, 0x5c, 0x95, 0x9a, 0xb6, 0xb4, 0xff, 0x99, 0x8e, 0xd5, 0x51, 0xeb, 0xde, 0x93, - 0x67, 0x66, 0xec, 0xe9, 0x33, 0x33, 0xf6, 0xe2, 0x99, 0x89, 0xbe, 0x9a, 0x9a, 0xe8, 0xa7, 0xa9, - 0x89, 0x1e, 0x4f, 0x4d, 0xf4, 0x64, 0x6a, 0xa2, 0x3f, 0xa7, 0x26, 0xfa, 0x6b, 0x6a, 0xc6, 0x5e, - 0x4c, 0x4d, 0xf4, 0xe8, 0xb9, 0x19, 0x7b, 0xf2, 0xdc, 0x8c, 0x3d, 0x7d, 0x6e, 0xc6, 0xee, 0x9d, - 0xd4, 0xff, 0x33, 0xfb, 0xce, 0xb6, 0x33, 0x74, 0x9a, 0x7d, 0x6f, 0xc7, 0x6d, 0xea, 0xff, 0xc9, - 0xb7, 0x0c, 0xf1, 0xf3, 0xd6, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xae, 0x29, 0xb8, 0x0d, 0xaa, - 0x0f, 0x00, 0x00, + // 1436 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x4b, 0x8f, 0x13, 0x47, + 0x10, 0x76, 0xdb, 0xe3, 0x59, 0xbb, 0xfc, 0xc0, 0xea, 0x5d, 0x76, 0x8d, 0x81, 0xb1, 0x35, 0x42, + 0x60, 0x05, 0xb0, 0xc3, 0xe6, 0xc5, 0x23, 0x0f, 0xad, 0xd9, 0x10, 0x96, 0xa0, 0x00, 0x03, 0x12, + 0x12, 0x52, 0x84, 0x66, 0xed, 0x5e, 0x7b, 0xb4, 0xb6, 0xc7, 0x4c, 0xb7, 0x91, 0x56, 0x8a, 0x94, + 0xfc, 0x80, 0x44, 0x22, 0xa7, 0x28, 0xca, 0x35, 0x87, 0x28, 0x87, 0xfc, 0x0e, 0x72, 0x43, 0x39, + 0xa1, 0x1c, 0x9c, 0x60, 0x2e, 0x91, 0x95, 0x03, 0x3f, 0x21, 0xea, 0xc7, 0x8c, 0xdb, 0x66, 0x37, + 0xc1, 0x5c, 0x72, 0xb1, 0xbb, 0xaa, 0xab, 0xab, 0xab, 0xbe, 0xfa, 0xba, 0xba, 0x07, 0x8e, 0x0e, + 0x76, 0xdb, 0xf5, 0xae, 0xdf, 0x1e, 0x04, 0x3e, 0xf3, 0xa3, 0x41, 0x4d, 0xfc, 0xe2, 0x54, 0x28, + 0x97, 0xca, 0x6d, 0xdf, 0x6f, 0x77, 0x49, 0x5d, 0x48, 0xdb, 0xc3, 0x9d, 0x3a, 0xf3, 0x7a, 0x84, + 0x32, 0xb7, 0x37, 0x90, 0xa6, 0xa5, 0xb3, 0x6d, 0x8f, 0x75, 0x86, 0xdb, 0xb5, 0xa6, 0xdf, 0xab, + 0xb7, 0xfd, 0xb6, 0x3f, 0xb5, 0xe4, 0x92, 0xf4, 0xce, 0x47, 0xca, 0xbc, 0xa2, 0xb6, 0x7d, 0xd0, + 0xed, 0xf9, 0x2d, 0xd2, 0xad, 0x53, 0xe6, 0x32, 0x2a, 0x7f, 0xa5, 0x85, 0x7d, 0x17, 0x32, 0x37, + 0x87, 0xb4, 0xe3, 0x90, 0x07, 0x43, 0x42, 0x19, 0xbe, 0x0a, 0x4b, 0x94, 0x05, 0xc4, 0xed, 0xd1, + 0x22, 0xaa, 0x24, 0xaa, 0x99, 0xf5, 0xb5, 0x5a, 0x14, 0xec, 0x6d, 0x31, 0xb1, 0xd1, 0x72, 0x07, + 0x8c, 0x04, 0x8d, 0xc3, 0xbf, 0x8f, 0xca, 0xa6, 0x54, 0x4d, 0x46, 
0xe5, 0x70, 0x95, 0x13, 0x0e, + 0xec, 0x3c, 0x64, 0xa5, 0x63, 0x3a, 0xf0, 0xfb, 0x94, 0xd8, 0x3f, 0xc4, 0x21, 0x7b, 0x6b, 0x48, + 0x82, 0xbd, 0x70, 0xab, 0x12, 0xa4, 0x28, 0xe9, 0x92, 0x26, 0xf3, 0x83, 0x22, 0xaa, 0xa0, 0x6a, + 0xda, 0x89, 0x64, 0xbc, 0x02, 0xc9, 0xae, 0xd7, 0xf3, 0x58, 0x31, 0x5e, 0x41, 0xd5, 0x9c, 0x23, + 0x05, 0x7c, 0x11, 0x92, 0x94, 0xb9, 0x01, 0x2b, 0x26, 0x2a, 0xa8, 0x9a, 0x59, 0x2f, 0xd5, 0x24, + 0x5a, 0xb5, 0x10, 0x83, 0xda, 0x9d, 0x10, 0xad, 0x46, 0xea, 0xf1, 0xa8, 0x1c, 0x7b, 0xf4, 0x47, + 0x19, 0x39, 0x72, 0x09, 0x7e, 0x17, 0x12, 0xa4, 0xdf, 0x2a, 0x1a, 0x0b, 0xac, 0xe4, 0x0b, 0xf0, + 0x39, 0x48, 0xb7, 0xbc, 0x80, 0x34, 0x99, 0xe7, 0xf7, 0x8b, 0xc9, 0x0a, 0xaa, 0xe6, 0xd7, 0x97, + 0xa7, 0x90, 0x6c, 0x86, 0x53, 0xce, 0xd4, 0x0a, 0x9f, 0x01, 0x93, 0x76, 0xdc, 0xa0, 0x45, 0x8b, + 0x4b, 0x95, 0x44, 0x35, 0xdd, 0x58, 0x99, 0x8c, 0xca, 0x05, 0xa9, 0x39, 0xe3, 0xf7, 0x3c, 0x46, + 0x7a, 0x03, 0xb6, 0xe7, 0x28, 0x9b, 0x6b, 0x46, 0xca, 0x2c, 0x2c, 0xd9, 0xbf, 0x21, 0xc0, 0xb7, + 0xdd, 0xde, 0xa0, 0x4b, 0x5e, 0x19, 0xa3, 0x08, 0x8d, 0xf8, 0x6b, 0xa3, 0x91, 0x58, 0x14, 0x8d, + 0x69, 0x6a, 0xc6, 0x7f, 0xa7, 0x66, 0x7f, 0x09, 0x39, 0x95, 0x8d, 0xe4, 0x00, 0xde, 0x78, 0x65, + 0x76, 0xe5, 0x1f, 0x8f, 0xca, 0x68, 0xca, 0xb0, 0x88, 0x56, 0xf8, 0xb4, 0xc8, 0x9a, 0x51, 0x95, + 0xf5, 0xa1, 0x9a, 0x24, 0xf3, 0x56, 0xbf, 0x4d, 0x28, 0x5f, 0x68, 0xf0, 0x80, 0x1d, 0x69, 0x63, + 0x7f, 0x01, 0xcb, 0x33, 0xa0, 0xaa, 0x30, 0xce, 0x83, 0x49, 0x49, 0xe0, 0x91, 0x30, 0x8a, 0x82, + 0x16, 0x85, 0xd0, 0x6b, 0xdb, 0x0b, 0xd9, 0x51, 0xf6, 0x8b, 0xed, 0xfe, 0x0b, 0x82, 0xec, 0x75, + 0x77, 0x9b, 0x74, 0xc3, 0x6a, 0x62, 0x30, 0xfa, 0x6e, 0x8f, 0xa8, 0x4a, 0x8a, 0x31, 0x5e, 0x05, + 0xf3, 0xa1, 0xdb, 0x1d, 0x12, 0xe9, 0x32, 0xe5, 0x28, 0x69, 0x51, 0xae, 0xa3, 0xd7, 0xe6, 0x3a, + 0x8a, 0xaa, 0x6b, 0x9f, 0x82, 0x9c, 0x8a, 0x57, 0x01, 0x35, 0x0d, 0x8e, 0x03, 0x95, 0x0e, 0x83, + 0xb3, 0xbf, 0x45, 0x90, 0x9b, 0xa9, 0x17, 0xb6, 0xc1, 0xec, 0xf2, 0xa5, 0x54, 0x26, 0xd7, 0x80, + 0xc9, 0xa8, 0xac, 0x34, 0x8e, 0xfa, 0xe7, 0xd5, 0x27, 0x7d, 0x26, 0x70, 0x8f, 0x0b, 0xdc, 0x57, + 0xa7, 0xb8, 0x7f, 0xdc, 0x67, 0xc1, 0x5e, 0x58, 0xfc, 0x43, 0x1c, 0x45, 0xde, 0x54, 0x94, 0xb9, + 0x13, 0x0e, 0xf0, 0x11, 0x30, 0x3a, 0x2e, 0xed, 0x08, 0x50, 0x8c, 0x46, 0x72, 0x32, 0x2a, 0xa3, + 0xb3, 0x8e, 0x50, 0xd9, 0x0f, 0x21, 0xab, 0x3b, 0xc1, 0x57, 0x21, 0x1d, 0x35, 0x4f, 0x11, 0xd4, + 0xbf, 0x43, 0x91, 0x57, 0x7b, 0xc6, 0x19, 0x15, 0x80, 0x4c, 0x17, 0xe3, 0x63, 0x60, 0x74, 0xbd, + 0x3e, 0x11, 0x05, 0x4a, 0x37, 0x52, 0x93, 0x51, 0x59, 0xc8, 0x8e, 0xf8, 0xb5, 0x7b, 0x60, 0x4a, + 0x8e, 0xe1, 0x13, 0xf3, 0x3b, 0x26, 0x1a, 0xa6, 0xf4, 0xa8, 0x7b, 0x2b, 0x43, 0x52, 0xa0, 0x28, + 0xdc, 0xa1, 0x46, 0x7a, 0x32, 0x2a, 0x4b, 0x85, 0x23, 0xff, 0xf8, 0x76, 0x5a, 0x8e, 0x62, 0x3b, + 0x2e, 0xab, 0x34, 0xbf, 0x47, 0xa0, 0x48, 0xf9, 0x4a, 0x98, 0x5f, 0x82, 0x25, 0x2a, 0xa2, 0x0b, + 0x31, 0xd7, 0xb9, 0x2e, 0x26, 0xa6, 0x68, 0x2b, 0x43, 0x27, 0x1c, 0xe0, 0x1a, 0x80, 0x3c, 0x76, + 0x57, 0xa7, 0xf1, 0xe4, 0x27, 0xa3, 0xb2, 0xa6, 0x75, 0xb4, 0xb1, 0xfd, 0x1d, 0x82, 0xcc, 0x1d, + 0xd7, 0x8b, 0xf8, 0xbe, 0x02, 0xc9, 0x07, 0xfc, 0xe0, 0x29, 0xc2, 0x4b, 0x81, 0xf7, 0xb4, 0x16, + 0xe9, 0xba, 0x7b, 0x57, 0xfc, 0x40, 0xf8, 0xcc, 0x39, 0x91, 0x3c, 0xed, 0xfb, 0xc6, 0xbe, 0x7d, + 0x3f, 0xb9, 0x70, 0xa7, 0xbb, 0x66, 0xa4, 0xe2, 0x85, 0x84, 0xfd, 0x35, 0x82, 0xac, 0x8c, 0x4c, + 0x31, 0xfb, 0x12, 0x98, 0x32, 0x70, 0x45, 0x8d, 0x03, 0x1b, 0x11, 0x68, 0x4d, 0x48, 0x2d, 0xc1, + 0x1f, 0x41, 0xbe, 0x15, 0xf8, 0x83, 0x01, 0x69, 0xdd, 0x56, 0xdd, 0x2c, 0x3e, 0xdf, 0xcd, 
0x36, + 0xf5, 0x79, 0x67, 0xce, 0xdc, 0xfe, 0x95, 0x9f, 0x1f, 0xd9, 0x59, 0x14, 0x54, 0x51, 0x8a, 0xe8, + 0xb5, 0x9b, 0x79, 0x7c, 0xd1, 0x66, 0xbe, 0x0a, 0x66, 0x3b, 0xf0, 0x87, 0x03, 0x5a, 0x4c, 0xc8, + 0xd3, 0x2d, 0xa5, 0x05, 0x9b, 0xfc, 0x35, 0xc8, 0x87, 0xa9, 0x1c, 0xd0, 0x5e, 0x4b, 0xf3, 0xed, + 0x75, 0xab, 0x45, 0xfa, 0xcc, 0xdb, 0xf1, 0xa2, 0x86, 0xa9, 0xec, 0xed, 0x6f, 0x10, 0x14, 0xe6, + 0x4d, 0xf0, 0x87, 0x1a, 0xcd, 0xb9, 0xbb, 0x93, 0x07, 0xbb, 0xab, 0x89, 0xf6, 0x45, 0x45, 0x1f, + 0x08, 0x8f, 0x40, 0xe9, 0x02, 0x64, 0x34, 0x35, 0x2e, 0x40, 0x62, 0x97, 0x84, 0x94, 0xe4, 0x43, + 0x4e, 0xba, 0xe9, 0x89, 0x4c, 0xab, 0x63, 0x78, 0x31, 0x7e, 0x1e, 0x71, 0x42, 0xe7, 0x66, 0x2a, + 0x89, 0xcf, 0x83, 0xb1, 0x13, 0xf8, 0xbd, 0x85, 0xca, 0x24, 0x56, 0xe0, 0xb7, 0x21, 0xce, 0xfc, + 0x85, 0x8a, 0x14, 0x67, 0x3e, 0xaf, 0x91, 0x4a, 0x3e, 0x21, 0x82, 0x53, 0x92, 0xfd, 0x33, 0x82, + 0x43, 0x7c, 0x8d, 0x44, 0xe0, 0x72, 0x67, 0xd8, 0xdf, 0xc5, 0x55, 0x28, 0xf0, 0x9d, 0xee, 0x7b, + 0xea, 0x36, 0xba, 0xef, 0xb5, 0x54, 0x9a, 0x79, 0xae, 0x0f, 0x2f, 0xa9, 0xad, 0x16, 0x5e, 0x83, + 0xa5, 0x21, 0x95, 0x06, 0x32, 0x67, 0x93, 0x8b, 0x5b, 0x2d, 0x7c, 0x5a, 0xdb, 0x8e, 0x63, 0xad, + 0x3d, 0x75, 0x04, 0x86, 0x37, 0x5d, 0x2f, 0x88, 0x7a, 0xcb, 0x29, 0x30, 0x9b, 0x7c, 0x63, 0xc9, + 0x13, 0x7e, 0x1b, 0x46, 0xc6, 0x22, 0x20, 0x47, 0x4d, 0xdb, 0xef, 0x40, 0x3a, 0x5a, 0xbd, 0xef, + 0x25, 0xb8, 0x6f, 0x05, 0xec, 0xa3, 0x90, 0x94, 0x89, 0x61, 0x30, 0x5a, 0x2e, 0x73, 0xc5, 0x92, + 0xac, 0x23, 0xc6, 0x76, 0x11, 0x56, 0xef, 0x04, 0x6e, 0x9f, 0xee, 0x90, 0x40, 0x18, 0x45, 0xf4, + 0xb3, 0x0f, 0xc3, 0x32, 0x3f, 0xea, 0x24, 0xa0, 0x97, 0xfd, 0x61, 0x9f, 0xa9, 0x13, 0x66, 0x9f, + 0x81, 0x95, 0x59, 0xb5, 0x62, 0xeb, 0x0a, 0x24, 0x9b, 0x5c, 0x21, 0xbc, 0xe7, 0x1c, 0x29, 0xd8, + 0x3f, 0x22, 0xc0, 0x9f, 0x10, 0x26, 0x5c, 0x6f, 0x6d, 0x52, 0xed, 0x3d, 0xd6, 0x73, 0x59, 0xb3, + 0x43, 0x02, 0x1a, 0xbe, 0xc7, 0x42, 0xf9, 0xff, 0x78, 0x8f, 0xd9, 0xe7, 0x60, 0x79, 0x26, 0x4a, + 0x95, 0x53, 0x09, 0x52, 0x4d, 0xa5, 0x53, 0x37, 0x77, 0x24, 0xbf, 0x71, 0x12, 0xd2, 0xd1, 0xab, + 0x15, 0x67, 0x60, 0xe9, 0xca, 0x0d, 0xe7, 0xee, 0x86, 0xb3, 0x59, 0x88, 0xe1, 0x2c, 0xa4, 0x1a, + 0x1b, 0x97, 0x3f, 0x15, 0x12, 0x5a, 0xdf, 0x00, 0x93, 0xbf, 0xdf, 0x49, 0x80, 0xdf, 0x03, 0x83, + 0x8f, 0xf0, 0xe1, 0x69, 0x7d, 0xb5, 0x4f, 0x86, 0xd2, 0xea, 0xbc, 0x5a, 0xd5, 0x21, 0xb6, 0xfe, + 0x77, 0x02, 0x96, 0xf8, 0xcb, 0x8b, 0x9f, 0xe2, 0xf7, 0x21, 0x29, 0x1e, 0x61, 0x58, 0x33, 0xd7, + 0x9f, 0xba, 0xa5, 0xb5, 0x97, 0xf4, 0xa1, 0x9f, 0x37, 0x11, 0xfe, 0x0c, 0x32, 0x42, 0xa9, 0x6e, + 0xda, 0x63, 0xf3, 0x97, 0xd8, 0x8c, 0xa7, 0xe3, 0x07, 0xcc, 0x6a, 0xfe, 0x2e, 0x42, 0x52, 0x30, + 0x52, 0x8f, 0x46, 0x7f, 0xaa, 0xe9, 0xd1, 0xcc, 0x3c, 0x89, 0xec, 0x18, 0xbe, 0x00, 0x06, 0x27, + 0x92, 0x0e, 0x87, 0x76, 0xe9, 0xe9, 0x70, 0xe8, 0x37, 0x8e, 0xd8, 0xf6, 0x83, 0xe8, 0xee, 0x5e, + 0x9b, 0x6f, 0x62, 0xe1, 0xf2, 0xe2, 0xcb, 0x13, 0xd1, 0xce, 0x37, 0xe4, 0x25, 0x16, 0x52, 0x18, + 0x1f, 0x9f, 0xdd, 0x6a, 0x8e, 0xf1, 0x25, 0xeb, 0xa0, 0xe9, 0xc8, 0xe1, 0x75, 0xc8, 0x68, 0xf4, + 0xd1, 0x61, 0x7d, 0x99, 0xfb, 0x3a, 0xac, 0xfb, 0x70, 0xce, 0x8e, 0xad, 0x7f, 0x0e, 0xa9, 0xb0, + 0xc7, 0xe0, 0x5b, 0x90, 0x9f, 0x3d, 0x9e, 0xf8, 0x88, 0x16, 0xcd, 0x6c, 0xe3, 0x2a, 0x55, 0xb4, + 0xa9, 0xfd, 0xcf, 0x74, 0xac, 0x8a, 0x1a, 0xf7, 0x9e, 0x3c, 0xb3, 0x62, 0x4f, 0x9f, 0x59, 0xb1, + 0x17, 0xcf, 0x2c, 0xf4, 0xd5, 0xd8, 0x42, 0x3f, 0x8d, 0x2d, 0xf4, 0x78, 0x6c, 0xa1, 0x27, 0x63, + 0x0b, 0xfd, 0x39, 0xb6, 0xd0, 0x5f, 0x63, 0x2b, 0xf6, 0x62, 0x6c, 0xa1, 0x47, 0xcf, 0xad, 0xd8, + 0x93, 0xe7, 0x56, 
0xec, 0xe9, 0x73, 0x2b, 0x76, 0xef, 0x84, 0xfe, 0xc1, 0x1c, 0xb8, 0x3b, 0x6e, + 0xdf, 0xad, 0x77, 0xfd, 0x5d, 0xaf, 0xae, 0x7f, 0x90, 0x6f, 0x9b, 0xe2, 0xef, 0xad, 0x7f, 0x02, + 0x00, 0x00, 0xff, 0xff, 0x9b, 0xeb, 0xf2, 0xbd, 0xa7, 0x0f, 0x00, 0x00, } func (x Direction) String() string { diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index 7d2041116085..21c4d415ff33 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -83,7 +83,7 @@ message StreamAdapter { string labels = 1 [(gogoproto.jsontag) = "labels"]; repeated EntryAdapter entries = 2 [(gogoproto.nullable) = false, (gogoproto.jsontag) = "entries"]; // hash contains the original hash of the stream. - uint64 hash = 3 [(gogoproto.jsontag) = "hash"]; + uint64 hash = 3 [(gogoproto.jsontag) = "-"]; } message EntryAdapter { diff --git a/pkg/logproto/types.go b/pkg/logproto/types.go index 1dd1822e35d5..554ca79c469f 100644 --- a/pkg/logproto/types.go +++ b/pkg/logproto/types.go @@ -12,7 +12,7 @@ import ( type Stream struct { Labels string `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels"` Entries []Entry `protobuf:"bytes,2,rep,name=entries,proto3,customtype=EntryAdapter" json:"entries"` - Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"hash"` + Hash uint64 `protobuf:"varint,3,opt,name=hash,proto3" json:"-"` } // Entry is a log entry with a timestamp. From 5b9d4dea5976ea0f10476a8e850e2c5bdb105d7a Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 1 Feb 2022 10:56:42 +0100 Subject: [PATCH 12/14] Add more regression tests. Signed-off-by: Cyril Tovena --- pkg/iter/entry_iterator_test.go | 48 +++++++++++++++++ pkg/iter/sample_iterator_test.go | 90 ++++++++++++++++++++++++-------- 2 files changed, 117 insertions(+), 21 deletions(-) diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index 7a6038948219..3db309288763 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -306,6 +306,54 @@ func TestMergeIteratorDeduplication(t *testing.T) { assertIt(it, true, len(foo.Entries)) } +func TestMergeIteratorWithoutLabels(t *testing.T) { + foo := logproto.Stream{ + Labels: ``, + Hash: hashLabels(`{app="foo"}`), + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 1), Line: "1"}, + {Timestamp: time.Unix(0, 2), Line: "2"}, + {Timestamp: time.Unix(0, 3), Line: "3"}, + }, + } + bar := logproto.Stream{ + Labels: `{some="other"}`, + Hash: hashLabels(`{app="bar"}`), + Entries: []logproto.Entry{ + {Timestamp: time.Unix(0, 1), Line: "1"}, + {Timestamp: time.Unix(0, 2), Line: "2"}, + {Timestamp: time.Unix(0, 3), Line: "3"}, + }, + } + + // forward iteration + it := NewMergeEntryIterator(context.Background(), []EntryIterator{ + NewStreamIterator(foo), + NewStreamIterator(bar), + NewStreamIterator(foo), + NewStreamIterator(bar), + NewStreamIterator(foo), + NewStreamIterator(bar), + NewStreamIterator(foo), + }, logproto.FORWARD) + + for i := 0; i < 3; i++ { + + require.True(t, it.Next()) + require.NoError(t, it.Error()) + require.Equal(t, bar.Labels, it.Labels()) + require.Equal(t, bar.Entries[i], it.Entry()) + + require.True(t, it.Next()) + require.NoError(t, it.Error()) + require.Equal(t, foo.Labels, it.Labels()) + require.Equal(t, foo.Entries[i], it.Entry()) + + } + require.False(t, it.Next()) + require.NoError(t, it.Error()) +} + func mustReverseStreamIterator(it EntryIterator) EntryIterator { reversed, err := NewReversedIter(it, 0, true) if err != nil { diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go 
index 384d0fa73ad3..bcfccd9fa0dd 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -107,28 +107,76 @@ var carSeries = logproto.Series{ } func TestNewMergeSampleIterator(t *testing.T) { - it := NewMergeSampleIterator(context.Background(), - []SampleIterator{ - NewSeriesIterator(varSeries), - NewSeriesIterator(carSeries), - NewSeriesIterator(carSeries), - NewSeriesIterator(varSeries), - NewSeriesIterator(carSeries), - NewSeriesIterator(varSeries), - NewSeriesIterator(carSeries), - }) + t.Run("with labels", func(t *testing.T) { + it := NewMergeSampleIterator(context.Background(), + []SampleIterator{ + NewSeriesIterator(varSeries), + NewSeriesIterator(carSeries), + NewSeriesIterator(carSeries), + NewSeriesIterator(varSeries), + NewSeriesIterator(carSeries), + NewSeriesIterator(varSeries), + NewSeriesIterator(carSeries), + }) - for i := 1; i < 4; i++ { - require.True(t, it.Next(), i) - require.Equal(t, `{foo="car"}`, it.Labels(), i) - require.Equal(t, sample(i), it.Sample(), i) - require.True(t, it.Next(), i) - require.Equal(t, `{foo="var"}`, it.Labels(), i) - require.Equal(t, sample(i), it.Sample(), i) - } - require.False(t, it.Next()) - require.NoError(t, it.Error()) - require.NoError(t, it.Close()) + for i := 1; i < 4; i++ { + require.True(t, it.Next(), i) + require.Equal(t, `{foo="car"}`, it.Labels(), i) + require.Equal(t, sample(i), it.Sample(), i) + require.True(t, it.Next(), i) + require.Equal(t, `{foo="var"}`, it.Labels(), i) + require.Equal(t, sample(i), it.Sample(), i) + } + require.False(t, it.Next()) + require.NoError(t, it.Error()) + require.NoError(t, it.Close()) + }) + t.Run("no labels", func(t *testing.T) { + it := NewMergeSampleIterator(context.Background(), + []SampleIterator{ + NewSeriesIterator(logproto.Series{ + Labels: ``, + StreamHash: carSeries.StreamHash, + Samples: carSeries.Samples, + }), + NewSeriesIterator(logproto.Series{ + Labels: ``, + StreamHash: varSeries.StreamHash, + Samples: varSeries.Samples, + }), NewSeriesIterator(logproto.Series{ + Labels: ``, + StreamHash: carSeries.StreamHash, + Samples: carSeries.Samples, + }), + NewSeriesIterator(logproto.Series{ + Labels: ``, + StreamHash: varSeries.StreamHash, + Samples: varSeries.Samples, + }), + NewSeriesIterator(logproto.Series{ + Labels: ``, + StreamHash: carSeries.StreamHash, + Samples: carSeries.Samples, + }), + NewSeriesIterator(logproto.Series{ + Labels: ``, + StreamHash: varSeries.StreamHash, + Samples: varSeries.Samples, + }), + }) + + for i := 1; i < 4; i++ { + require.True(t, it.Next(), i) + require.Equal(t, ``, it.Labels(), i) + require.Equal(t, sample(i), it.Sample(), i) + require.True(t, it.Next(), i) + require.Equal(t, ``, it.Labels(), i) + require.Equal(t, sample(i), it.Sample(), i) + } + require.False(t, it.Next()) + require.NoError(t, it.Error()) + require.NoError(t, it.Close()) + }) } type fakeSampleClient struct { From b699d81d0716bd0cd1b3b403560895b80ad61af3 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 1 Feb 2022 13:37:36 +0100 Subject: [PATCH 13/14] Update CHANGELOG.md Signed-off-by: Cyril Tovena --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa10295d3e4a..adc47e8bf655 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,6 @@ ## Main +* [5289](https://github.com/grafana/loki/pull/5289) **ctovena**: Fix deduplication bug in queries when mutating labels. * [5280](https://github.com/grafana/loki/pull/5280) **jeschkies**: Fix Docker target connection loss. 
* [5243](https://github.com/grafana/loki/pull/5243) **owen-d**: moves `querier.split-queries-by-interval` to limits code only. * [5139](https://github.com/grafana/loki/pull/5139) **DylanGuedes**: Drop support for legacy configuration rules format. From 8fa75657dca203248272dd14ec8528ed807f03a0 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 4 Feb 2022 08:58:22 +0100 Subject: [PATCH 14/14] Review feedback --- pkg/iter/entry_iterator.go | 14 +++++++------- pkg/iter/sample_iterator_test.go | 1 - 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 8907ab5f4ff8..b93e03fa329e 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -74,8 +74,8 @@ func (h *iteratorHeap) Pop() interface{} { type iteratorSortHeap struct { iteratorHeap - byAlphabetical bool - ascendingTime bool + byAlphabetical bool + byAscendingTime bool } func (h iteratorSortHeap) Less(i, j int) bool { @@ -86,7 +86,7 @@ func (h iteratorSortHeap) Less(i, j int) bool { } return h.iteratorHeap[i].StreamHash() < h.iteratorHeap[j].StreamHash() } - if h.ascendingTime { + if h.byAscendingTime { return t1 < t2 } return t1 > t2 @@ -124,9 +124,9 @@ func NewMergeEntryIterator(ctx context.Context, is []EntryIterator, direction lo result := &mergeEntryIterator{is: is, stats: stats.FromContext(ctx)} switch direction { case logproto.BACKWARD: - result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: false} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), byAscendingTime: false} case logproto.FORWARD: - result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: true} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), byAscendingTime: true} default: panic("bad direction") } @@ -320,9 +320,9 @@ func NewSortEntryIterator(is []EntryIterator, direction logproto.Direction) Entr result := &entrySortIterator{is: is} switch direction { case logproto.BACKWARD: - result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: false, byAlphabetical: true} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), byAscendingTime: false, byAlphabetical: true} case logproto.FORWARD: - result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), ascendingTime: true, byAlphabetical: true} + result.heap = &iteratorSortHeap{iteratorHeap: make([]EntryIterator, 0, len(is)), byAscendingTime: true, byAlphabetical: true} default: panic("bad direction") } diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index bcfccd9fa0dd..4aed2e10d79c 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -220,7 +220,6 @@ func TestNewSampleQueryClientIterator(t *testing.T) { } func TestNewNonOverlappingSampleIterator(t *testing.T) { - // todo fix this test it := NewNonOverlappingSampleIterator([]SampleIterator{ NewSeriesIterator(varSeries), NewSeriesIterator(logproto.Series{