// Loki — "Like Prometheus, but for logs."
// File: loki/pkg/iter/entry_iterator_test.go
package iter
import (
"context"
"fmt"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/atomic"
"github.com/grafana/loki/pkg/logproto"
"github.com/grafana/loki/pkg/logqlmodel/stats"
)
const (
	// testSize is the number of entries each generated test stream contains.
	testSize = 10
	// defaultLabels is the label set used by tests that don't care about labels.
	defaultLabels = "{foo=\"baz\"}"
)
// TestIterator verifies that stream and heap iterators yield the expected
// entries, labels, and total lengths for a variety of generator setups,
// including forward/backward iteration and dedupe of overlapping streams.
func TestIterator(t *testing.T) {
	for i, tc := range []struct {
		iterator  EntryIterator
		generator generator // produces the expected entry for position i
		length    int64     // total number of entries the iterator must yield
		labels    string    // labels expected on every entry
	}{
		// Test basic identity.
		{
			iterator:  mkStreamIterator(identity, defaultLabels),
			generator: identity,
			length:    testSize,
			labels:    defaultLabels,
		},
		// Test basic identity (backwards).
		{
			iterator:  mkStreamIterator(inverse(identity), defaultLabels),
			generator: inverse(identity),
			length:    testSize,
			labels:    defaultLabels,
		},
		// Test dedupe of overlapping iterators with the heap iterator.
		{
			iterator: NewHeapIterator(context.Background(), []EntryIterator{
				mkStreamIterator(offset(0, identity), defaultLabels),
				mkStreamIterator(offset(testSize/2, identity), defaultLabels),
				mkStreamIterator(offset(testSize, identity), defaultLabels),
			}, logproto.FORWARD),
			generator: identity,
			length:    2 * testSize,
			labels:    defaultLabels,
		},
		// Test dedupe of overlapping iterators with the heap iterator (backward).
		{
			iterator: NewHeapIterator(context.Background(), []EntryIterator{
				mkStreamIterator(inverse(offset(0, identity)), defaultLabels),
				mkStreamIterator(inverse(offset(-testSize/2, identity)), defaultLabels),
				mkStreamIterator(inverse(offset(-testSize, identity)), defaultLabels),
			}, logproto.BACKWARD),
			generator: inverse(identity),
			length:    2 * testSize,
			labels:    defaultLabels,
		},
		// Test dedupe of entries with the same timestamp but different entries.
		{
			iterator: NewHeapIterator(context.Background(), []EntryIterator{
				mkStreamIterator(offset(0, constant(0)), defaultLabels),
				mkStreamIterator(offset(0, constant(0)), defaultLabels),
				mkStreamIterator(offset(testSize, constant(0)), defaultLabels),
			}, logproto.FORWARD),
			generator: constant(0),
			length:    2 * testSize,
			labels:    defaultLabels,
		},
		// Test basic identity with non-default labels.
		{
			iterator:  mkStreamIterator(identity, "{foobar: \"bazbar\"}"),
			generator: identity,
			length:    testSize,
			labels:    "{foobar: \"bazbar\"}",
		},
	} {
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			for i := int64(0); i < tc.length; i++ {
				assert.True(t, tc.iterator.Next())
				assert.Equal(t, tc.generator(i), tc.iterator.Entry(), fmt.Sprintln("iteration", i))
				assert.Equal(t, tc.labels, tc.iterator.Labels(), fmt.Sprintln("iteration", i))
			}
			// After the expected length the iterator must be exhausted,
			// error-free, and close cleanly. assert.NoError is used instead of
			// assert.Equal(t, nil, err) to avoid the typed-nil interface trap.
			assert.False(t, tc.iterator.Next())
			assert.NoError(t, tc.iterator.Error())
			assert.NoError(t, tc.iterator.Close())
		})
	}
}
// TestIteratorMultipleLabels verifies heap-iterator merge order when streams
// differ only by labels: the expected label for each position is computed by
// a per-case function of the iteration index.
func TestIteratorMultipleLabels(t *testing.T) {
	for i, tc := range []struct {
		iterator  EntryIterator
		generator generator           // expected entry for position i
		length    int64               // total entries expected
		labels    func(int64) string  // expected labels for position i
	}{
		// Test merging with differing labels but same timestamps and values.
		{
			iterator: NewHeapIterator(context.Background(), []EntryIterator{
				mkStreamIterator(identity, "{foobar: \"baz1\"}"),
				mkStreamIterator(identity, "{foobar: \"baz2\"}"),
			}, logproto.FORWARD),
			generator: func(i int64) logproto.Entry {
				return identity(i / 2)
			},
			length: testSize * 2,
			labels: func(i int64) string {
				// Streams alternate entry-by-entry at equal timestamps.
				if i%2 == 0 {
					return "{foobar: \"baz1\"}"
				}
				return "{foobar: \"baz2\"}"
			},
		},
		// Test merging with differing labels but all the same timestamps and different values.
		{
			iterator: NewHeapIterator(context.Background(), []EntryIterator{
				mkStreamIterator(constant(0), "{foobar: \"baz1\"}"),
				mkStreamIterator(constant(0), "{foobar: \"baz2\"}"),
			}, logproto.FORWARD),
			generator: func(i int64) logproto.Entry {
				return constant(0)(i % testSize)
			},
			length: testSize * 2,
			labels: func(i int64) string {
				// The first stream drains fully before the second starts.
				if i/testSize == 0 {
					return "{foobar: \"baz1\"}"
				}
				return "{foobar: \"baz2\"}"
			},
		},
	} {
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			for i := int64(0); i < tc.length; i++ {
				assert.True(t, tc.iterator.Next())
				assert.Equal(t, tc.generator(i), tc.iterator.Entry(), fmt.Sprintln("iteration", i))
				assert.Equal(t, tc.labels(i), tc.iterator.Labels(), fmt.Sprintln("iteration", i))
			}
			// assert.NoError avoids the typed-nil comparison of assert.Equal(t, nil, err).
			assert.False(t, tc.iterator.Next())
			assert.NoError(t, tc.iterator.Error())
			assert.NoError(t, tc.iterator.Close())
		})
	}
}
// TestHeapIteratorPrefetch checks that the heap iterator lazily prefetches its
// sub-iterators no matter which method (Len, Peek, or Next) is called first.
func TestHeapIteratorPrefetch(t *testing.T) {
	t.Parallel()
	// tester exercises one entry point of a freshly-built HeapIterator.
	type tester func(t *testing.T, i HeapIterator)
	tests := map[string]tester{
		"prefetch on Len() when called as first method": func(t *testing.T, i HeapIterator) {
			assert.Equal(t, 2, i.Len())
		},
		"prefetch on Peek() when called as first method": func(t *testing.T, i HeapIterator) {
			assert.Equal(t, time.Unix(0, 0), i.Peek())
		},
		"prefetch on Next() when called as first method": func(t *testing.T, i HeapIterator) {
			assert.True(t, i.Next())
			assert.Equal(t, logproto.Entry{Timestamp: time.Unix(0, 0), Line: "0"}, i.Entry())
		},
	}
	for testName, testFunc := range tests {
		// Capture the range variable for the parallel subtest (pre-Go 1.22 semantics).
		testFunc := testFunc
		t.Run(testName, func(t *testing.T) {
			t.Parallel()
			// Each subtest gets its own iterator so the "first method called"
			// property actually holds.
			i := NewHeapIterator(context.Background(), []EntryIterator{
				mkStreamIterator(identity, "{foobar: \"baz1\"}"),
				mkStreamIterator(identity, "{foobar: \"baz2\"}"),
			}, logproto.FORWARD)
			testFunc(t, i)
		})
	}
}
type generator func(i int64) logproto.Entry
// mkStreamIterator builds a stream iterator over testSize entries produced by
// generator f, tagged with the given labels string.
func mkStreamIterator(f generator, labels string) EntryIterator {
	// The entry count is known up front, so pre-size the slice to avoid
	// repeated growth copies during append.
	entries := make([]logproto.Entry, 0, testSize)
	for i := int64(0); i < testSize; i++ {
		entries = append(entries, f(i))
	}
	return NewStreamIterator(logproto.Stream{
		Entries: entries,
		Labels:  labels,
	})
}
// identity is the basic generator: entry i carries timestamp i (seconds) and
// the decimal representation of i as its line.
func identity(i int64) logproto.Entry {
	e := logproto.Entry{Timestamp: time.Unix(i, 0)}
	e.Line = fmt.Sprint(i)
	return e
}
// offset shifts generator g by delta positions: the entry at index i is g's
// entry at index i+delta.
func offset(delta int64, g generator) generator {
	return func(i int64) logproto.Entry { return g(delta + i) }
}
// nolint
// constant produces entries that all share timestamp t (seconds) while each
// line still carries the index, so the lines remain distinct.
func constant(t int64) generator {
	ts := time.Unix(t, 0) // invariant across calls; compute once
	return func(i int64) logproto.Entry {
		return logproto.Entry{Timestamp: ts, Line: fmt.Sprint(i)}
	}
}
// inverse mirrors generator g around zero, which tests use to produce
// descending (backward) entry sequences.
func inverse(g generator) generator {
	return func(i int64) logproto.Entry { return g(-1 * i) }
}
// TestHeapIteratorDeduplication checks that the heap iterator collapses exact
// duplicate streams: seven overlapping copies of two streams must dedupe to
// one entry per (stream, timestamp) pair, in both directions.
func TestHeapIteratorDeduplication(t *testing.T) {
	foo := logproto.Stream{
		Labels: `{app="foo"}`,
		Entries: []logproto.Entry{
			{Timestamp: time.Unix(0, 1), Line: "1"},
			{Timestamp: time.Unix(0, 2), Line: "2"},
			{Timestamp: time.Unix(0, 3), Line: "3"},
		},
	}
	bar := logproto.Stream{
		Labels: `{app="bar"}`,
		Entries: []logproto.Entry{
			{Timestamp: time.Unix(0, 1), Line: "1"},
			{Timestamp: time.Unix(0, 2), Line: "2"},
			{Timestamp: time.Unix(0, 3), Line: "3"},
		},
	}
	// assertIt consumes the iterator and asserts the deduped, interleaved
	// order: at each timestamp, bar's entry comes before foo's.
	assertIt := func(it EntryIterator, reversed bool, length int) {
		for i := 0; i < length; i++ {
			j := i
			if reversed {
				// Backward iteration yields entries in mirror order.
				j = length - 1 - i
			}
			require.True(t, it.Next())
			require.NoError(t, it.Error())
			require.Equal(t, bar.Labels, it.Labels())
			require.Equal(t, bar.Entries[j], it.Entry())
			require.True(t, it.Next())
			require.NoError(t, it.Error())
			require.Equal(t, foo.Labels, it.Labels())
			require.Equal(t, foo.Entries[j], it.Entry())
		}
		require.False(t, it.Next())
		require.NoError(t, it.Error())
	}
	// forward iteration
	it := NewHeapIterator(context.Background(), []EntryIterator{
		NewStreamIterator(foo),
		NewStreamIterator(bar),
		NewStreamIterator(foo),
		NewStreamIterator(bar),
		NewStreamIterator(foo),
		NewStreamIterator(bar),
		NewStreamIterator(foo),
	}, logproto.FORWARD)
	assertIt(it, false, len(foo.Entries))
	// backward iteration
	it = NewHeapIterator(context.Background(), []EntryIterator{
		mustReverseStreamIterator(NewStreamIterator(foo)),
		mustReverseStreamIterator(NewStreamIterator(bar)),
		mustReverseStreamIterator(NewStreamIterator(foo)),
		mustReverseStreamIterator(NewStreamIterator(bar)),
		mustReverseStreamIterator(NewStreamIterator(foo)),
		mustReverseStreamIterator(NewStreamIterator(bar)),
		mustReverseStreamIterator(NewStreamIterator(foo)),
	}, logproto.BACKWARD)
	assertIt(it, true, len(foo.Entries))
}
// mustReverseStreamIterator wraps it in an unlimited reversed iterator,
// panicking on construction failure (test-only helper).
func mustReverseStreamIterator(it EntryIterator) EntryIterator {
	rev, err := NewReversedIter(it, 0, true)
	if err != nil {
		panic(err)
	}
	return rev
}
// TestReverseIterator checks that reversing a limited backward heap iterator
// yields the newest testSize/2 entries back in ascending order, with entries
// from itr2 surfacing before itr1 at each timestamp.
func TestReverseIterator(t *testing.T) {
	itr1 := mkStreamIterator(inverse(offset(testSize, identity)), defaultLabels)
	itr2 := mkStreamIterator(inverse(offset(testSize, identity)), "{foobar: \"bazbar\"}")
	heapIterator := NewHeapIterator(context.Background(), []EntryIterator{itr1, itr2}, logproto.BACKWARD)
	reversedIter, err := NewReversedIter(heapIterator, testSize, false)
	require.NoError(t, err)
	for i := int64((testSize / 2) + 1); i <= testSize; i++ {
		assert.True(t, reversedIter.Next())
		assert.Equal(t, identity(i), reversedIter.Entry(), fmt.Sprintln("iteration", i))
		// Expected value goes first in testify assertions.
		assert.Equal(t, itr2.Labels(), reversedIter.Labels())
		assert.True(t, reversedIter.Next())
		assert.Equal(t, identity(i), reversedIter.Entry(), fmt.Sprintln("iteration", i))
		assert.Equal(t, itr1.Labels(), reversedIter.Labels())
	}
	// Exhausted, error-free, and closes cleanly.
	assert.False(t, reversedIter.Next())
	assert.NoError(t, reversedIter.Error())
	assert.NoError(t, reversedIter.Close())
}
func TestReverseEntryIterator(t *testing.T) {
itr1 := mkStreamIterator(identity, defaultLabels)
reversedIter, err := NewEntryReversedIter(itr1)
require.NoError(t, err)
for i := int64(testSize - 1); i >= 0; i-- {
assert.Equal(t, true, reversedIter.Next())
assert.Equal(t, identity(i), reversedIter.Entry(), fmt.Sprintln("iteration", i))
LogQL: Labels and Metrics Extraction (#2769) * Adds logfmt, regexp and json logql parser Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * hook the ast with parsers. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * hook parser with memchunk. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * hook parser with the storage. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * hook parser with ingesters Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * fixes all tests Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Refactor to pipeline and implement ast parsing. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes the lexer for duration and range Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes all tests and add some for label filters Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add label and line format. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add tests for fmt label and line with validations. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Polishing parsers and add some more test cases Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finish the unwrap parser, still need to add more tests Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Indent this hell. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Moar tests and it works. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add more tests which lead me to find a bug in the lexer Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add more tests and fix all engine tests Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes match stage in promtail pipelines. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Hook Pipeline into ingester, tailer and storage. 
Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Correctly setup sharding for logqlv2 Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes precedences issue with label filters and add moar tests :v: * Adds quantile_over_time, grouping for non associate range aggregation parsing and moar tests * Extract with grouping * Adds parsing duration on unwrap * Improve the lexer to support more common identifier as functions. Also add duration convertion for unwrap. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes the frontend logs to include org_id. The auth middleware was happening after the stats one and so org_id was not set :facepalm:. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Support byte sizes in label filters. This patch extends the duration label filter with support for byte sizes such as `1kB` and `42MiB`. * Wip on error handling. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes json parser with prometheus label name rules. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * fixup! Support byte sizes in label filters. * Wip error handling, commit before big refactoring. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Refactoring in progress. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Work in progress. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Got something that builds and throw __error__ labels properly now. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add error handling + fixes groupins and post filtering. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * 400 on pipeline errors. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes a races in the log pipeline. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Unsure the key is parsable and valid. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Cleanup and code documentation. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Lint. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Lint. 
Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes frontend handler. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes old test. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix go1.15 local failing test. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> Co-authored-by: Karsten Jeschkies <k@jeschkies.xyz>
5 years ago
assert.Equal(t, reversedIter.Labels(), defaultLabels)
}
assert.Equal(t, false, reversedIter.Next())
assert.Equal(t, nil, reversedIter.Error())
assert.NoError(t, reversedIter.Close())
}
// TestReverseEntryIteratorUnlimited checks that a reversed iterator with
// limit 0 (unlimited) drains every entry from both underlying streams.
func TestReverseEntryIteratorUnlimited(t *testing.T) {
	itr1 := mkStreamIterator(offset(testSize, identity), defaultLabels)
	itr2 := mkStreamIterator(offset(testSize, identity), "{foobar: \"bazbar\"}")
	heapIterator := NewHeapIterator(context.Background(), []EntryIterator{itr1, itr2}, logproto.BACKWARD)
	reversedIter, err := NewReversedIter(heapIterator, 0, false)
	require.NoError(t, err)
	var ct int
	expected := 2 * testSize
	for reversedIter.Next() {
		ct++
	}
	require.Equal(t, expected, ct)
	// The original test leaked the iterator: it never checked Error() nor
	// released resources via Close().
	require.NoError(t, reversedIter.Error())
	require.NoError(t, reversedIter.Close())
}
// Test_PeekingIterator walks a three-entry stream, verifying at each step
// that Peek exposes the upcoming entry without consuming it, and that a
// subsequent Next/Entry returns the same entry. After the last entry, Peek
// must report not-ok.
func Test_PeekingIterator(t *testing.T) {
	iter := NewPeekingIterator(NewStreamIterator(logproto.Stream{
		Entries: []logproto.Entry{
			{Timestamp: time.Unix(0, 1)},
			{Timestamp: time.Unix(0, 2)},
			{Timestamp: time.Unix(0, 3)},
		},
	}))
	// One peek/advance round per entry; timestamps are 1, 2, 3 nanoseconds.
	for want := int64(1); want <= 3; want++ {
		_, peek, ok := iter.Peek()
		if peek.Timestamp.UnixNano() != want {
			t.Fatal("wrong peeked time.")
		}
		if !ok {
			t.Fatal("should be ok.")
		}
		if !iter.Next() {
			t.Fatal("should have next.")
		}
		if iter.Entry().Timestamp.UnixNano() != want {
			t.Fatal("wrong peeked time.")
		}
	}
	// The stream is exhausted; Peek must signal it.
	if _, _, ok := iter.Peek(); ok {
		t.Fatal("should not be ok.")
	}
}
// Test_DuplicateCount checks that the heap iterator records the number of
// deduplicated entries in the stats context: N identical copies of a
// 3-entry stream contribute 3*(N-1) duplicates, in both directions.
func Test_DuplicateCount(t *testing.T) {
	// stream has three entries sharing the line "foo" at distinct timestamps.
	stream := logproto.Stream{
		Entries: []logproto.Entry{
			{
				Timestamp: time.Unix(0, 1),
				Line:      "foo",
			},
			{
				Timestamp: time.Unix(0, 2),
				Line:      "foo",
			},
			{
				Timestamp: time.Unix(0, 3),
				Line:      "foo",
			},
		},
	}
	for _, test := range []struct {
		name               string
		iters              []EntryIterator
		direction          logproto.Direction
		expectedDuplicates int64
	}{
		{
			"empty b",
			[]EntryIterator{},
			logproto.BACKWARD,
			0,
		},
		{
			"empty f",
			[]EntryIterator{},
			logproto.FORWARD,
			0,
		},
		{
			// Two copies of the 3-entry stream: 3 duplicates.
			"replication 2 b",
			[]EntryIterator{
				NewStreamIterator(stream),
				NewStreamIterator(stream),
			},
			logproto.BACKWARD,
			3,
		},
		{
			"replication 2 f",
			[]EntryIterator{
				NewStreamIterator(stream),
				NewStreamIterator(stream),
			},
			logproto.FORWARD,
			3,
		},
		{
			// Three copies (6 duplicates) plus a distinct stream that adds none.
			"replication 3 f",
			[]EntryIterator{
				NewStreamIterator(stream),
				NewStreamIterator(stream),
				NewStreamIterator(stream),
				NewStreamIterator(logproto.Stream{
					Entries: []logproto.Entry{
						{
							Timestamp: time.Unix(0, 4),
							Line:      "bar",
						},
					},
				}),
			},
			logproto.FORWARD,
			6,
		},
		{
			"replication 3 b",
			[]EntryIterator{
				NewStreamIterator(stream),
				NewStreamIterator(stream),
				NewStreamIterator(stream),
				NewStreamIterator(logproto.Stream{
					Entries: []logproto.Entry{
						{
							Timestamp: time.Unix(0, 4),
							Line:      "bar",
						},
					},
				}),
			},
			logproto.BACKWARD,
			6,
		},
		{
			// A single stream can never produce duplicates.
			"single f",
			[]EntryIterator{
				NewStreamIterator(logproto.Stream{
					Entries: []logproto.Entry{
						{
							Timestamp: time.Unix(0, 4),
							Line:      "bar",
						},
					},
				}),
			},
			logproto.FORWARD,
			0,
		},
		{
			"single b",
			[]EntryIterator{
				NewStreamIterator(logproto.Stream{
					Entries: []logproto.Entry{
						{
							Timestamp: time.Unix(0, 4),
							Line:      "bar",
						},
					},
				}),
			},
			logproto.BACKWARD,
			0,
		},
	} {
		t.Run(test.name, func(t *testing.T) {
			// Duplicates are tallied in the stats context while draining.
			_, ctx := stats.NewContext(context.Background())
			it := NewHeapIterator(ctx, test.iters, test.direction)
			defer it.Close()
			for it.Next() {
			}
			require.Equal(t, test.expectedDuplicates, stats.FromContext(ctx).Result(0).TotalDuplicates())
		})
	}
}
func Test_timeRangedIterator_Next(t *testing.T) {
tests := []struct {
mint time.Time
maxt time.Time
expect []bool // array of expected values for next call in sequence
}{
{time.Unix(0, 0), time.Unix(0, 0), []bool{false}},
{time.Unix(0, 0), time.Unix(0, 1), []bool{false}},
{time.Unix(0, 1), time.Unix(0, 1), []bool{true, false}},
{time.Unix(0, 1), time.Unix(0, 2), []bool{true, false}},
{time.Unix(0, 1), time.Unix(0, 3), []bool{true, true, false}},
{time.Unix(0, 3), time.Unix(0, 3), []bool{true, false}},
{time.Unix(0, 4), time.Unix(0, 10), []bool{false}},
{time.Unix(0, 1), time.Unix(0, 10), []bool{true, true, true, false}},
{time.Unix(0, 0), time.Unix(0, 10), []bool{true, true, true, false}},
}
for _, tt := range tests {
t.Run(fmt.Sprintf("mint:%d maxt:%d", tt.mint.UnixNano(), tt.maxt.UnixNano()), func(t *testing.T) {
Improve metric queries by computing samples at the edges. (#2293) * First pass breaking the code appart. Wondering how we're going to achieve fast mutation of labels. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Work in progress. I realize I need hash for deduping lines. going to benchmark somes. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Tested some hash and decided which one to use. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Wip Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Starting working on ingester. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Trying to find a better hash function. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * More hash testing we have a winner. xxhash it is. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Settle on xxhash Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Better params interfacing. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add interface for queryparams for things that exist in both type of params. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add storage sample iterator implementations. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing tests and verifying we don't get collions for the hashing method. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing ingesters tests and refactoring utility function/tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing and testing that stats are still well computed. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing more tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * More engine tests finished. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes sharding evaluator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes more engine tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix error tests in the engine. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finish fixing all tests. 
Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes a bug where extractor was not passed in correctly. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add notes about upgrade. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Renamed and fix a bug. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add memchunk tests and starting test for sampleIterator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Test heap sample iterator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * working on test. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finishing testing all new iterators. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Making sure all store functions are tested. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Benchmark and verify everything is working well. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Make the linter happy. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * use xxhash v2. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix a flaky test because of map. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * go.mod. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> Co-authored-by: Edward Welch <edward.welch@grafana.com>
6 years ago
it := NewTimeRangedIterator(
NewStreamIterator(
logproto.Stream{Entries: []logproto.Entry{
{Timestamp: time.Unix(0, 1)},
{Timestamp: time.Unix(0, 2)},
{Timestamp: time.Unix(0, 3)},
}}),
tt.mint,
tt.maxt,
)
for _, b := range tt.expect {
Improve metric queries by computing samples at the edges. (#2293) * First pass breaking the code appart. Wondering how we're going to achieve fast mutation of labels. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Work in progress. I realize I need hash for deduping lines. going to benchmark somes. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Tested some hash and decided which one to use. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Wip Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Starting working on ingester. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Trying to find a better hash function. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * More hash testing we have a winner. xxhash it is. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Settle on xxhash Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Better params interfacing. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add interface for queryparams for things that exist in both type of params. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add storage sample iterator implementations. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing tests and verifying we don't get collions for the hashing method. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing ingesters tests and refactoring utility function/tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing and testing that stats are still well computed. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing more tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * More engine tests finished. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes sharding evaluator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes more engine tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix error tests in the engine. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finish fixing all tests. 
Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes a bug where extractor was not passed in correctly. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add notes about upgrade. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Renamed and fix a bug. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add memchunk tests and starting test for sampleIterator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Test heap sample iterator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * working on test. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finishing testing all new iterators. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Making sure all store functions are tested. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Benchmark and verify everything is working well. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Make the linter happy. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * use xxhash v2. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix a flaky test because of map. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * go.mod. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> Co-authored-by: Edward Welch <edward.welch@grafana.com>
6 years ago
require.Equal(t, b, it.Next())
}
Improve metric queries by computing samples at the edges. (#2293) * First pass breaking the code appart. Wondering how we're going to achieve fast mutation of labels. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Work in progress. I realize I need hash for deduping lines. going to benchmark somes. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Tested some hash and decided which one to use. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Wip Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Starting working on ingester. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Trying to find a better hash function. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * More hash testing we have a winner. xxhash it is. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Settle on xxhash Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Better params interfacing. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add interface for queryparams for things that exist in both type of params. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add storage sample iterator implementations. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing tests and verifying we don't get collions for the hashing method. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing ingesters tests and refactoring utility function/tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing and testing that stats are still well computed. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixing more tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * More engine tests finished. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes sharding evaluator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes more engine tests. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix error tests in the engine. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finish fixing all tests. 
Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fixes a bug where extractor was not passed in correctly. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add notes about upgrade. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Renamed and fix a bug. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Add memchunk tests and starting test for sampleIterator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Test heap sample iterator. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * working on test. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Finishing testing all new iterators. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Making sure all store functions are tested. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Benchmark and verify everything is working well. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Make the linter happy. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * use xxhash v2. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * Fix a flaky test because of map. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> * go.mod. Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com> Co-authored-by: Edward Welch <edward.welch@grafana.com>
6 years ago
require.NoError(t, it.Close())
})
t.Run(fmt.Sprintf("mint:%d maxt:%d_sample", tt.mint.UnixNano(), tt.maxt.UnixNano()), func(t *testing.T) {
it := NewTimeRangedSampleIterator(
NewSeriesIterator(
logproto.Series{Samples: []logproto.Sample{
sample(1),
sample(2),
sample(3),
}}),
tt.mint.UnixNano(),
tt.maxt.UnixNano(),
)
for _, b := range tt.expect {
require.Equal(t, b, it.Next())
}
require.NoError(t, it.Close())
})
}
}
// CloseTestingIterator is a stub EntryIterator that records whether Close was
// called; its closed flag is atomic so tests can read it safely.
type CloseTestingIterator struct {
	closed atomic.Bool
	e      logproto.Entry
}

// Next always reports more data — this stub never exhausts.
func (i *CloseTestingIterator) Next() bool            { return true }
func (i *CloseTestingIterator) Entry() logproto.Entry { return i.e }
func (i *CloseTestingIterator) Labels() string        { return "" }
func (i *CloseTestingIterator) Error() error          { return nil }

// Close flips the flag so tests can assert it was invoked.
func (i *CloseTestingIterator) Close() error {
	i.closed.Store(true)
	return nil
}
// TestNonOverlappingClose checks that closing a non-overlapping iterator
// propagates Close to every wrapped sub-iterator, including ones not yet
// consumed.
func TestNonOverlappingClose(t *testing.T) {
	a, b := &CloseTestingIterator{}, &CloseTestingIterator{}
	itr := NewNonOverlappingIterator([]EntryIterator{a, b}, "")
	// Ensure both itr.cur and itr.iterators are non nil
	itr.Next()
	require.NotNil(t, itr.(*nonOverlappingIterator).curr)
	// The original silently discarded Close's error value.
	require.NoError(t, itr.Close())
	require.True(t, a.closed.Load())
	require.True(t, b.closed.Load())
}