mirror of https://github.com/grafana/loki
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
213 lines
5.4 KiB
213 lines
5.4 KiB
|
6 years ago
|
package stats
|
||
|
|
|
||
|
|
import (
|
||
|
|
"context"
|
||
|
|
"testing"
|
||
|
|
"time"
|
||
|
|
|
||
|
5 years ago
|
util_log "github.com/cortexproject/cortex/pkg/util/log"
|
||
|
6 years ago
|
jsoniter "github.com/json-iterator/go"
|
||
|
|
"github.com/stretchr/testify/require"
|
||
|
|
)
|
||
|
|
|
||
|
|
func TestSnapshot(t *testing.T) {
|
||
|
|
ctx := NewContext(context.Background())
|
||
|
|
|
||
|
6 years ago
|
GetChunkData(ctx).HeadChunkBytes += 10
|
||
|
|
GetChunkData(ctx).HeadChunkLines += 20
|
||
|
|
GetChunkData(ctx).DecompressedBytes += 40
|
||
|
|
GetChunkData(ctx).DecompressedLines += 20
|
||
|
|
GetChunkData(ctx).CompressedBytes += 30
|
||
|
6 years ago
|
GetChunkData(ctx).TotalDuplicates += 10
|
||
|
|
|
||
|
|
GetStoreData(ctx).TotalChunksRef += 50
|
||
|
6 years ago
|
GetStoreData(ctx).TotalChunksDownloaded += 60
|
||
|
|
GetStoreData(ctx).ChunksDownloadTime += time.Second
|
||
|
6 years ago
|
|
||
|
|
fakeIngesterQuery(ctx)
|
||
|
|
fakeIngesterQuery(ctx)
|
||
|
|
|
||
|
|
res := Snapshot(ctx, 2*time.Second)
|
||
|
5 years ago
|
res.Log(util_log.Logger)
|
||
|
6 years ago
|
expected := Result{
|
||
|
|
Ingester: Ingester{
|
||
|
6 years ago
|
TotalChunksMatched: 200,
|
||
|
|
TotalBatches: 50,
|
||
|
|
TotalLinesSent: 60,
|
||
|
|
HeadChunkBytes: 10,
|
||
|
|
HeadChunkLines: 20,
|
||
|
|
DecompressedBytes: 24,
|
||
|
|
DecompressedLines: 40,
|
||
|
|
CompressedBytes: 60,
|
||
|
|
TotalDuplicates: 2,
|
||
|
|
TotalReached: 2,
|
||
|
6 years ago
|
},
|
||
|
|
Store: Store{
|
||
|
6 years ago
|
TotalChunksRef: 50,
|
||
|
|
TotalChunksDownloaded: 60,
|
||
|
|
ChunksDownloadTime: time.Second.Seconds(),
|
||
|
|
HeadChunkBytes: 10,
|
||
|
|
HeadChunkLines: 20,
|
||
|
|
DecompressedBytes: 40,
|
||
|
|
DecompressedLines: 20,
|
||
|
|
CompressedBytes: 30,
|
||
|
|
TotalDuplicates: 10,
|
||
|
6 years ago
|
},
|
||
|
|
Summary: Summary{
|
||
|
6 years ago
|
ExecTime: 2 * time.Second.Seconds(),
|
||
|
|
BytesProcessedPerSecond: int64(42),
|
||
|
|
LinesProcessedPerSecond: int64(50),
|
||
|
|
TotalBytesProcessed: int64(84),
|
||
|
|
TotalLinesProcessed: int64(100),
|
||
|
6 years ago
|
},
|
||
|
|
}
|
||
|
|
require.Equal(t, expected, res)
|
||
|
|
}
|
||
|
|
|
||
|
6 years ago
|
func TestSnapshot_MergesResults(t *testing.T) {
|
||
|
|
ctx := NewContext(context.Background())
|
||
|
|
expected := Result{
|
||
|
|
Ingester: Ingester{
|
||
|
|
TotalChunksMatched: 200,
|
||
|
|
TotalBatches: 50,
|
||
|
|
TotalLinesSent: 60,
|
||
|
|
HeadChunkBytes: 10,
|
||
|
|
HeadChunkLines: 20,
|
||
|
|
DecompressedBytes: 24,
|
||
|
|
DecompressedLines: 40,
|
||
|
|
CompressedBytes: 60,
|
||
|
|
TotalDuplicates: 2,
|
||
|
|
TotalReached: 2,
|
||
|
|
},
|
||
|
|
Store: Store{
|
||
|
|
TotalChunksRef: 50,
|
||
|
|
TotalChunksDownloaded: 60,
|
||
|
|
ChunksDownloadTime: time.Second.Seconds(),
|
||
|
|
HeadChunkBytes: 10,
|
||
|
|
HeadChunkLines: 20,
|
||
|
|
DecompressedBytes: 40,
|
||
|
|
DecompressedLines: 20,
|
||
|
|
CompressedBytes: 30,
|
||
|
|
TotalDuplicates: 10,
|
||
|
|
},
|
||
|
|
Summary: Summary{
|
||
|
|
ExecTime: 2 * time.Second.Seconds(),
|
||
|
|
BytesProcessedPerSecond: int64(42),
|
||
|
|
LinesProcessedPerSecond: int64(50),
|
||
|
|
TotalBytesProcessed: int64(84),
|
||
|
|
TotalLinesProcessed: int64(100),
|
||
|
|
},
|
||
|
|
}
|
||
|
|
|
||
|
|
err := JoinResults(ctx, expected)
|
||
|
|
require.Nil(t, err)
|
||
|
|
res := Snapshot(ctx, 2*time.Second)
|
||
|
|
require.Equal(t, expected, res)
|
||
|
|
}
|
||
|
|
|
||
|
|
func TestGetResult_ErrsNonexistant(t *testing.T) {
|
||
|
|
out, err := GetResult(context.Background())
|
||
|
|
require.NotNil(t, err)
|
||
|
|
require.Nil(t, out)
|
||
|
|
}
|
||
|
|
|
||
|
6 years ago
|
func fakeIngesterQuery(ctx context.Context) {
|
||
|
|
d, _ := ctx.Value(trailersKey).(*trailerCollector)
|
||
|
|
meta := d.addTrailer()
|
||
|
|
|
||
|
|
c, _ := jsoniter.MarshalToString(ChunkData{
|
||
|
6 years ago
|
HeadChunkBytes: 5,
|
||
|
|
HeadChunkLines: 10,
|
||
|
|
DecompressedBytes: 12,
|
||
|
|
DecompressedLines: 20,
|
||
|
|
CompressedBytes: 30,
|
||
|
6 years ago
|
TotalDuplicates: 1,
|
||
|
|
})
|
||
|
|
meta.Set(chunkDataKey, c)
|
||
|
|
i, _ := jsoniter.MarshalToString(IngesterData{
|
||
|
|
TotalChunksMatched: 100,
|
||
|
|
TotalBatches: 25,
|
||
|
|
TotalLinesSent: 30,
|
||
|
|
})
|
||
|
|
meta.Set(ingesterDataKey, i)
|
||
|
|
}
|
||
|
6 years ago
|
|
||
|
|
func TestResult_Merge(t *testing.T) {
|
||
|
|
var res Result
|
||
|
|
|
||
|
6 years ago
|
res.Merge(res) // testing zero.
|
||
|
|
require.Equal(t, res, res)
|
||
|
|
|
||
|
6 years ago
|
toMerge := Result{
|
||
|
|
Ingester: Ingester{
|
||
|
|
TotalChunksMatched: 200,
|
||
|
|
TotalBatches: 50,
|
||
|
|
TotalLinesSent: 60,
|
||
|
|
HeadChunkBytes: 10,
|
||
|
|
HeadChunkLines: 20,
|
||
|
|
DecompressedBytes: 24,
|
||
|
|
DecompressedLines: 40,
|
||
|
|
CompressedBytes: 60,
|
||
|
|
TotalDuplicates: 2,
|
||
|
|
TotalReached: 2,
|
||
|
|
},
|
||
|
|
Store: Store{
|
||
|
|
TotalChunksRef: 50,
|
||
|
|
TotalChunksDownloaded: 60,
|
||
|
|
ChunksDownloadTime: time.Second.Seconds(),
|
||
|
|
HeadChunkBytes: 10,
|
||
|
|
HeadChunkLines: 20,
|
||
|
|
DecompressedBytes: 40,
|
||
|
|
DecompressedLines: 20,
|
||
|
|
CompressedBytes: 30,
|
||
|
|
TotalDuplicates: 10,
|
||
|
|
},
|
||
|
|
Summary: Summary{
|
||
|
6 years ago
|
ExecTime: 2 * time.Second.Seconds(),
|
||
|
|
BytesProcessedPerSecond: int64(42),
|
||
|
|
LinesProcessedPerSecond: int64(50),
|
||
|
|
TotalBytesProcessed: int64(84),
|
||
|
|
TotalLinesProcessed: int64(100),
|
||
|
6 years ago
|
},
|
||
|
|
}
|
||
|
|
|
||
|
|
res.Merge(toMerge)
|
||
|
|
require.Equal(t, toMerge, res)
|
||
|
6 years ago
|
|
||
|
|
// merge again
|
||
|
|
res.Merge(toMerge)
|
||
|
|
require.Equal(t, Result{
|
||
|
|
Ingester: Ingester{
|
||
|
|
TotalChunksMatched: 2 * 200,
|
||
|
|
TotalBatches: 2 * 50,
|
||
|
|
TotalLinesSent: 2 * 60,
|
||
|
|
HeadChunkBytes: 2 * 10,
|
||
|
|
HeadChunkLines: 2 * 20,
|
||
|
|
DecompressedBytes: 2 * 24,
|
||
|
|
DecompressedLines: 2 * 40,
|
||
|
|
CompressedBytes: 2 * 60,
|
||
|
|
TotalDuplicates: 2 * 2,
|
||
|
|
TotalReached: 2 * 2,
|
||
|
|
},
|
||
|
|
Store: Store{
|
||
|
|
TotalChunksRef: 2 * 50,
|
||
|
|
TotalChunksDownloaded: 2 * 60,
|
||
|
|
ChunksDownloadTime: 2 * time.Second.Seconds(),
|
||
|
|
HeadChunkBytes: 2 * 10,
|
||
|
|
HeadChunkLines: 2 * 20,
|
||
|
|
DecompressedBytes: 2 * 40,
|
||
|
|
DecompressedLines: 2 * 20,
|
||
|
|
CompressedBytes: 2 * 30,
|
||
|
|
TotalDuplicates: 2 * 10,
|
||
|
|
},
|
||
|
|
Summary: Summary{
|
||
|
6 years ago
|
ExecTime: 2 * 2 * time.Second.Seconds(),
|
||
|
|
BytesProcessedPerSecond: int64(42), // 2 requests at the same pace should give the same bytes/lines per sec
|
||
|
|
LinesProcessedPerSecond: int64(50),
|
||
|
|
TotalBytesProcessed: 2 * int64(84),
|
||
|
|
TotalLinesProcessed: 2 * int64(100),
|
||
|
6 years ago
|
},
|
||
|
|
}, res)
|
||
|
|
|
||
|
6 years ago
|
}
|