|
|
|
|
@ -740,6 +740,89 @@ func TestHead_ReadWAL(t *testing.T) { |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// TestHead_ReadWAL2 verifies that Head.Init replays a WAL containing series,
// histogram samples, tombstones, and exemplar records, for every supported WAL
// compression type. It also checks that duplicate series refs and sample-less
// series are handled correctly during replay.
func TestHead_ReadWAL2(t *testing.T) {
	for _, compress := range []wlog.CompressionType{wlog.CompressionNone, wlog.CompressionSnappy, wlog.CompressionZstd} {
		t.Run(fmt.Sprintf("compress=%s", compress), func(t *testing.T) {
			// WAL records in replay order: series, samples, more series (one
			// duplicating labels of ref 100), more samples, tombstones, exemplars.
			entries := []interface{}{
				[]record.RefSeries{
					{Ref: 10, Labels: labels.FromStrings("a", "1")},
					{Ref: 11, Labels: labels.FromStrings("a", "2")},
					{Ref: 100, Labels: labels.FromStrings("a", "3")},
				},
				[]record.RefHistogramSample{
					{Ref: 0, T: 99, H: tsdbutil.GenerateTestHistogram(1)},
					{Ref: 10, T: 100, H: tsdbutil.GenerateTestCustomBucketsHistogram(2)},
					{Ref: 100, T: 100, H: tsdbutil.GenerateTestHistogram(3)},
				},
				[]record.RefSeries{
					{Ref: 50, Labels: labels.FromStrings("a", "4")},
					// This series has two refs pointing to it.
					{Ref: 101, Labels: labels.FromStrings("a", "3")},
				},
				[]record.RefHistogramSample{
					{Ref: 10, T: 101, H: tsdbutil.GenerateTestHistogram(5)},
					{Ref: 50, T: 101, H: tsdbutil.GenerateTestHistogram(6)},
					{Ref: 101, T: 101, H: tsdbutil.GenerateTestCustomBucketsHistogram(7)},
				},
				[]tombstones.Stone{
					{Ref: 0, Intervals: []tombstones.Interval{{Mint: 99, Maxt: 101}}},
				},
				[]record.RefExemplar{
					{Ref: 10, T: 100, V: 1, Labels: labels.FromStrings("trace_id", "asdf")},
				},
			}

			head, w := newTestHead(t, 1000, compress, false)
			defer func() {
				require.NoError(t, head.Close())
			}()

			populateTestWL(t, w, entries)

			require.NoError(t, head.Init(math.MinInt64))
			// The highest series ref seen in the WAL (101) must be restored.
			require.Equal(t, uint64(101), head.lastSeriesID.Load())

			s10 := head.series.getByID(10)
			s11 := head.series.getByID(11)
			s50 := head.series.getByID(50)
			s100 := head.series.getByID(100)

			testutil.RequireEqual(t, labels.FromStrings("a", "1"), s10.lset)
			require.Nil(t, s11) // Series without samples should be garbage collected at head.Init().
			testutil.RequireEqual(t, labels.FromStrings("a", "4"), s50.lset)
			testutil.RequireEqual(t, labels.FromStrings("a", "3"), s100.lset)

			// expandChunk drains a chunk iterator into a slice of histogram samples.
			expandChunk := func(c chunkenc.Iterator) (x []sample) {
				for c.Next() == chunkenc.ValHistogram {
					t, v := c.AtHistogram(nil)
					x = append(x, sample{t: t, h: v})
				}
				require.NoError(t, c.Err())
				return x
			}

			c, _, _, err := s10.chunk(0, head.chunkDiskMapper, &head.memChunkPool)
			require.NoError(t, err)
			require.Equal(t, []sample{{100, 0, tsdbutil.GenerateTestCustomBucketsHistogram(2), nil}, {101, 0, tsdbutil.GenerateTestCustomBucketsHistogram(5), nil}}, expandChunk(c.chunk.Iterator(nil)))
			c, _, _, err = s50.chunk(0, head.chunkDiskMapper, &head.memChunkPool)
			require.NoError(t, err)
			require.Equal(t, []sample{{101, 0, tsdbutil.GenerateTestHistogram(6), nil}}, expandChunk(c.chunk.Iterator(nil)))
			// The samples before the new series record should be discarded since a duplicate record
			// is only possible when old samples were compacted.
			c, _, _, err = s100.chunk(0, head.chunkDiskMapper, &head.memChunkPool)
			require.NoError(t, err)
			require.Equal(t, []sample{{101, 0, tsdbutil.GenerateTestCustomBucketsHistogram(7), nil}}, expandChunk(c.chunk.Iterator(nil)))

			// Exemplars written to the WAL must be queryable after replay.
			q, err := head.ExemplarQuerier(context.Background())
			require.NoError(t, err)
			e, err := q.Select(0, 1000, []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "1")})
			require.NoError(t, err)
			require.True(t, exemplar.Exemplar{Ts: 100, Value: 1, Labels: labels.FromStrings("trace_id", "asdf")}.Equals(e[0].Exemplars[0]))
		})
	}
}
|
|
|
|
|
|
|
|
|
func TestHead_WALMultiRef(t *testing.T) { |
|
|
|
|
head, w := newTestHead(t, 1000, wlog.CompressionNone, false) |
|
|
|
|
|
|
|
|
|
@ -3953,6 +4036,194 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) { |
|
|
|
|
testQuery() |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// TestHistogramInWALAndMmapChunk2 verifies that histogram samples (integer and
// float, exponential and custom-bucket) survive a Head restart both via
// m-mapped chunks and via pure WAL replay. One series holds only histograms;
// a second mixes histograms with float samples to exercise counter-reset hints.
func TestHistogramInWALAndMmapChunk2(t *testing.T) {
	head, _ := newTestHead(t, 3000, wlog.CompressionNone, false)
	t.Cleanup(func() {
		require.NoError(t, head.Close())
	})
	require.NoError(t, head.Init(0))

	// Series with only histograms.
	s1 := labels.FromStrings("a", "b1")
	k1 := s1.String()
	numHistograms := 300
	exp := map[string][]chunks.Sample{}
	ts := int64(0)
	var app storage.Appender
	// Integer histograms: first custom-bucket, then exponential ones.
	for _, custom := range []bool{true, false} {
		app = head.Appender(context.Background())
		var hists []*histogram.Histogram
		if custom {
			hists = tsdbutil.GenerateTestCustomBucketsHistograms(numHistograms)
		} else {
			hists = tsdbutil.GenerateTestHistograms(numHistograms)
		}
		for _, h := range hists {
			if !custom {
				// Mirror positive buckets into negative ones so both sides are exercised.
				h.NegativeSpans = h.PositiveSpans
				h.NegativeBuckets = h.PositiveBuckets
			}
			_, err := app.AppendHistogram(0, s1, ts, h, nil)
			require.NoError(t, err)
			exp[k1] = append(exp[k1], sample{t: ts, h: h.Copy()})
			ts++
			// Commit in small batches to generate many WAL records.
			if ts%5 == 0 {
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
			}
		}
		require.NoError(t, app.Commit())
	}
	// Float histograms for the same series, continuing the timestamp sequence.
	for _, custom := range []bool{true, false} {
		app = head.Appender(context.Background())
		var hists []*histogram.FloatHistogram
		if custom {
			hists = tsdbutil.GenerateTestCustomBucketsFloatHistograms(numHistograms)
		} else {
			hists = tsdbutil.GenerateTestFloatHistograms(numHistograms)
		}
		for _, h := range hists {
			if !custom {
				h.NegativeSpans = h.PositiveSpans
				h.NegativeBuckets = h.PositiveBuckets
			}
			_, err := app.AppendHistogram(0, s1, ts, nil, h)
			require.NoError(t, err)
			exp[k1] = append(exp[k1], sample{t: ts, fh: h.Copy()})
			ts++
			if ts%5 == 0 {
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
			}
		}
		require.NoError(t, app.Commit())
		// Force head chunks out to disk so the restart below finds mmap chunks.
		head.mmapHeadChunks()
	}

	// There should be 19 mmap chunks in s1.
	ms := head.series.getByHash(s1.Hash(), s1)
	require.Len(t, ms.mmappedChunks, 19)
	// Snapshot the mmap chunk metadata to compare against after restart.
	expMmapChunks := make([]*mmappedChunk, 0, 20)
	for _, mmap := range ms.mmappedChunks {
		require.Positive(t, mmap.numSamples)
		cpy := *mmap
		expMmapChunks = append(expMmapChunks, &cpy)
	}
	expHeadChunkSamples := ms.headChunks.chunk.NumSamples()
	require.Positive(t, expHeadChunkSamples)

	// Series with mix of histograms and float.
	s2 := labels.FromStrings("a", "b2")
	k2 := s2.String()
	ts = 0
	for _, custom := range []bool{true, false} {
		app = head.Appender(context.Background())
		var hists []*histogram.Histogram
		if custom {
			hists = tsdbutil.GenerateTestCustomBucketsHistograms(100)
		} else {
			hists = tsdbutil.GenerateTestHistograms(100)
		}
		for _, h := range hists {
			ts++
			if !custom {
				h.NegativeSpans = h.PositiveSpans
				h.NegativeBuckets = h.PositiveBuckets
			}
			_, err := app.AppendHistogram(0, s2, ts, h, nil)
			require.NoError(t, err)
			eh := h.Copy()
			if ts > 30 && (ts-10)%20 == 1 {
				// Need "unknown" hint after float sample.
				eh.CounterResetHint = histogram.UnknownCounterReset
			}
			exp[k2] = append(exp[k2], sample{t: ts, h: eh})
			if ts%20 == 0 {
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
				// Add some float.
				for i := 0; i < 10; i++ {
					ts++
					_, err := app.Append(0, s2, ts, float64(ts))
					require.NoError(t, err)
					exp[k2] = append(exp[k2], sample{t: ts, f: float64(ts)})
				}
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
			}
		}
		require.NoError(t, app.Commit())
	}
	// Same mixing pattern with float histograms.
	for _, custom := range []bool{true, false} {
		app = head.Appender(context.Background())
		var hists []*histogram.FloatHistogram
		if custom {
			hists = tsdbutil.GenerateTestCustomBucketsFloatHistograms(100)
		} else {
			hists = tsdbutil.GenerateTestFloatHistograms(100)
		}
		for _, h := range hists {
			ts++
			if !custom {
				h.NegativeSpans = h.PositiveSpans
				h.NegativeBuckets = h.PositiveBuckets
			}
			_, err := app.AppendHistogram(0, s2, ts, nil, h)
			require.NoError(t, err)
			eh := h.Copy()
			if ts > 30 && (ts-10)%20 == 1 {
				// Need "unknown" hint after float sample.
				eh.CounterResetHint = histogram.UnknownCounterReset
			}
			exp[k2] = append(exp[k2], sample{t: ts, fh: eh})
			if ts%20 == 0 {
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
				// Add some float.
				for i := 0; i < 10; i++ {
					ts++
					_, err := app.Append(0, s2, ts, float64(ts))
					require.NoError(t, err)
					exp[k2] = append(exp[k2], sample{t: ts, f: float64(ts)})
				}
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
			}
		}
		require.NoError(t, app.Commit())
	}

	// Restart head.
	require.NoError(t, head.Close())
	// startHead reopens the head over the existing WAL directory and options.
	startHead := func() {
		w, err := wlog.NewSize(nil, nil, head.wal.Dir(), 32768, wlog.CompressionNone)
		require.NoError(t, err)
		head, err = NewHead(nil, nil, w, nil, head.opts, nil)
		require.NoError(t, err)
		require.NoError(t, head.Init(0))
	}
	startHead()

	// Checking contents of s1.
	ms = head.series.getByHash(s1.Hash(), s1)
	require.Equal(t, expMmapChunks, ms.mmappedChunks)
	require.Equal(t, expHeadChunkSamples, ms.headChunks.chunk.NumSamples())

	testQuery := func() {
		q, err := NewBlockQuerier(head, head.MinTime(), head.MaxTime())
		require.NoError(t, err)
		act := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "a", "b.*"))
		compareSeries(t, exp, act)
	}
	testQuery()

	// Restart with no mmap chunks to test WAL replay.
	require.NoError(t, head.Close())
	require.NoError(t, os.RemoveAll(mmappedChunksDir(head.opts.ChunkDirRoot)))
	startHead()
	testQuery()
}
|
|
|
|
|
|
|
|
|
func TestChunkSnapshot(t *testing.T) { |
|
|
|
|
head, _ := newTestHead(t, 120*4, wlog.CompressionNone, false) |
|
|
|
|
defer func() { |
|
|
|
|
@ -5089,6 +5360,48 @@ func TestChunkSnapshotTakenAfterIncompleteSnapshot(t *testing.T) { |
|
|
|
|
require.Positive(t, offset) |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
func TestHistogramWALANDWBLReplay(t *testing.T) { |
|
|
|
|
dir := t.TempDir() |
|
|
|
|
wal, err := wlog.NewSize(nil, nil, filepath.Join(dir, "wal"), 32768, wlog.CompressionSnappy) |
|
|
|
|
require.NoError(t, err) |
|
|
|
|
oooWlog, err := wlog.NewSize(nil, nil, filepath.Join(dir, wlog.WblDirName), 32768, wlog.CompressionSnappy) |
|
|
|
|
require.NoError(t, err) |
|
|
|
|
|
|
|
|
|
opts := DefaultHeadOptions() |
|
|
|
|
opts.ChunkRange = 1000 |
|
|
|
|
opts.ChunkDirRoot = dir |
|
|
|
|
opts.OutOfOrderTimeWindow.Store(30 * time.Minute.Milliseconds()) |
|
|
|
|
opts.EnableNativeHistograms.Store(true) |
|
|
|
|
opts.EnableOOONativeHistograms.Store(true) |
|
|
|
|
|
|
|
|
|
h, err := NewHead(nil, nil, wal, oooWlog, opts, nil) |
|
|
|
|
require.NoError(t, err) |
|
|
|
|
require.NoError(t, h.Init(0)) |
|
|
|
|
|
|
|
|
|
var expOOOSamples []chunks.Sample |
|
|
|
|
l := labels.FromStrings("foo", "bar") |
|
|
|
|
appendSample := func(mins int64, val float64, isOOO bool, isCustomBucketHistogram bool) { |
|
|
|
|
app := h.Appender(context.Background()) |
|
|
|
|
var s sample |
|
|
|
|
if isCustomBucketHistogram { |
|
|
|
|
s = sample{t: mins * time.Minute.Milliseconds(), h: tsdbutil.GenerateTestCustomBucketsHistogram(int(val))} |
|
|
|
|
} else { |
|
|
|
|
s = sample{t: mins * time.Minute.Milliseconds(), h: tsdbutil.GenerateTestHistogram(int(val))} |
|
|
|
|
} |
|
|
|
|
_, err := app.AppendHistogram(0, l, mins*time.Minute.Milliseconds(), s.h, nil) |
|
|
|
|
require.NoError(t, err) |
|
|
|
|
require.NoError(t, app.Commit()) |
|
|
|
|
|
|
|
|
|
if isOOO { |
|
|
|
|
expOOOSamples = append(expOOOSamples, s) |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// In-order histogram samples.
|
|
|
|
|
appendSample(60, 60, false, false) |
|
|
|
|
|
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// TestWBLReplay checks the replay at a low level.
|
|
|
|
|
func TestWBLReplay(t *testing.T) { |
|
|
|
|
for name, scenario := range sampleTypeScenarios { |
|
|
|
|
|