@@ -9,9 +9,9 @@ import (
 	"github.com/prometheus/common/model"
 	"github.com/stretchr/testify/require"
 
+	"github.com/grafana/loki/pkg/push"
 	"github.com/grafana/loki/v3/pkg/chunkenc"
 	iter "github.com/grafana/loki/v3/pkg/iter/v2"
-	"github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter"
 )
 
 // TODO(owen-d): this should probably be in it's own testing-util package
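For context on the import swap: the only use of the filter package here was the old helper's hand-built scalable bloom filter, which the rewrite in the next hunk replaces with the package-level constructor. A minimal before/after sketch; the 1024 capacity, 0.01 false-positive rate, and 0.8 fill ratio come from the removed line, and whether NewBloom applies comparable sizing internally is an assumption, not something this diff shows:

	// Before: explicit sizing via the filter package.
	var b1 Bloom
	b1.ScalableBloomFilter = *filter.NewScalableBloomFilter(1024, 0.01, 0.8)

	// After: the constructor the new helper uses; its internal sizing is an
	// assumption here, not shown in this diff.
	b2 := NewBloom()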
@@ -46,94 +46,73 @@ func MakeBlock(t testing.TB, nth int, fromFp, throughFp model.Fingerprint, fromT
 	return block, data, keys
 }
 
-// This is a helper type used in tests that buffers blooms and can be turned into
-// the commonly used iterator form *SeriesWithBlooms.
-type SeriesWithLiteralBlooms struct {
-	Series *Series
-	Blooms []*Bloom
-}
-
-func (s *SeriesWithLiteralBlooms) SeriesWithBlooms() SeriesWithBlooms {
-	offsets := make([]BloomOffset, 0, len(s.Blooms))
-	for i := range s.Blooms {
+func newSeriesWithBlooms(series Series, blooms []*Bloom) SeriesWithBlooms {
+	offsets := make([]BloomOffset, 0, len(blooms))
+	for i := range blooms {
 		offsets = append(offsets, BloomOffset{Page: i, ByteOffset: 0})
 	}
 
 	return SeriesWithBlooms{
 		Series: &SeriesWithMeta{
-			Series: *s.Series,
+			Series: series,
 			Meta: Meta{
 				Fields:  NewSetFromLiteral[Field]("trace_id"),
 				Offsets: offsets,
 			},
 		},
-		Blooms: iter.NewSliceIter(s.Blooms),
+		Blooms: iter.NewSliceIter(blooms),
 	}
 }
 
-func MkBasicSeriesWithBlooms(nSeries int, fromFp, throughFp model.Fingerprint, fromTs, throughTs model.Time) (seriesList []SeriesWithBlooms, keysList [][][]byte) {
-	series, keys := MkBasicSeriesWithLiteralBlooms(nSeries, fromFp, throughFp, fromTs, throughTs)
-	mapped := make([]SeriesWithBlooms, 0, len(series))
-
-	for _, s := range series {
-		v := s.SeriesWithBlooms()
-		mapped = append(mapped, v)
-	}
-
-	return mapped, keys
-}
-
+func MkBasicSeriesWithBlooms(nSeries int, fromFp, throughFp model.Fingerprint, fromTs, throughTs model.Time) (seriesList []SeriesWithBlooms, keysList [][][]byte) {
+	// return values
+	seriesList = make([]SeriesWithBlooms, 0, nSeries)
+	keysList = make([][][]byte, 0, nSeries)
-func MkBasicSeriesWithLiteralBlooms(nSeries int, fromFp, throughFp model.Fingerprint, fromTs, throughTs model.Time) (seriesList []SeriesWithLiteralBlooms, keysList [][][]byte) {
-	const nGramLen = 4
-	seriesList = make([]SeriesWithLiteralBlooms, 0, nSeries)
-	keysList = make([][][]byte, 0, nSeries)
+
+	numChunksPerSeries := 10
+	numBloomsPerSeries := 2
 
 	step := (throughFp - fromFp) / model.Fingerprint(nSeries)
-	timeDelta := time.Duration(throughTs.Sub(fromTs).Nanoseconds() / int64(nSeries))
+	timeDelta := time.Duration(throughTs.Sub(fromTs).Nanoseconds() / int64(numChunksPerSeries))
 
-	tokenizer := NewNGramTokenizer(nGramLen, 0)
 	for i := 0; i < nSeries; i++ {
 		var series Series
+		var blooms []*Bloom
 		series.Fingerprint = fromFp + model.Fingerprint(i)*step
-		from := fromTs.Add(timeDelta * time.Duration(i))
-		series.Chunks = []ChunkRef{
-			{
-				From:     from,
-				Through:  from.Add(timeDelta),
-				Checksum: uint32(i),
-			},
+		for from := fromTs; from < throughTs; from = from.Add(timeDelta) {
+			series.Chunks = append(series.Chunks,
+				ChunkRef{
+					From:    from,
+					Through: from.Add(timeDelta),
+				},
+			)
 		}
 
-		var bloom Bloom
-		bloom.ScalableBloomFilter = *filter.NewScalableBloomFilter(1024, 0.01, 0.8)
-
 		keys := make([][]byte, 0, int(step))
-		for _, chk := range series.Chunks {
-			tokenBuf, prefixLen := prefixedToken(nGramLen, chk, nil)
-			for j := 0; j < int(step); j++ {
-				line := fmt.Sprintf("%04x:%04x", int(series.Fingerprint), j)
-				it := tokenizer.Tokens(line)
+
+		chunkBatchSize := (series.Chunks.Len() + numBloomsPerSeries - 1) / numBloomsPerSeries
+		for j := 0; j < numBloomsPerSeries; j++ {
+			bloom := NewBloom()
+
+			batchStart, batchEnd := j*chunkBatchSize, min(series.Chunks.Len(), (j+1)*chunkBatchSize)
+			for x, chk := range series.Chunks[batchStart:batchEnd] {
+				tokenizer := NewStructuredMetadataTokenizer(string(prefixForChunkRef(chk)))
+				kv := push.LabelAdapter{Name: "trace_id", Value: fmt.Sprintf("%s:%04x", series.Fingerprint, j*chunkBatchSize+x)}
+				it := tokenizer.Tokens(kv)
 				for it.Next() {
-					key := it.At()
-					// series-level key
+					key := []byte(it.At())
 					bloom.Add(key)
-
-					// chunk-level key
-					tokenBuf = append(tokenBuf[:prefixLen], key...)
-					bloom.Add(tokenBuf)
-
-					keyCopy := key
-					keys = append(keys, keyCopy)
+					keys = append(keys, key)
 				}
 			}
+
+			blooms = append(blooms, bloom)
 		}
-		seriesList = append(seriesList, SeriesWithLiteralBlooms{
-			Series: &series,
-			Blooms: []*Bloom{&bloom},
-		})
+		seriesList = append(seriesList, newSeriesWithBlooms(series, blooms))
 		keysList = append(keysList, keys)
 	}
-	return
+	return seriesList, keysList
 }
 
 func EqualIterators[T any](t *testing.T, test func(a, b T), expected, actual iter.Iterator[T]) {
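Taken together, the rewritten helper fabricates nSeries series, each with ten evenly spaced chunks and two blooms that each cover half of those chunks, and returns exactly the keys it added. A hedged sketch of how a test might consume it; the test name and arguments are ours, and it assumes Bloom exposes Test through the embedded ScalableBloomFilter (as the removed lines suggest) and that the v2 slice iterator follows the usual Next/At protocol seen in EqualIterators:

	func TestMkBasicSeriesWithBlooms(t *testing.T) {
		series, keys := MkBasicSeriesWithBlooms(4, 0, 0xffff, 0, 10_000)
		require.Len(t, keys, len(series))

		for i, swb := range series {
			// Drain the iterator form back into a slice.
			var blooms []*Bloom
			for swb.Blooms.Next() {
				blooms = append(blooms, swb.Blooms.At())
			}

			// Every key recorded for series i should hit at least one of its blooms.
			for _, key := range keys[i] {
				var found bool
				for _, b := range blooms {
					if b.Test(key) {
						found = true
						break
					}
				}
				require.True(t, found)
			}
		}
	}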