@@ -24,7 +24,6 @@ import (
"github.com/grafana/loki/pkg/storage/config"
storageerrors "github.com/grafana/loki/pkg/storage/errors"
"github.com/grafana/loki/pkg/storage/stores"
"github.com/grafana/loki/pkg/storage/stores/index"
"github.com/grafana/loki/pkg/storage/stores/index/stats"
series_index "github.com/grafana/loki/pkg/storage/stores/series/index"
"github.com/grafana/loki/pkg/util"
@@ -63,7 +62,8 @@ var (
} )
)
type indexReaderWriter struct {
// IndexReaderWriter implements pkg/storage/stores/index.ReaderWriter
type IndexReaderWriter struct {
schema series_index . SeriesStoreSchema
index series_index . Client
schemaCfg config . SchemaConfig
@@ -74,8 +74,8 @@ type indexReaderWriter struct {
}
func NewIndexReaderWriter ( schemaCfg config . SchemaConfig , schema series_index . SeriesStoreSchema , index series_index . Client ,
	fetcher *fetcher.Fetcher, chunkBatchSize int, writeDedupeCache cache.Cache) index.ReaderWriter {
	return &indexReaderWriter{
	fetcher *fetcher.Fetcher, chunkBatchSize int, writeDedupeCache cache.Cache) *IndexReaderWriter {
	return &IndexReaderWriter{
schema : schema ,
index : index ,
schemaCfg : schemaCfg ,
@@ -85,7 +85,7 @@ func NewIndexReaderWriter(schemaCfg config.SchemaConfig, schema series_index.Ser
}
}
func (c *indexReaderWriter) IndexChunk(ctx context.Context, from, through model.Time, chk chunk.Chunk) error {
func (c *IndexReaderWriter) IndexChunk(ctx context.Context, from, through model.Time, chk chunk.Chunk) error {
writeReqs , keysToCache , err := c . calculateIndexEntries ( ctx , from , through , chk )
if err != nil {
return err
@@ -104,7 +104,7 @@ func (c *indexReaderWriter) IndexChunk(ctx context.Context, from, through model.
}
// calculateIndexEntries creates a set of batched WriteRequests for all the chunks it is given.
func (c *indexReaderWriter) calculateIndexEntries(ctx context.Context, from, through model.Time, chunk chunk.Chunk) (series_index.WriteBatch, []string, error) {
func (c *IndexReaderWriter) calculateIndexEntries(ctx context.Context, from, through model.Time, chunk chunk.Chunk) (series_index.WriteBatch, []string, error) {
seenIndexEntries := map [ string ] struct { } { }
entries := [ ] series_index . Entry { }
@@ -149,7 +149,7 @@ func (c *indexReaderWriter) calculateIndexEntries(ctx context.Context, from, thr
return result , missing , nil
}
func (c *indexReaderWriter) GetChunkRefs(ctx context.Context, userID string, from, through model.Time, allMatchers ...*labels.Matcher) ([]logproto.ChunkRef, error) {
func (c *IndexReaderWriter) GetChunkRefs(ctx context.Context, userID string, from, through model.Time, allMatchers ...*labels.Matcher) ([]logproto.ChunkRef, error) {
log := util_log . WithContext ( ctx , util_log . Logger )
// Check there is a metric name matcher of type equal,
metricNameMatcher , matchers , ok := extract . MetricNameMatcherFromMatchers ( allMatchers )
@@ -192,7 +192,7 @@ func (c *indexReaderWriter) GetChunkRefs(ctx context.Context, userID string, fro
return chunks , nil
}
func (c *indexReaderWriter) SetChunkFilterer(f chunk.RequestChunkFilterer) {
func (c *IndexReaderWriter) SetChunkFilterer(f chunk.RequestChunkFilterer) {
c . chunkFilterer = f
}
@@ -209,7 +209,7 @@ func (c chunkGroup) Less(i, j int) bool {
return c . schema . ExternalKey ( c . chunks [ i ] . ChunkRef ) < c . schema . ExternalKey ( c . chunks [ j ] . ChunkRef )
}
func (c *indexReaderWriter) GetSeries(ctx context.Context, userID string, from, through model.Time, matchers ...*labels.Matcher) ([]labels.Labels, error) {
func (c *IndexReaderWriter) GetSeries(ctx context.Context, userID string, from, through model.Time, matchers ...*labels.Matcher) ([]labels.Labels, error) {
chks , err := c . GetChunkRefs ( ctx , userID , from , through , matchers ... )
if err != nil {
return nil , err
@@ -218,7 +218,7 @@ func (c *indexReaderWriter) GetSeries(ctx context.Context, userID string, from,
return c . chunksToSeries ( ctx , chks , matchers )
}
func (c *indexReaderWriter) chunksToSeries(ctx context.Context, in []logproto.ChunkRef, matchers []*labels.Matcher) ([]labels.Labels, error) {
func (c *IndexReaderWriter) chunksToSeries(ctx context.Context, in []logproto.ChunkRef, matchers []*labels.Matcher) ([]labels.Labels, error) {
// download one per series and merge
// group chunks by series
chunksBySeries := filterChunkRefsByUniqueFingerprint ( in )
@@ -313,7 +313,7 @@ func (c *indexReaderWriter) chunksToSeries(ctx context.Context, in []logproto.Ch
}
// LabelNamesForMetricName retrieves all label names for a metric name.
func (c *indexReaderWriter) LabelNamesForMetricName(ctx context.Context, userID string, from, through model.Time, metricName string) ([]string, error) {
func (c *IndexReaderWriter) LabelNamesForMetricName(ctx context.Context, userID string, from, through model.Time, metricName string) ([]string, error) {
sp , ctx := opentracing . StartSpanFromContext ( ctx , "SeriesStore.LabelNamesForMetricName" )
defer sp . Finish ( )
log := spanlogger . FromContext ( ctx )
@@ -341,7 +341,7 @@ func (c *indexReaderWriter) LabelNamesForMetricName(ctx context.Context, userID
return labelNames , nil
}
func (c *indexReaderWriter) LabelValuesForMetricName(ctx context.Context, userID string, from, through model.Time, metricName string, labelName string, matchers ...*labels.Matcher) ([]string, error) {
func (c *IndexReaderWriter) LabelValuesForMetricName(ctx context.Context, userID string, from, through model.Time, metricName string, labelName string, matchers ...*labels.Matcher) ([]string, error) {
sp , ctx := opentracing . StartSpanFromContext ( ctx , "SeriesStore.LabelValuesForMetricName" )
defer sp . Finish ( )
log := spanlogger . FromContext ( ctx )
@@ -377,7 +377,7 @@ func (c *indexReaderWriter) LabelValuesForMetricName(ctx context.Context, userID
}
// LabelValuesForMetricName retrieves all label values for a single label name and metric name.
func (c *indexReaderWriter) labelValuesForMetricNameWithMatchers(ctx context.Context, userID string, from, through model.Time, metricName, labelName string, matchers ...*labels.Matcher) ([]string, error) {
func (c *IndexReaderWriter) labelValuesForMetricNameWithMatchers(ctx context.Context, userID string, from, through model.Time, metricName, labelName string, matchers ...*labels.Matcher) ([]string, error) {
// Otherwise get series which include other matchers
seriesIDs , err := c . lookupSeriesByMetricNameMatchers ( ctx , from , through , userID , metricName , matchers )
if err != nil {
@@ -419,7 +419,7 @@ func (c *indexReaderWriter) labelValuesForMetricNameWithMatchers(ctx context.Con
return result . Strings ( ) , nil
}
func (c *indexReaderWriter) lookupSeriesByMetricNameMatchers(ctx context.Context, from, through model.Time, userID, metricName string, matchers []*labels.Matcher) ([]string, error) {
func (c *IndexReaderWriter) lookupSeriesByMetricNameMatchers(ctx context.Context, from, through model.Time, userID, metricName string, matchers []*labels.Matcher) ([]string, error) {
// Check if one of the labels is a shard annotation, pass that information to lookupSeriesByMetricNameMatcher,
// and remove the label.
shard , shardLabelIndex , err := astmapper . ShardFromMatchers ( matchers )
@@ -502,13 +502,13 @@ func (c *indexReaderWriter) lookupSeriesByMetricNameMatchers(ctx context.Context
return ids , nil
}
func (c *indexReaderWriter) lookupSeriesByMetricNameMatcher(ctx context.Context, from, through model.Time, userID, metricName string, matcher *labels.Matcher, shard *astmapper.ShardAnnotation) ([]string, error) {
func (c *IndexReaderWriter) lookupSeriesByMetricNameMatcher(ctx context.Context, from, through model.Time, userID, metricName string, matcher *labels.Matcher, shard *astmapper.ShardAnnotation) ([]string, error) {
return c . lookupIdsByMetricNameMatcher ( ctx , from , through , userID , metricName , matcher , func ( queries [ ] series_index . Query ) [ ] series_index . Query {
return c . schema . FilterReadQueries ( queries , shard )
} )
}
func (c *indexReaderWriter) lookupIdsByMetricNameMatcher(ctx context.Context, from, through model.Time, userID, metricName string, matcher *labels.Matcher, filter func([]series_index.Query) []series_index.Query) ([]string, error) {
func (c *IndexReaderWriter) lookupIdsByMetricNameMatcher(ctx context.Context, from, through model.Time, userID, metricName string, matcher *labels.Matcher, filter func([]series_index.Query) []series_index.Query) ([]string, error) {
var err error
var queries [ ] series_index . Query
var labelName string
@@ -600,7 +600,7 @@ var entriesPool = sync.Pool{
} ,
}
func (c *indexReaderWriter) lookupEntriesByQueries(ctx context.Context, queries []series_index.Query, entries *[]series_index.Entry) error {
func (c *IndexReaderWriter) lookupEntriesByQueries(ctx context.Context, queries []series_index.Query, entries *[]series_index.Entry) error {
* entries = ( * entries ) [ : 0 ]
// Nothing to do if there are no queries.
if len ( queries ) == 0 {
@@ -628,7 +628,7 @@ func (c *indexReaderWriter) lookupEntriesByQueries(ctx context.Context, queries
return err
}
func (c *indexReaderWriter) lookupLabelNamesBySeries(ctx context.Context, from, through model.Time, userID string, seriesIDs []string) ([]string, error) {
func (c *IndexReaderWriter) lookupLabelNamesBySeries(ctx context.Context, from, through model.Time, userID string, seriesIDs []string) ([]string, error) {
sp , ctx := opentracing . StartSpanFromContext ( ctx , "SeriesStore.lookupLabelNamesBySeries" )
defer sp . Finish ( )
log := spanlogger . FromContext ( ctx )
@@ -665,7 +665,7 @@ func (c *indexReaderWriter) lookupLabelNamesBySeries(ctx context.Context, from,
return result . Strings ( ) , nil
}
func (c *indexReaderWriter) lookupLabelNamesByChunks(ctx context.Context, from, through model.Time, userID string, seriesIDs []string) ([]string, error) {
func (c *IndexReaderWriter) lookupLabelNamesByChunks(ctx context.Context, from, through model.Time, userID string, seriesIDs []string) ([]string, error) {
sp , ctx := opentracing . StartSpanFromContext ( ctx , "SeriesStore.lookupLabelNamesByChunks" )
defer sp . Finish ( )
log := spanlogger . FromContext ( ctx )
@@ -701,7 +701,7 @@ func (c *indexReaderWriter) lookupLabelNamesByChunks(ctx context.Context, from,
return labelNamesFromChunks ( allChunks ) , nil
}
func (c *indexReaderWriter) lookupChunksBySeries(ctx context.Context, from, through model.Time, userID string, seriesIDs []string) ([]string, error) {
func (c *IndexReaderWriter) lookupChunksBySeries(ctx context.Context, from, through model.Time, userID string, seriesIDs []string) ([]string, error) {
queries := make ( [ ] series_index . Query , 0 , len ( seriesIDs ) )
for _ , seriesID := range seriesIDs {
qs , err := c . schema . GetChunksForSeries ( from , through , userID , [ ] byte ( seriesID ) )
@@ -722,7 +722,7 @@ func (c *indexReaderWriter) lookupChunksBySeries(ctx context.Context, from, thro
return result , err
}
func (c *indexReaderWriter) convertChunkIDsToChunks(_ context.Context, userID string, chunkIDs []string) ([]chunk.Chunk, error) {
func (c *IndexReaderWriter) convertChunkIDsToChunks(_ context.Context, userID string, chunkIDs []string) ([]chunk.Chunk, error) {
chunkSet := make ( [ ] chunk . Chunk , 0 , len ( chunkIDs ) )
for _ , chunkID := range chunkIDs {
chunk , err := chunk . ParseExternalKey ( userID , chunkID )
@@ -735,7 +735,7 @@ func (c *indexReaderWriter) convertChunkIDsToChunks(_ context.Context, userID st
return chunkSet , nil
}
func (c *indexReaderWriter) convertChunkIDsToChunkRefs(_ context.Context, userID string, chunkIDs []string) ([]logproto.ChunkRef, error) {
func (c *IndexReaderWriter) convertChunkIDsToChunkRefs(_ context.Context, userID string, chunkIDs []string) ([]logproto.ChunkRef, error) {
chunkSet := make ( [ ] logproto . ChunkRef , 0 , len ( chunkIDs ) )
for _ , chunkID := range chunkIDs {
chunk , err := chunk . ParseExternalKey ( userID , chunkID )
@@ -749,11 +749,11 @@ func (c *indexReaderWriter) convertChunkIDsToChunkRefs(_ context.Context, userID
}
// old index stores do not implement stats -- skip
func (c *indexReaderWriter) Stats(_ context.Context, _ string, _, _ model.Time, _ ...*labels.Matcher) (*stats.Stats, error) {
func (c *IndexReaderWriter) Stats(_ context.Context, _ string, _, _ model.Time, _ ...*labels.Matcher) (*stats.Stats, error) {
return nil , nil
}
// old index stores do not implement label volume -- skip
func (c *indexReaderWriter) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) {
func (c *IndexReaderWriter) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) {
return nil , nil
}