Like Prometheus, but for logs.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 
 
 
loki/pkg/storage/chunk/client/aws/fixtures.go

99 lines
3.0 KiB

package aws
import (
"fmt"
"io"
"time"
"github.com/grafana/dskit/backoff"
"golang.org/x/time/rate"
"github.com/grafana/loki/v3/pkg/storage/chunk/client"
"github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils"
"github.com/grafana/loki/v3/pkg/storage/config"
"github.com/grafana/loki/v3/pkg/storage/stores/series/index"
)
// fixture pairs a human-readable name with a factory function that builds
// the full set of storage clients required by the testutils.Fixture
// interface implemented below.
type fixture struct {
	// name identifies this fixture in test output.
	name string
	// clients constructs the index client, chunk client, table client, and
	// schema config, plus an io.Closer that tears them all down.
	clients func() (index.Client, client.Client, index.TableClient, config.SchemaConfig, io.Closer, error)
}
// Name returns the human-readable identifier of this fixture, satisfying
// part of the testutils.Fixture interface.
func (f fixture) Name() string {
	return f.name
}
// Clients invokes the fixture's factory, producing the index client, chunk
// client, table client, schema config, and a closer that releases them.
func (f fixture) Clients() (index.Client, client.Client, index.TableClient, config.SchemaConfig, io.Closer, error) {
	return f.clients()
}
// Fixtures for testing the various configurations of AWS storage.
var Fixtures = []testutils.Fixture{
	fixture{
		name: "S3 chunks",
		clients: func() (index.Client, client.Client, index.TableClient, config.SchemaConfig, io.Closer, error) {
			schemaConfig := testutils.DefaultSchemaConfig("s3")
			dynamoDB := newMockDynamoDB(0, 0)
			table := &dynamoTableClient{
				DynamoDB: dynamoDB,
				metrics:  newMetrics(nil),
			}
			// Named indexClient rather than "index" so it does not shadow
			// the imported index package used in the signature above.
			indexClient := &dynamoDBStorageClient{
				DynamoDB:                dynamoDB,
				batchGetItemRequestFn:   dynamoDB.batchGetItemRequest,
				batchWriteItemRequestFn: dynamoDB.batchWriteItemRequest,
				schemaCfg:               schemaConfig,
				metrics:                 newMetrics(nil),
			}
			mock := newMockS3()
			// Hedged client reuses the same mock; hedging behavior itself is
			// not exercised by these fixtures.
			object := client.NewClient(&S3ObjectClient{S3: mock, hedgedS3: mock}, nil, schemaConfig)
			return indexClient, object, table, schemaConfig, testutils.CloserFunc(func() error {
				table.Stop()
				indexClient.Stop()
				object.Stop()
				return nil
			}), nil
		},
	},
	dynamoDBFixture(0, 10, 20),
	dynamoDBFixture(0, 0, 20),
	dynamoDBFixture(2, 10, 20),
}
// nolint
// dynamoDBFixture builds a testutils.Fixture backed entirely by mock
// DynamoDB, parameterized by the number of injected provisioned-throughput
// errors and the chunk-read fan-out settings (gang size / max parallelism).
func dynamoDBFixture(provisionedErr, gangsize, maxParallelism int) testutils.Fixture {
	fixtureName := fmt.Sprintf("DynamoDB chunks provisionedErr=%d, ChunkGangSize=%d, ChunkGetMaxParallelism=%d",
		provisionedErr, gangsize, maxParallelism)
	return fixture{
		name: fixtureName,
		clients: func() (index.Client, client.Client, index.TableClient, config.SchemaConfig, io.Closer, error) {
			schemaCfg := testutils.DefaultSchemaConfig("aws")
			ddb := newMockDynamoDB(0, provisionedErr)
			tableClient := &dynamoTableClient{
				DynamoDB: ddb,
				metrics:  newMetrics(nil),
			}
			storageClient := &dynamoDBStorageClient{
				cfg: DynamoDBConfig{
					ChunkGangSize:          gangsize,
					ChunkGetMaxParallelism: maxParallelism,
					// Tight backoff so retry-heavy tests stay fast.
					BackoffConfig: backoff.Config{
						MinBackoff: 1 * time.Millisecond,
						MaxBackoff: 5 * time.Millisecond,
						MaxRetries: 20,
					},
				},
				DynamoDB:                ddb,
				writeThrottle:           rate.NewLimiter(10, dynamoDBMaxWriteBatchSize),
				batchGetItemRequestFn:   ddb.batchGetItemRequest,
				batchWriteItemRequestFn: ddb.batchWriteItemRequest,
				schemaCfg:               schemaCfg,
				metrics:                 newMetrics(nil),
			}
			// The storage client serves as both the index and chunk client.
			closer := testutils.CloserFunc(func() error {
				tableClient.Stop()
				storageClient.Stop()
				return nil
			})
			return storageClient, storageClient, tableClient, schemaCfg, closer, nil
		},
	}
}