From 03982c218ec3e6549068b5ed1ebecd3514126123 Mon Sep 17 00:00:00 2001 From: arukiidou Date: Tue, 26 Mar 2024 16:10:31 +0900 Subject: [PATCH 01/54] docs: fix otlp guide - collector config example (#12328) Signed-off-by: junya koyama Co-authored-by: Sandeep Sukhani --- docs/sources/send-data/otel/_index.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/sources/send-data/otel/_index.md b/docs/sources/send-data/otel/_index.md index dac87a4fb5d56..fb2bd7b2665ac 100644 --- a/docs/sources/send-data/otel/_index.md +++ b/docs/sources/send-data/otel/_index.md @@ -33,7 +33,7 @@ And enable it in `service.pipelines`: ```yaml service: pipelines: - metrics: + logs: receivers: [...] processors: [...] exporters: [..., otlphttp] @@ -57,7 +57,7 @@ exporters: service: extensions: [basicauth/otlp] pipelines: - metrics: + logs: receivers: [...] processors: [...] exporters: [..., otlphttp] From 774b01dac0daba8e0d734294860c85d659bfa530 Mon Sep 17 00:00:00 2001 From: Ashwanth Date: Tue, 26 Mar 2024 13:15:54 +0530 Subject: [PATCH 02/54] fix(ruler): pass noop analyseRules to rules manager (#12353) --- pkg/ruler/compat.go | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/pkg/ruler/compat.go b/pkg/ruler/compat.go index 8f70d314da884..355acc7d61910 100644 --- a/pkg/ruler/compat.go +++ b/pkg/ruler/compat.go @@ -152,18 +152,19 @@ func MultiTenantRuleManager(cfg Config, evaluator Evaluator, overrides RulesLimi groupLoader := NewCachingGroupLoader(GroupLoader{}) mgr := rules.NewManager(&rules.ManagerOptions{ - Appendable: registry, - Queryable: memStore, - QueryFunc: queryFn, - Context: user.InjectOrgID(ctx, userID), - ExternalURL: cfg.ExternalURL.URL, - NotifyFunc: ruler.SendAlerts(notifier, cfg.ExternalURL.URL.String(), cfg.DatasourceUID), - Logger: logger, - Registerer: reg, - OutageTolerance: cfg.OutageTolerance, - ForGracePeriod: cfg.ForGracePeriod, - ResendDelay: cfg.ResendDelay, - GroupLoader: groupLoader, + Appendable: registry, + Queryable: memStore, + QueryFunc: queryFn, + Context: user.InjectOrgID(ctx, userID), + ExternalURL: cfg.ExternalURL.URL, + NotifyFunc: ruler.SendAlerts(notifier, cfg.ExternalURL.URL.String(), cfg.DatasourceUID), + Logger: logger, + Registerer: reg, + OutageTolerance: cfg.OutageTolerance, + ForGracePeriod: cfg.ForGracePeriod, + ResendDelay: cfg.ResendDelay, + GroupLoader: groupLoader, + RuleDependencyController: &noopRuleDependencyController{}, }) cachingManager := &CachingRulesManager{ @@ -347,3 +348,12 @@ func (exprAdapter) PositionRange() posrange.PositionRange { return posrange.Posi func (exprAdapter) PromQLExpr() {} func (exprAdapter) Type() parser.ValueType { return parser.ValueType("unimplemented") } func (exprAdapter) Pretty(_ int) string { return "" } + +type noopRuleDependencyController struct{} + +// Prometheus rules manager calls AnalyseRules to determine the dependents and dependencies of a rule +// which it then uses to decide if a rule within a group is eligible for concurrent execution. +// AnalyseRules is a noop for Loki since there is no dependency relation between rules. 
+func (*noopRuleDependencyController) AnalyseRules([]rules.Rule) { + // Do nothing +} From d5ecf9aa7b2bd05a8ef00030c7c0f8a043996114 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 10:21:48 +0000 Subject: [PATCH 03/54] chore(deps): bump google.golang.org/protobuf from 1.32.0 to 1.33.0 in /operator (#12207) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Periklis Tsirakidis --- operator/go.mod | 2 +- operator/go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/operator/go.mod b/operator/go.mod index f580ea5b6d6d2..4e6b1d65bd67f 100644 --- a/operator/go.mod +++ b/operator/go.mod @@ -150,7 +150,7 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20231127180814-3a041ad873d4 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f // indirect google.golang.org/grpc v1.59.0 // indirect - google.golang.org/protobuf v1.32.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect k8s.io/apiextensions-apiserver v0.28.4 // indirect diff --git a/operator/go.sum b/operator/go.sum index 220e1e2ebb5e9..6d861a1180a9d 100644 --- a/operator/go.sum +++ b/operator/go.sum @@ -1916,8 +1916,8 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From a36483b7319c1f2f34082c76b72bb2b246d99730 Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Tue, 26 Mar 2024 03:36:49 -0700 Subject: [PATCH 04/54] feat(blooms): bloom integration in query planning (#12208) Signed-off-by: Owen Diehl --- docs/sources/configure/_index.md | 35 +- pkg/bloomcompactor/bloomcompactor.go | 20 +- pkg/bloomcompactor/config.go | 25 +- pkg/bloomcompactor/spec.go | 2 +- pkg/bloomcompactor/spec_test.go | 6 +- pkg/bloomcompactor/tsdb.go | 24 +- pkg/bloomcompactor/tsdb_test.go | 9 +- pkg/bloomgateway/bloomgateway.go | 26 +- pkg/bloomgateway/client.go | 14 +- pkg/bloomgateway/processor.go | 12 +- pkg/bloomgateway/util_test.go | 2 +- pkg/ingester/flush_test.go | 9 + pkg/ingester/index/bitprefix.go | 42 +- pkg/ingester/index/bitprefix_test.go | 132 +- pkg/ingester/index/index.go | 38 +- pkg/ingester/index/index_test.go | 16 +- pkg/ingester/index/multi.go | 14 +- pkg/ingester/index/multi_test.go | 7 +- pkg/ingester/ingester_test.go | 9 + pkg/ingester/instance.go | 20 +- pkg/loghttp/params.go | 19 + pkg/loghttp/query.go | 12 + pkg/logproto/compat.go | 43 + pkg/logproto/extensions.go | 7 + 
pkg/logproto/extensions_test.go | 42 + pkg/logproto/indexgateway.pb.go | 1443 ++++++++++++++++- pkg/logproto/indexgateway.proto | 53 + pkg/logproto/logproto.pb.go | 2 + pkg/logproto/logproto.proto | 2 + pkg/logql/downstream.go | 53 +- pkg/logql/downstream_test.go | 66 +- pkg/logql/explain_test.go | 3 +- pkg/logql/mapper_metrics.go | 7 +- pkg/logql/metrics.go | 63 + pkg/logql/shardmapper.go | 88 +- pkg/logql/shardmapper_test.go | 188 +-- pkg/logql/shards.go | 248 +++ pkg/logql/shards_test.go | 188 +++ pkg/logql/test_utils.go | 20 +- pkg/logqlmodel/stats/context.go | 23 + pkg/logqlmodel/stats/stats.pb.go | 480 +++++- pkg/logqlmodel/stats/stats.proto | 14 + pkg/loki/loki.go | 2 +- pkg/loki/modules.go | 4 + pkg/querier/handler.go | 12 + pkg/querier/http.go | 33 + pkg/querier/multi_tenant_querier.go | 38 + pkg/querier/querier.go | 41 + pkg/querier/querier_mock_test.go | 21 + pkg/querier/queryrange/codec.go | 59 + pkg/querier/queryrange/codec_test.go | 4 + pkg/querier/queryrange/downstreamer_test.go | 11 +- pkg/querier/queryrange/extensions.go | 16 + pkg/querier/queryrange/limits/definitions.go | 1 + pkg/querier/queryrange/marshal.go | 12 +- pkg/querier/queryrange/prometheus_test.go | 4 + pkg/querier/queryrange/queryrange.pb.go | 766 +++++++-- pkg/querier/queryrange/queryrange.proto | 11 + pkg/querier/queryrange/querysharding.go | 20 +- pkg/querier/queryrange/roundtrip.go | 3 + pkg/querier/queryrange/roundtrip_test.go | 3 + pkg/querier/queryrange/shard_resolver.go | 124 +- pkg/querier/queryrange/split_by_interval.go | 2 +- pkg/querier/queryrange/splitters.go | 9 + pkg/querier/queryrange/stats.go | 4 + pkg/storage/bloom/v1/block.go | 28 +- pkg/storage/bloom/v1/bloom.go | 45 +- pkg/storage/bloom/v1/bloom_querier.go | 27 +- pkg/storage/bloom/v1/bounds.go | 41 +- pkg/storage/bloom/v1/bounds_test.go | 21 + pkg/storage/bloom/v1/builder_test.go | 20 +- pkg/storage/bloom/v1/fuse_test.go | 8 +- pkg/storage/bloom/v1/index.go | 19 +- pkg/storage/bloom/v1/index_querier.go | 2 + pkg/storage/bloom/v1/metrics.go | 88 +- pkg/storage/bloom/v1/test_util.go | 2 +- pkg/storage/bloom/v1/util.go | 4 +- pkg/storage/chunk/predicate.go | 1 + pkg/storage/store.go | 4 +- pkg/storage/stores/composite_store.go | 79 + pkg/storage/stores/composite_store_entry.go | 20 + pkg/storage/stores/composite_store_test.go | 9 + pkg/storage/stores/index/index.go | 57 + .../series/series_index_gateway_store.go | 44 +- .../series/series_index_gateway_store_test.go | 9 +- .../stores/series/series_index_store.go | 29 + .../stores/shipper/bloomshipper/cache.go | 12 +- .../stores/shipper/bloomshipper/fetcher.go | 50 +- .../shipper/bloomshipper/fetcher_test.go | 6 +- .../stores/shipper/bloomshipper/store.go | 26 +- .../gatewayclient/gateway_client.go | 146 +- .../indexshipper/indexgateway/gateway.go | 311 +++- .../indexshipper/indexgateway/gateway_test.go | 306 ++++ .../indexshipper/indexgateway/metrics.go | 37 + .../indexgateway/shufflesharding.go | 1 + .../shipper/indexshipper/tsdb/compactor.go | 9 +- .../indexshipper/tsdb/compactor_test.go | 6 +- .../shipper/indexshipper/tsdb/head_manager.go | 8 + .../stores/shipper/indexshipper/tsdb/index.go | 19 +- .../shipper/indexshipper/tsdb/index/shard.go | 3 + .../shipper/indexshipper/tsdb/index_client.go | 88 +- .../tsdb/index_shipper_querier.go | 33 +- .../shipper/indexshipper/tsdb/lazy_index.go | 8 + .../indexshipper/tsdb/multi_file_index.go | 37 +- .../shipper/indexshipper/tsdb/multitenant.go | 4 + .../indexshipper/tsdb/sharding/for_series.go | 66 + .../indexshipper/tsdb/sharding/power.go | 117 ++ 
.../indexshipper/tsdb/sharding/power_test.go} | 19 +- .../indexshipper/tsdb/sharding/sharding.go | 102 ++ .../tsdb/sharding/sharding_test.go | 153 ++ .../indexshipper/tsdb/single_file_index.go | 20 +- pkg/storage/util_test.go | 9 + pkg/util/marshal/legacy/marshal_test.go | 4 + pkg/util/marshal/marshal.go | 10 + pkg/util/marshal/marshal_test.go | 4 + pkg/validation/limits.go | 27 +- pkg/validation/limits_test.go | 2 + tools/bloom/inspector/main.go | 4 +- tools/tsdb/bloom-tester/lib.go | 5 +- tools/tsdb/bloom-tester/readlib.go | 6 +- tools/tsdb/index-analyzer/analytics.go | 5 +- 121 files changed, 6070 insertions(+), 876 deletions(-) create mode 100644 pkg/logproto/extensions_test.go create mode 100644 pkg/logql/shards.go create mode 100644 pkg/logql/shards_test.go create mode 100644 pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go create mode 100644 pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go create mode 100644 pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go rename pkg/{querier/queryrange/shard_resolver_test.go => storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go} (59%) create mode 100644 pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go create mode 100644 pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 36603f1be1d3f..d5411f8b1c9d0 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2695,18 +2695,18 @@ ring: # CLI flag: -bloom-compactor.compaction-interval [compaction_interval: | default = 10m] -# How many index periods (days) to wait before building bloom filters for a -# table. This can be used to lower cost by not re-writing data to object storage -# too frequently since recent data changes more often. -# CLI flag: -bloom-compactor.min-table-compaction-period -[min_table_compaction_period: | default = 1] - -# The maximum number of index periods (days) to build bloom filters for a table. -# This can be used to lower cost by not trying to compact older data which -# doesn't change. This can be optimized by aligning it with the maximum -# `reject_old_samples_max_age` setting of any tenant. -# CLI flag: -bloom-compactor.max-table-compaction-period -[max_table_compaction_period: | default = 7] +# Newest day-table offset (from today, inclusive) to compact. Increase to lower +# cost by not re-writing data to object storage too frequently since recent data +# changes more often at the cost of not having blooms available as quickly. +# CLI flag: -bloom-compactor.min-table-offset +[min_table_offset: | default = 1] + +# Oldest day-table offset (from today, inclusive) to compact. This can be used +# to lower cost by not trying to compact older data which doesn't change. This +# can be optimized by aligning it with the maximum `reject_old_samples_max_age` +# setting of any tenant. +# CLI flag: -bloom-compactor.max-table-offset +[max_table_offset: | default = 2] # Number of workers to run in parallel for compaction. # CLI flag: -bloom-compactor.worker-parallelism @@ -2871,11 +2871,18 @@ The `limits_config` block configures global and per-tenant limits in Loki. # CLI flag: -querier.tsdb-max-query-parallelism [tsdb_max_query_parallelism: | default = 128] -# Maximum number of bytes assigned to a single sharded query. Also expressible -# in human readable forms (1GB, etc). +# Target maximum number of bytes assigned to a single sharded query. 
Also +# expressible in human readable forms (1GB, etc). Note: This is a _target_ and +# not an absolute limit. The actual limit can be higher, but the query planner +# will try to build shards up to this limit. # CLI flag: -querier.tsdb-max-bytes-per-shard [tsdb_max_bytes_per_shard: | default = 600MB] +# sharding strategy to use in query planning. Suggested to use bounded once all +# nodes can recognize it. +# CLI flag: -limits.tsdb-sharding-strategy +[tsdb_sharding_strategy: | default = "power_of_two"] + # Cardinality limit for index queries. # CLI flag: -store.cardinality-limit [cardinality_limit: | default = 100000] diff --git a/pkg/bloomcompactor/bloomcompactor.go b/pkg/bloomcompactor/bloomcompactor.go index ddfe552cb2ad8..25994c08b6a35 100644 --- a/pkg/bloomcompactor/bloomcompactor.go +++ b/pkg/bloomcompactor/bloomcompactor.go @@ -55,8 +55,7 @@ type Compactor struct { sharding util_ring.TenantSharding - metrics *Metrics - btMetrics *v1.Metrics + metrics *Metrics } func New( @@ -67,7 +66,7 @@ func New( fetcherProvider stores.ChunkFetcherProvider, sharding util_ring.TenantSharding, limits Limits, - store bloomshipper.Store, + store bloomshipper.StoreWithMetrics, logger log.Logger, r prometheus.Registerer, ) (*Compactor, error) { @@ -78,6 +77,7 @@ func New( sharding: sharding, limits: limits, bloomStore: store, + metrics: NewMetrics(r, store.BloomMetrics()), } tsdbStore, err := NewTSDBStores(schemaCfg, storeCfg, clientMetrics, logger) @@ -86,10 +86,6 @@ func New( } c.tsdbStore = tsdbStore - // initialize metrics - c.btMetrics = v1.NewMetrics(prometheus.WrapRegistererWithPrefix("loki_bloom_tokenizer_", r)) - c.metrics = NewMetrics(r, c.btMetrics) - chunkLoader := NewStoreChunkLoader( fetcherProvider, c.metrics, @@ -258,12 +254,12 @@ func (c *Compactor) runOne(ctx context.Context) error { func (c *Compactor) tables(ts time.Time) *dayRangeIterator { // adjust the minimum by one to make it inclusive, which is more intuitive // for a configuration variable - adjustedMin := min(c.cfg.MinTableCompactionPeriod - 1) - minCompactionPeriod := time.Duration(adjustedMin) * config.ObjectStorageIndexRequiredPeriod - maxCompactionPeriod := time.Duration(c.cfg.MaxTableCompactionPeriod) * config.ObjectStorageIndexRequiredPeriod + adjustedMin := min(c.cfg.MinTableOffset - 1) + minCompactionDelta := time.Duration(adjustedMin) * config.ObjectStorageIndexRequiredPeriod + maxCompactionDelta := time.Duration(c.cfg.MaxTableOffset) * config.ObjectStorageIndexRequiredPeriod - from := ts.Add(-maxCompactionPeriod).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) - through := ts.Add(-minCompactionPeriod).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) + from := ts.Add(-maxCompactionDelta).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) + through := ts.Add(-minCompactionDelta).UnixNano() / int64(config.ObjectStorageIndexRequiredPeriod) * int64(config.ObjectStorageIndexRequiredPeriod) fromDay := config.NewDayTime(model.TimeFromUnixNano(from)) throughDay := config.NewDayTime(model.TimeFromUnixNano(through)) diff --git a/pkg/bloomcompactor/config.go b/pkg/bloomcompactor/config.go index a80399503f4e7..72ff9c5cc2f19 100644 --- a/pkg/bloomcompactor/config.go +++ b/pkg/bloomcompactor/config.go @@ -22,14 +22,14 @@ type Config struct { // section and the ingester configuration by default). 
Ring ring.RingConfig `yaml:"ring,omitempty" doc:"description=Defines the ring to be used by the bloom-compactor servers. In case this isn't configured, this block supports inheriting configuration from the common ring section."` // Enabled configures whether bloom-compactors should be used to compact index values into bloomfilters - Enabled bool `yaml:"enabled"` - CompactionInterval time.Duration `yaml:"compaction_interval"` - MinTableCompactionPeriod int `yaml:"min_table_compaction_period"` - MaxTableCompactionPeriod int `yaml:"max_table_compaction_period"` - WorkerParallelism int `yaml:"worker_parallelism"` - RetryMinBackoff time.Duration `yaml:"compaction_retries_min_backoff"` - RetryMaxBackoff time.Duration `yaml:"compaction_retries_max_backoff"` - CompactionRetries int `yaml:"compaction_retries"` + Enabled bool `yaml:"enabled"` + CompactionInterval time.Duration `yaml:"compaction_interval"` + MinTableOffset int `yaml:"min_table_offset"` + MaxTableOffset int `yaml:"max_table_offset"` + WorkerParallelism int `yaml:"worker_parallelism"` + RetryMinBackoff time.Duration `yaml:"compaction_retries_min_backoff"` + RetryMaxBackoff time.Duration `yaml:"compaction_retries_max_backoff"` + CompactionRetries int `yaml:"compaction_retries"` MaxCompactionParallelism int `yaml:"max_compaction_parallelism"` } @@ -40,15 +40,14 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.DurationVar(&cfg.CompactionInterval, "bloom-compactor.compaction-interval", 10*time.Minute, "Interval at which to re-run the compaction operation.") f.IntVar(&cfg.WorkerParallelism, "bloom-compactor.worker-parallelism", 1, "Number of workers to run in parallel for compaction.") // TODO(owen-d): This is a confusing name. Rename it to `min_table_offset` - f.IntVar(&cfg.MinTableCompactionPeriod, "bloom-compactor.min-table-compaction-period", 1, "How many index periods (days) to wait before building bloom filters for a table. This can be used to lower cost by not re-writing data to object storage too frequently since recent data changes more often.") + f.IntVar(&cfg.MinTableOffset, "bloom-compactor.min-table-offset", 1, "Newest day-table offset (from today, inclusive) to compact. Increase to lower cost by not re-writing data to object storage too frequently since recent data changes more often at the cost of not having blooms available as quickly.") // TODO(owen-d): ideally we'd set this per tenant based on their `reject_old_samples_max_age` setting, // but due to how we need to discover tenants, we can't do that yet. Tenant+Period discovery is done by // iterating the table periods in object storage and looking for tenants within that period. // In order to have this done dynamically, we'd need to account for tenant specific overrides, which are also // dynamically reloaded. // I'm doing it the simple way for now. - // TODO(owen-d): This is a confusing name. Rename it to `max_table_offset` - f.IntVar(&cfg.MaxTableCompactionPeriod, "bloom-compactor.max-table-compaction-period", 7, "The maximum number of index periods (days) to build bloom filters for a table. This can be used to lower cost by not trying to compact older data which doesn't change. This can be optimized by aligning it with the maximum `reject_old_samples_max_age` setting of any tenant.") + f.IntVar(&cfg.MaxTableOffset, "bloom-compactor.max-table-offset", 2, "Oldest day-table offset (from today, inclusive) to compact. This can be used to lower cost by not trying to compact older data which doesn't change. 
This can be optimized by aligning it with the maximum `reject_old_samples_max_age` setting of any tenant.") f.DurationVar(&cfg.RetryMinBackoff, "bloom-compactor.compaction-retries-min-backoff", 10*time.Second, "Minimum backoff time between retries.") f.DurationVar(&cfg.RetryMaxBackoff, "bloom-compactor.compaction-retries-max-backoff", time.Minute, "Maximum backoff time between retries.") f.IntVar(&cfg.CompactionRetries, "bloom-compactor.compaction-retries", 3, "Number of retries to perform when compaction fails.") @@ -67,8 +66,8 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { } func (cfg *Config) Validate() error { - if cfg.MinTableCompactionPeriod > cfg.MaxTableCompactionPeriod { - return fmt.Errorf("min_compaction_age must be less than or equal to max_compaction_age") + if cfg.MinTableOffset > cfg.MaxTableOffset { + return fmt.Errorf("min-table-offset (%d) must be less than or equal to max-table-offset (%d)", cfg.MinTableOffset, cfg.MaxTableOffset) } if cfg.Ring.ReplicationFactor != ringReplicationFactor { return errors.New("Replication factor must not be changed as it will not take effect") diff --git a/pkg/bloomcompactor/spec.go b/pkg/bloomcompactor/spec.go index 1f2e58dabcceb..2f67d8f90dcdd 100644 --- a/pkg/bloomcompactor/spec.go +++ b/pkg/bloomcompactor/spec.go @@ -217,7 +217,7 @@ func (b *LazyBlockBuilderIterator) Next() bool { return false } - b.curr = v1.NewBlock(reader) + b.curr = v1.NewBlock(reader, b.metrics.bloomMetrics) return true } diff --git a/pkg/bloomcompactor/spec_test.go b/pkg/bloomcompactor/spec_test.go index 29f579a8e777d..fe1b2a09b5a07 100644 --- a/pkg/bloomcompactor/spec_test.go +++ b/pkg/bloomcompactor/spec_test.go @@ -50,7 +50,7 @@ func blocksFromSchemaWithRange(t *testing.T, n int, options v1.BlockOptions, fro _, err = builder.BuildFrom(itr) require.Nil(t, err) - res = append(res, v1.NewBlock(reader)) + res = append(res, v1.NewBlock(reader, v1.NewMetrics(nil))) ref := genBlockRef(data[minIdx].Series.Fingerprint, data[maxIdx-1].Series.Fingerprint) t.Log("create block", ref) refs = append(refs, ref) @@ -74,7 +74,7 @@ func dummyBloomGen(t *testing.T, opts v1.BlockOptions, store v1.Iterator[*v1.Ser for i, b := range blocks { bqs = append(bqs, &bloomshipper.CloseableBlockQuerier{ BlockRef: refs[i], - BlockQuerier: v1.NewBlockQuerier(b), + BlockQuerier: v1.NewBlockQuerier(b, false), }) } @@ -152,7 +152,7 @@ func TestSimpleBloomGenerator(t *testing.T) { expectedRefs := v1.PointerSlice(data) outputRefs := make([]*v1.SeriesWithBloom, 0, len(data)) for _, block := range outputBlocks { - bq := block.Querier() + bq := v1.NewBlockQuerier(block, false) for bq.Next() { outputRefs = append(outputRefs, bq.At()) } diff --git a/pkg/bloomcompactor/tsdb.go b/pkg/bloomcompactor/tsdb.go index ddfd78c2974b4..db2adf9fdc74f 100644 --- a/pkg/bloomcompactor/tsdb.go +++ b/pkg/bloomcompactor/tsdb.go @@ -21,6 +21,7 @@ import ( "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) const ( @@ -121,34 +122,22 @@ func (b *BloomTSDBStore) LoadTSDB( } }() - return NewTSDBSeriesIter(ctx, idx, bounds) + return NewTSDBSeriesIter(ctx, tenant, idx, bounds) } -// TSDBStore is an interface for interacting with the TSDB, -// modeled off a relevant subset of the `tsdb.TSDBIndex` struct -type forSeries interface { - ForSeries( - ctx context.Context, - 
fpFilter index.FingerprintFilter, - from model.Time, - through model.Time, - fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta), - matchers ...*labels.Matcher, - ) error -} - -func NewTSDBSeriesIter(ctx context.Context, f forSeries, bounds v1.FingerprintBounds) (v1.Iterator[*v1.Series], error) { +func NewTSDBSeriesIter(ctx context.Context, user string, f sharding.ForSeries, bounds v1.FingerprintBounds) (v1.Iterator[*v1.Series], error) { // TODO(salvacorts): Create a pool series := make([]*v1.Series, 0, 100) if err := f.ForSeries( ctx, + user, bounds, 0, math.MaxInt64, - func(_ labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + func(_ labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { select { case <-ctx.Done(): - return + return true default: res := &v1.Series{ Fingerprint: fp, @@ -163,6 +152,7 @@ func NewTSDBSeriesIter(ctx context.Context, f forSeries, bounds v1.FingerprintBo } series = append(series, res) + return false } }, labels.MustNewMatcher(labels.MatchEqual, "", ""), diff --git a/pkg/bloomcompactor/tsdb_test.go b/pkg/bloomcompactor/tsdb_test.go index 91ad1719375ac..30fc668a5a927 100644 --- a/pkg/bloomcompactor/tsdb_test.go +++ b/pkg/bloomcompactor/tsdb_test.go @@ -17,10 +17,11 @@ type forSeriesTestImpl []*v1.Series func (f forSeriesTestImpl) ForSeries( _ context.Context, + _ string, _ index.FingerprintFilter, _ model.Time, _ model.Time, - fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta), + fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) bool, _ ...*labels.Matcher, ) error { for i := range f { @@ -61,7 +62,7 @@ func TestTSDBSeriesIter(t *testing.T) { }, } srcItr := v1.NewSliceIter(input) - itr, err := NewTSDBSeriesIter(context.Background(), forSeriesTestImpl(input), v1.NewBounds(0, math.MaxUint64)) + itr, err := NewTSDBSeriesIter(context.Background(), "", forSeriesTestImpl(input), v1.NewBounds(0, math.MaxUint64)) require.NoError(t, err) v1.EqualIterators[*v1.Series]( @@ -78,7 +79,7 @@ func TestTSDBSeriesIter_Expiry(t *testing.T) { t.Run("expires on creation", func(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) cancel() - itr, err := NewTSDBSeriesIter(ctx, forSeriesTestImpl{ + itr, err := NewTSDBSeriesIter(ctx, "", forSeriesTestImpl{ {}, // a single entry }, v1.NewBounds(0, math.MaxUint64)) require.Error(t, err) @@ -87,7 +88,7 @@ func TestTSDBSeriesIter_Expiry(t *testing.T) { t.Run("expires during consumption", func(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) - itr, err := NewTSDBSeriesIter(ctx, forSeriesTestImpl{ + itr, err := NewTSDBSeriesIter(ctx, "", forSeriesTestImpl{ {}, {}, }, v1.NewBounds(0, math.MaxUint64)) diff --git a/pkg/bloomgateway/bloomgateway.go b/pkg/bloomgateway/bloomgateway.go index 97f555cf43073..3b556b7dd7f99 100644 --- a/pkg/bloomgateway/bloomgateway.go +++ b/pkg/bloomgateway/bloomgateway.go @@ -51,7 +51,6 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/services" "github.com/grafana/dskit/tenant" - "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "go.uber.org/atomic" @@ -63,6 +62,7 @@ import ( "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" "github.com/grafana/loki/pkg/util" "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/pkg/util/spanlogger" ) var errGatewayUnhealthy = errors.New("bloom-gateway is unhealthy in the ring") @@ -196,15 +196,17 @@ func (g *Gateway) stopping(_ error) error { // FilterChunkRefs 
implements BloomGatewayServer func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunkRefRequest) (*logproto.FilterChunkRefResponse, error) { - sp, ctx := opentracing.StartSpanFromContext(ctx, "bloomgateway.FilterChunkRefs") - defer sp.Finish() - tenantID, err := tenant.TenantID(ctx) if err != nil { return nil, err } - logger := log.With(g.logger, "tenant", tenantID) + sp, ctx := spanlogger.NewWithLogger( + ctx, + log.With(g.logger, "tenant", tenantID), + "bloomgateway.FilterChunkRefs", + ) + defer sp.Finish() // start time == end time --> empty response if req.From.Equal(req.Through) { @@ -237,7 +239,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk }, nil } - sp.LogKV( + sp.Log( "filters", len(filters), "days", len(seriesByDay), "series_requested", len(req.Refs), @@ -254,7 +256,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // TODO(owen-d): include capacity in constructor? task.responses = responsesPool.Get(len(seriesForDay.series)) - level.Debug(g.logger).Log( + level.Debug(sp).Log( "msg", "created task for day", "task", task.ID, "day", seriesForDay.day, @@ -276,7 +278,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk for _, task := range tasks { task := task task.enqueueTime = time.Now() - level.Info(logger).Log("msg", "enqueue task", "task", task.ID, "table", task.table, "series", len(task.series)) + level.Info(sp).Log("msg", "enqueue task", "task", task.ID, "table", task.table, "series", len(task.series)) // TODO(owen-d): gracefully handle full queues if err := g.queue.Enqueue(tenantID, nil, task, func() { @@ -289,7 +291,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk go g.consumeTask(ctx, task, tasksCh) } - sp.LogKV("enqueue_duration", time.Since(queueStart).String()) + sp.Log("enqueue_duration", time.Since(queueStart).String()) remaining := len(tasks) @@ -305,7 +307,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk case <-ctx.Done(): return nil, errors.Wrap(ctx.Err(), "request failed") case task := <-tasksCh: - level.Info(logger).Log("msg", "task done", "task", task.ID, "err", task.Err()) + level.Info(sp).Log("msg", "task done", "task", task.ID, "err", task.Err()) if task.Err() != nil { return nil, errors.Wrap(task.Err(), "request failed") } @@ -314,7 +316,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk } } - sp.LogKV("msg", "received all responses") + sp.Log("msg", "received all responses") filtered := filterChunkRefs(req, responses) @@ -333,7 +335,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk g.metrics.requestedChunks.Observe(float64(preFilterChunks)) g.metrics.filteredChunks.Observe(float64(preFilterChunks - postFilterChunks)) - level.Info(logger).Log( + level.Info(sp).Log( "msg", "return filtered chunk refs", "requested_series", preFilterSeries, "filtered_series", preFilterSeries-postFilterSeries, diff --git a/pkg/bloomgateway/client.go b/pkg/bloomgateway/client.go index 6497848923ab1..05eae0360952c 100644 --- a/pkg/bloomgateway/client.go +++ b/pkg/bloomgateway/client.go @@ -35,6 +35,7 @@ import ( "github.com/grafana/loki/pkg/storage/chunk/cache" "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" "github.com/grafana/loki/pkg/util/constants" + util_log "github.com/grafana/loki/pkg/util/log" ) var ( @@ -270,9 +271,6 @@ func (c *GatewayClient) FilterChunks(ctx context.Context, tenant 
string, from, t "from", from.Time(), "through", through.Time(), "num_refs", len(rs.groups), - "refs", JoinFunc(rs.groups, ",", func(e *logproto.GroupedChunkRefs) string { - return model.Fingerprint(e.Fingerprint).String() - }), "plan", plan.String(), "plan_hash", plan.Hash(), ) @@ -357,6 +355,16 @@ func replicationSetsWithBounds(subRing ring.ReadRing, instances []ring.InstanceD return nil, errors.Wrap(err, "bloom gateway get ring") } + if len(tr) == 0 { + level.Warn(util_log.Logger).Log( + "subroutine", "replicationSetsWithBounds", + "msg", "instance has no token ranges - should not be possible", + "instance", inst.Id, + "n_instances", len(instances), + ) + continue + } + // NB(owen-d): this will send requests to the wrong nodes if RF>1 since it only checks the // first token when assigning replicasets rs, err := subRing.Get(tr[0], BlocksOwnerRead, bufDescs, bufHosts, bufZones) diff --git a/pkg/bloomgateway/processor.go b/pkg/bloomgateway/processor.go index e49679fe61ea8..dc58d0a0664ca 100644 --- a/pkg/bloomgateway/processor.go +++ b/pkg/bloomgateway/processor.go @@ -90,7 +90,17 @@ func (p *processor) processBlocks(ctx context.Context, data []blockWithTasks) er } start := time.Now() - bqs, err := p.store.FetchBlocks(ctx, refs, bloomshipper.WithFetchAsync(true), bloomshipper.WithIgnoreNotFound(true)) + bqs, err := p.store.FetchBlocks( + ctx, + refs, + bloomshipper.WithFetchAsync(true), + bloomshipper.WithIgnoreNotFound(true), + // NB(owen-d): we relinquish bloom pages to a pool + // after iteration for performance (alloc reduction). + // This is safe to do here because we do not capture + // the underlying bloom []byte outside of iteration + bloomshipper.WithPool(true), + ) level.Debug(p.logger).Log("msg", "fetch blocks", "count", len(bqs), "duration", time.Since(start), "err", err) if err != nil { diff --git a/pkg/bloomgateway/util_test.go b/pkg/bloomgateway/util_test.go index 0eb94f68c7dbf..48d0f99f9004d 100644 --- a/pkg/bloomgateway/util_test.go +++ b/pkg/bloomgateway/util_test.go @@ -334,7 +334,7 @@ func createBlocks(t *testing.T, tenant string, n int, from, through model.Time, // } // } querier := &bloomshipper.CloseableBlockQuerier{ - BlockQuerier: v1.NewBlockQuerier(block), + BlockQuerier: v1.NewBlockQuerier(block, false), BlockRef: blockRef, } queriers = append(queriers, querier) diff --git a/pkg/ingester/flush_test.go b/pkg/ingester/flush_test.go index e3060f873b875..86127a3177f91 100644 --- a/pkg/ingester/flush_test.go +++ b/pkg/ingester/flush_test.go @@ -35,6 +35,7 @@ import ( "github.com/grafana/loki/pkg/storage/chunk/fetcher" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util/constants" "github.com/grafana/loki/pkg/validation" ) @@ -364,6 +365,14 @@ func (s *testStore) GetChunks(_ context.Context, _ string, _, _ model.Time, _ ch return nil, nil, nil } +func (s *testStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (s *testStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (s *testStore) GetSchemaConfigs() []config.PeriodConfig { return defaultPeriodConfigs } diff --git a/pkg/ingester/index/bitprefix.go b/pkg/ingester/index/bitprefix.go index 8235c2821d6ca..524bd56a69858 100644 --- a/pkg/ingester/index/bitprefix.go +++ b/pkg/ingester/index/bitprefix.go @@ -8,7 +8,7 
@@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) @@ -48,7 +48,7 @@ func NewBitPrefixWithShards(totalShards uint32) (*BitPrefixInvertedIndex, error) }, nil } -func (ii *BitPrefixInvertedIndex) getShards(shard *astmapper.ShardAnnotation) ([]*indexShard, bool) { +func (ii *BitPrefixInvertedIndex) getShards(shard *logql.Shard) ([]*indexShard, bool) { if shard == nil { return ii.shards, false } @@ -63,13 +63,18 @@ func (ii *BitPrefixInvertedIndex) getShards(shard *astmapper.ShardAnnotation) ([ // Conversely, if the requested shard is 1_of_2, but the index has a factor of 4, // we can _exactly_ match ob1 => (ob10, ob11) and know all fingerprints in those // resulting shards have the requested ob1 prefix (don't need to filter). - var filter bool - if shard.Of > len(ii.shards) { - filter = true + // NB(owen-d): this only applies when using the old power-of-two shards, + // which are superseded by the new bounded sharding strategy. + filter := true + + switch shard.Variant() { + case logql.PowerOfTwoVersion: + if shard.PowerOfTwo.Of <= len(ii.shards) { + filter = false + } } - requestedShard := shard.TSDB() - minFp, maxFp := requestedShard.GetFromThrough() + minFp, maxFp := shard.GetFromThrough() // Determine how many bits we need to take from // the requested shard's min/max fingerprint values @@ -102,12 +107,17 @@ func (ii *BitPrefixInvertedIndex) shardForFP(fp model.Fingerprint) int { return int(fp >> (64 - localShard.RequiredBits())) } -func (ii *BitPrefixInvertedIndex) validateShard(shard *astmapper.ShardAnnotation) error { +func (ii *BitPrefixInvertedIndex) validateShard(shard *logql.Shard) error { if shard == nil { return nil } - return shard.TSDB().Validate() + switch shard.Variant() { + case logql.PowerOfTwoVersion: + return shard.PowerOfTwo.TSDB().Validate() + } + return nil + } // Add a fingerprint under the specified labels. @@ -119,7 +129,7 @@ func (ii *BitPrefixInvertedIndex) Add(labels []logproto.LabelAdapter, fp model.F } // Lookup all fingerprints for the provided matchers. -func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { +func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *logql.Shard) ([]model.Fingerprint, error) { if err := ii.validateShard(shard); err != nil { return nil, err } @@ -143,7 +153,7 @@ func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *astm // Because bit prefix order is also ascending order, // the merged fingerprints from ascending shards are also in order. if filter { - minFP, maxFP := shard.TSDB().GetFromThrough() + minFP, maxFP := shard.GetFromThrough() minIdx := sort.Search(len(result), func(i int) bool { return result[i] >= minFP }) @@ -159,7 +169,7 @@ func (ii *BitPrefixInvertedIndex) Lookup(matchers []*labels.Matcher, shard *astm } // LabelNames returns all label names. 
-func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ([]string, error) { +func (ii *BitPrefixInvertedIndex) LabelNames(shard *logql.Shard) ([]string, error) { if err := ii.validateShard(shard); err != nil { return nil, err } @@ -171,7 +181,6 @@ func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ( // Therefore it's more performant to request shard factors lower or equal to the // inverted index factor if filter { - s := shard.TSDB() extractor = func(x unlockIndex) (results []string) { @@ -179,7 +188,7 @@ func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ( for name, entry := range x { for _, valEntry := range entry.fps { for _, fp := range valEntry.fps { - if s.Match(fp) { + if shard.Match(fp) { results = append(results, name) continue outer } @@ -201,7 +210,7 @@ func (ii *BitPrefixInvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ( } // LabelValues returns the values for the given label. -func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *astmapper.ShardAnnotation) ([]string, error) { +func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *logql.Shard) ([]string, error) { if err := ii.validateShard(shard); err != nil { return nil, err } @@ -209,7 +218,6 @@ func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *astmapper.Shar var extractor func(indexEntry) []string shards, filter := ii.getShards(shard) if filter { - s := shard.TSDB() extractor = func(x indexEntry) []string { results := make([]string, 0, len(x.fps)) @@ -217,7 +225,7 @@ func (ii *BitPrefixInvertedIndex) LabelValues(name string, shard *astmapper.Shar outer: for val, valEntry := range x.fps { for _, fp := range valEntry.fps { - if s.Match(fp) { + if shard.Match(fp) { results = append(results, val) continue outer } diff --git a/pkg/ingester/index/bitprefix_test.go b/pkg/ingester/index/bitprefix_test.go index d4afb9f635727..4c67ac4c5e32c 100644 --- a/pkg/ingester/index/bitprefix_test.go +++ b/pkg/ingester/index/bitprefix_test.go @@ -2,6 +2,7 @@ package index import ( "fmt" + "math" "sort" "testing" @@ -10,6 +11,7 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) @@ -18,24 +20,24 @@ func Test_BitPrefixGetShards(t *testing.T) { for _, tt := range []struct { total uint32 filter bool - shard *astmapper.ShardAnnotation + shard *logql.Shard expected []uint32 }{ // equal factors - {16, false, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, - {16, false, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4}}, - {16, false, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15}}, + {16, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, + {16, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{4}}, + {16, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{15}}, // idx factor a larger factor of 2 - {32, false, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0, 1}}, - {32, false, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{8, 9}}, - {32, false, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{30, 31}}, - {64, false, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{60, 61, 62, 63}}, + {32, false, 
logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0, 1}}, + {32, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{8, 9}}, + {32, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{30, 31}}, + {64, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{60, 61, 62, 63}}, // // idx factor a smaller factor of 2 - {8, true, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, - {8, true, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{2}}, - {8, true, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{7}}, + {8, true, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, + {8, true, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{2}}, + {8, true, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{7}}, } { tt := tt t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), func(t *testing.T) { @@ -52,11 +54,105 @@ func Test_BitPrefixGetShards(t *testing.T) { } } +func Test_BitPrefixGetShards_Bounded(t *testing.T) { + for _, tt := range []struct { + total uint32 + shard *logql.Shard + expected []uint32 + }{ + { + 4, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b01 << 62, + Max: 0b10 << 62, + }, + }, + ).Ptr(), + []uint32{1, 2}, + }, + { + 4, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b10 << 62, + Max: 0b11 << 62, + }, + }, + ).Ptr(), + []uint32{2, 3}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b00 << 62, + Max: 0b101 << 61, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b00 << 62, + Max: 0b110 << 61, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5, 6}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0b00 << 62, + Max: 0b111 << 61, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5, 6, 7}, + }, + { + 8, + logql.NewBoundedShard( + logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + }, + ).Ptr(), + []uint32{0, 1, 2, 3, 4, 5, 6, 7}, + }, + } { + t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), func(t *testing.T) { + ii, err := NewBitPrefixWithShards(tt.total) + require.Nil(t, err) + res, filter := ii.getShards(tt.shard) + require.True(t, filter) // always need to filter bounded shards + resInt := []uint32{} + for _, r := range res { + resInt = append(resInt, r.shard) + } + require.Equal(t, tt.expected, resInt) + }) + } + +} + func Test_BitPrefixValidateShards(t *testing.T) { ii, err := NewBitPrefixWithShards(32) require.Nil(t, err) - require.NoError(t, ii.validateShard(&astmapper.ShardAnnotation{Shard: 1, Of: 16})) - require.Error(t, ii.validateShard(&astmapper.ShardAnnotation{Shard: 1, Of: 15})) + require.NoError(t, ii.validateShard(logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 16}).Ptr())) + require.Error(t, ii.validateShard(logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 15}).Ptr())) } func Test_BitPrefixCreation(t *testing.T) { @@ -116,10 +212,10 @@ func Test_BitPrefix_hash_mapping(t *testing.T) { []*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, - &astmapper.ShardAnnotation{ + 
logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: int(expShard), Of: requestedFactor, - }, + }).Ptr(), ) require.NoError(t, err) require.Len(t, res, 1) @@ -147,7 +243,7 @@ func Test_BitPrefixNoMatcherLookup(t *testing.T) { require.Nil(t, err) expShard := uint32(fp >> (64 - index.NewShard(0, 16).RequiredBits())) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), fp) - ids, err = ii.Lookup(nil, &astmapper.ShardAnnotation{Shard: int(expShard), Of: 16}) + ids, err = ii.Lookup(nil, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(expShard), Of: 16}).Ptr()) require.Nil(t, err) require.Equal(t, fp, ids[0]) } @@ -171,10 +267,10 @@ func Test_BitPrefixConsistentMapping(t *testing.T) { shardMax := 8 for i := 0; i < shardMax; i++ { - shard := &astmapper.ShardAnnotation{ + shard := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: i, Of: shardMax, - } + }).Ptr() aIDs, err := a.Lookup([]*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"), diff --git a/pkg/ingester/index/index.go b/pkg/ingester/index/index.go index aff7352c8d59c..064c0ddc45ba7 100644 --- a/pkg/ingester/index/index.go +++ b/pkg/ingester/index/index.go @@ -18,6 +18,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/storage/stores/series" ) @@ -28,9 +29,9 @@ var ErrInvalidShardQuery = errors.New("incompatible index shard query") type Interface interface { Add(labels []logproto.LabelAdapter, fp model.Fingerprint) labels.Labels - Lookup(matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) - LabelNames(shard *astmapper.ShardAnnotation) ([]string, error) - LabelValues(name string, shard *astmapper.ShardAnnotation) ([]string, error) + Lookup(matchers []*labels.Matcher, shard *logql.Shard) ([]model.Fingerprint, error) + LabelNames(shard *logql.Shard) ([]string, error) + LabelValues(name string, shard *logql.Shard) ([]string, error) Delete(labels labels.Labels, fp model.Fingerprint) } @@ -71,14 +72,20 @@ func (ii *InvertedIndex) getShards(shard *astmapper.ShardAnnotation) []*indexSha return result } -func (ii *InvertedIndex) validateShard(shard *astmapper.ShardAnnotation) error { +func (ii *InvertedIndex) validateShard(shard *logql.Shard) (*astmapper.ShardAnnotation, error) { if shard == nil { - return nil + return nil, nil + } + + s := shard.PowerOfTwo + if s == nil { + return nil, errors.New("inverted index only supports shard annotations with `PowerOfTwo`") } - if int(ii.totalShards)%shard.Of != 0 || uint32(shard.Of) > ii.totalShards { - return fmt.Errorf("%w index_shard:%d query_shard:%v", ErrInvalidShardQuery, ii.totalShards, shard) + + if int(ii.totalShards)%s.Of != 0 || uint32(s.Of) > ii.totalShards { + return nil, fmt.Errorf("%w index_shard:%d query_shard:%v", ErrInvalidShardQuery, ii.totalShards, s) } - return nil + return s, nil } // Add a fingerprint under the specified labels. @@ -150,8 +157,9 @@ func labelsString(b *bytes.Buffer, ls labels.Labels) { } // Lookup all fingerprints for the provided matchers. 
-func (ii *InvertedIndex) Lookup(matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { - if err := ii.validateShard(shard); err != nil { +func (ii *InvertedIndex) Lookup(matchers []*labels.Matcher, s *logql.Shard) ([]model.Fingerprint, error) { + shard, err := ii.validateShard(s) + if err != nil { return nil, err } @@ -175,8 +183,9 @@ func (ii *InvertedIndex) Lookup(matchers []*labels.Matcher, shard *astmapper.Sha } // LabelNames returns all label names. -func (ii *InvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ([]string, error) { - if err := ii.validateShard(shard); err != nil { +func (ii *InvertedIndex) LabelNames(s *logql.Shard) ([]string, error) { + shard, err := ii.validateShard(s) + if err != nil { return nil, err } shards := ii.getShards(shard) @@ -190,8 +199,9 @@ func (ii *InvertedIndex) LabelNames(shard *astmapper.ShardAnnotation) ([]string, } // LabelValues returns the values for the given label. -func (ii *InvertedIndex) LabelValues(name string, shard *astmapper.ShardAnnotation) ([]string, error) { - if err := ii.validateShard(shard); err != nil { +func (ii *InvertedIndex) LabelValues(name string, s *logql.Shard) ([]string, error) { + shard, err := ii.validateShard(s) + if err != nil { return nil, err } shards := ii.getShards(shard) diff --git a/pkg/ingester/index/index_test.go b/pkg/ingester/index/index_test.go index bc6aaeebf344b..06625a357970d 100644 --- a/pkg/ingester/index/index_test.go +++ b/pkg/ingester/index/index_test.go @@ -10,6 +10,7 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/util" ) @@ -46,7 +47,10 @@ func Test_GetShards(t *testing.T) { func Test_ValidateShards(t *testing.T) { ii := NewWithShards(32) - require.NoError(t, ii.validateShard(&astmapper.ShardAnnotation{Shard: 1, Of: 16})) + _, err := ii.validateShard( + logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 16}).Ptr(), + ) + require.NoError(t, err) } var ( @@ -108,7 +112,8 @@ func Test_hash_mapping(t *testing.T) { ii := NewWithShards(shard) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), 1) - res, err := ii.Lookup([]*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, &astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + x := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + res, err := ii.Lookup([]*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, &x) require.NoError(t, err) require.Len(t, res, 1) require.Equal(t, model.Fingerprint(1), res[0]) @@ -131,7 +136,8 @@ func Test_NoMatcherLookup(t *testing.T) { // with shard param ii = NewWithShards(16) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), 1) - ids, err = ii.Lookup(nil, &astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + x := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + ids, err = ii.Lookup(nil, &x) require.Nil(t, err) require.Equal(t, model.Fingerprint(1), ids[0]) } @@ -151,10 +157,10 @@ func Test_ConsistentMapping(t *testing.T) { shardMax := 8 for i := 0; i < shardMax; i++ { - shard := &astmapper.ShardAnnotation{ + shard := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: i, Of: shardMax, - } + }).Ptr() aIDs, err := a.Lookup([]*labels.Matcher{ 
labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"), diff --git a/pkg/ingester/index/multi.go b/pkg/ingester/index/multi.go index db4a7b642f6fe..0bfa57806ad0a 100644 --- a/pkg/ingester/index/multi.go +++ b/pkg/ingester/index/multi.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/storage/config" ) @@ -80,15 +80,15 @@ func (m *Multi) Delete(labels labels.Labels, fp model.Fingerprint) { } -func (m *Multi) Lookup(t time.Time, matchers []*labels.Matcher, shard *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { +func (m *Multi) Lookup(t time.Time, matchers []*labels.Matcher, shard *logql.Shard) ([]model.Fingerprint, error) { return m.indexFor(t).Lookup(matchers, shard) } -func (m *Multi) LabelNames(t time.Time, shard *astmapper.ShardAnnotation) ([]string, error) { +func (m *Multi) LabelNames(t time.Time, shard *logql.Shard) ([]string, error) { return m.indexFor(t).LabelNames(shard) } -func (m *Multi) LabelValues(t time.Time, name string, shard *astmapper.ShardAnnotation) ([]string, error) { +func (m *Multi) LabelValues(t time.Time, name string, shard *logql.Shard) ([]string, error) { return m.indexFor(t).LabelValues(name, shard) } @@ -111,14 +111,14 @@ func (noopInvertedIndex) Add(_ []logproto.LabelAdapter, _ model.Fingerprint) lab func (noopInvertedIndex) Delete(_ labels.Labels, _ model.Fingerprint) {} -func (noopInvertedIndex) Lookup(_ []*labels.Matcher, _ *astmapper.ShardAnnotation) ([]model.Fingerprint, error) { +func (noopInvertedIndex) Lookup(_ []*labels.Matcher, _ *logql.Shard) ([]model.Fingerprint, error) { return nil, nil } -func (noopInvertedIndex) LabelNames(_ *astmapper.ShardAnnotation) ([]string, error) { +func (noopInvertedIndex) LabelNames(_ *logql.Shard) ([]string, error) { return nil, nil } -func (noopInvertedIndex) LabelValues(_ string, _ *astmapper.ShardAnnotation) ([]string, error) { +func (noopInvertedIndex) LabelValues(_ string, _ *logql.Shard) ([]string, error) { return nil, nil } diff --git a/pkg/ingester/index/multi_test.go b/pkg/ingester/index/multi_test.go index dc4568dab63b2..50d5db945edb8 100644 --- a/pkg/ingester/index/multi_test.go +++ b/pkg/ingester/index/multi_test.go @@ -10,6 +10,7 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" @@ -129,7 +130,9 @@ func TestMultiIndex(t *testing.T) { []*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "foo"), }, - &astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}, + logql.NewPowerOfTwoShard( + astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}, + ).Ptr(), ) require.Nil(t, err) @@ -144,7 +147,7 @@ func TestMultiIndex(t *testing.T) { []*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "foo"), }, - &astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}, + logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}).Ptr(), ) require.Nil(t, err) diff --git a/pkg/ingester/ingester_test.go b/pkg/ingester/ingester_test.go index 1f62821e1cc8b..82a124f5116a4 100644 --- a/pkg/ingester/ingester_test.go +++ b/pkg/ingester/ingester_test.go @@ -43,6 +43,7 @@ import ( 
"github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util/constants" "github.com/grafana/loki/pkg/validation" ) @@ -478,6 +479,14 @@ func (s *mockStore) Stats(_ context.Context, _ string, _, _ model.Time, _ ...*la }, nil } +func (s *mockStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (s *mockStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (s *mockStore) Volume(_ context.Context, _ string, _, _ model.Time, limit int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) { return &logproto.VolumeResponse{ Volumes: []logproto.Volume{ diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index 64678da85a540..e0e9d5e4ca6b4 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -38,7 +38,6 @@ import ( "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/runtime" "github.com/grafana/loki/pkg/storage/chunk" "github.com/grafana/loki/pkg/storage/config" @@ -504,17 +503,10 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams stats := stats.FromContext(ctx) var iters []iter.SampleIterator - var shard *astmapper.ShardAnnotation - shards, err := logql.ParseShards(req.Shards) + shard, err := parseShardFromRequest(req.Shards) if err != nil { return nil, err } - if len(shards) > 1 { - return nil, errors.New("only one shard per ingester query is supported") - } - if len(shards) == 1 { - shard = &shards[0] - } selector, err := expr.Selector() if err != nil { return nil, err @@ -823,11 +815,11 @@ func (i *instance) forMatchingStreams( // and is used to select the correct inverted index ts time.Time, matchers []*labels.Matcher, - shards *astmapper.ShardAnnotation, + shard *logql.Shard, fn func(*stream) error, ) error { filters, matchers := util.SplitFiltersAndMatchers(matchers) - ids, err := i.index.Lookup(ts, matchers, shards) + ids, err := i.index.Lookup(ts, matchers, shard) if err != nil { return err } @@ -934,9 +926,9 @@ func (i *instance) openTailersCount() uint32 { return uint32(len(i.tailers)) } -func parseShardFromRequest(reqShards []string) (*astmapper.ShardAnnotation, error) { - var shard *astmapper.ShardAnnotation - shards, err := logql.ParseShards(reqShards) +func parseShardFromRequest(reqShards []string) (*logql.Shard, error) { + var shard *logql.Shard + shards, _, err := logql.ParseShards(reqShards) if err != nil { return nil, err } diff --git a/pkg/loghttp/params.go b/pkg/loghttp/params.go index df97a5c2e37fe..654c52e7725df 100644 --- a/pkg/loghttp/params.go +++ b/pkg/loghttp/params.go @@ -8,6 +8,7 @@ import ( "strings" "time" + "github.com/c2h5oh/datasize" "github.com/pkg/errors" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" @@ -200,3 +201,21 @@ func parseRegexQuery(httpRequest *http.Request) (string, error) { } return query, nil } + +func parseBytes(r *http.Request, field string, optional bool) (val datasize.ByteSize, err error) { + s := r.Form.Get(field) + + if s == "" { + if !optional { + return 0, fmt.Errorf("missing %s", field) + } + return 
val, nil + } + + if err := val.UnmarshalText([]byte(s)); err != nil { + return 0, errors.Wrapf(err, "invalid %s: %s", field, s) + } + + return val, nil + +} diff --git a/pkg/loghttp/query.go b/pkg/loghttp/query.go index 854ccd5ae7116..75f75c60ccc03 100644 --- a/pkg/loghttp/query.go +++ b/pkg/loghttp/query.go @@ -8,6 +8,7 @@ import ( "time" "unsafe" + "github.com/c2h5oh/datasize" "github.com/grafana/jsonparser" json "github.com/json-iterator/go" "github.com/prometheus/common/model" @@ -503,6 +504,17 @@ func ParseIndexStatsQuery(r *http.Request) (*RangeQuery, error) { return ParseRangeQuery(r) } +func ParseIndexShardsQuery(r *http.Request) (*RangeQuery, datasize.ByteSize, error) { + // TODO(owen-d): use a specific type/validation instead + // of using range query parameters (superset) + parsed, err := ParseRangeQuery(r) + if err != nil { + return nil, 0, err + } + targetBytes, err := parseBytes(r, "targetBytesPerShard", true) + return parsed, targetBytes, err +} + func NewVolumeRangeQueryWithDefaults(matchers string) *logproto.VolumeRequest { start, end, _ := determineBounds(time.Now(), "", "", "") step := (time.Duration(defaultQueryRangeStep(start, end)) * time.Second).Milliseconds() diff --git a/pkg/logproto/compat.go b/pkg/logproto/compat.go index 0e65a90da02fa..25b5269e1ae6c 100644 --- a/pkg/logproto/compat.go +++ b/pkg/logproto/compat.go @@ -11,6 +11,7 @@ import ( "time" "unsafe" + "github.com/c2h5oh/datasize" "github.com/cespare/xxhash/v2" jsoniter "github.com/json-iterator/go" "github.com/opentracing/opentracing-go" @@ -354,6 +355,9 @@ func (m *FilterChunkRefRequest) GetStep() int64 { return 0 } +// TODO(owen-d): why does this return the hash of all the refs instead of the query? +// The latter should be significantly cheaper, more helpful (readable), and just as correct +// at being a unique identifier for the request. // GetQuery returns the query of the request. // The query is the hash for the input chunks refs and the filter expressions. func (m *FilterChunkRefRequest) GetQuery() string { @@ -402,3 +406,42 @@ func (m *FilterChunkRefRequest) WithStartEndForCache(start, end time.Time) resul return &clone } + +func (m *ShardsRequest) GetCachingOptions() (res definitions.CachingOptions) { return } + +func (m *ShardsRequest) GetStart() time.Time { + return time.Unix(0, m.From.UnixNano()) +} + +func (m *ShardsRequest) GetEnd() time.Time { + return time.Unix(0, m.Through.UnixNano()) +} + +func (m *ShardsRequest) GetStep() int64 { return 0 } + +func (m *ShardsRequest) WithStartEnd(start, end time.Time) definitions.Request { + clone := *m + clone.From = model.TimeFromUnixNano(start.UnixNano()) + clone.Through = model.TimeFromUnixNano(end.UnixNano()) + return &clone +} + +func (m *ShardsRequest) WithQuery(query string) definitions.Request { + clone := *m + clone.Query = query + return &clone +} + +func (m *ShardsRequest) WithStartEndForCache(start, end time.Time) resultscache.Request { + return m.WithStartEnd(start, end).(resultscache.Request) +} + +func (m *ShardsRequest) LogToSpan(sp opentracing.Span) { + fields := []otlog.Field{ + otlog.String("from", timestamp.Time(int64(m.From)).String()), + otlog.String("through", timestamp.Time(int64(m.Through)).String()), + otlog.String("query", m.GetQuery()), + otlog.String("target_bytes_per_shard", datasize.ByteSize(m.TargetBytesPerShard).HumanReadable()), + } + sp.LogFields(fields...) 
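+	// Note: target_bytes_per_shard above is logged in human-readable units via
+	// datasize rather than as a raw byte count, which keeps span annotations easier to scan.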
+} diff --git a/pkg/logproto/extensions.go b/pkg/logproto/extensions.go index 9a5f3f8e61af6..5fa5048e1a9d1 100644 --- a/pkg/logproto/extensions.go +++ b/pkg/logproto/extensions.go @@ -133,3 +133,10 @@ func (m *IndexStatsResponse) LoggingKeyValues() []interface{} { "entries", m.Entries, } } + +func (m *Shard) SpaceFor(stats *IndexStatsResponse, targetShardBytes uint64) bool { + curDelta := max(m.Stats.Bytes, targetShardBytes) - min(m.Stats.Bytes, targetShardBytes) + updated := m.Stats.Bytes + stats.Bytes + newDelta := max(updated, targetShardBytes) - min(updated, targetShardBytes) + return newDelta <= curDelta +} diff --git a/pkg/logproto/extensions_test.go b/pkg/logproto/extensions_test.go new file mode 100644 index 0000000000000..d1c96c76bbed3 --- /dev/null +++ b/pkg/logproto/extensions_test.go @@ -0,0 +1,42 @@ +package logproto + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestShard_SpaceFor(t *testing.T) { + target := uint64(100) + shard := Shard{ + Stats: &IndexStatsResponse{ + Bytes: 50, + }, + } + + for _, tc := range []struct { + desc string + bytes uint64 + exp bool + }{ + { + desc: "full shard", + bytes: 50, + exp: true, + }, + { + desc: "overflow equal to underflow accepts", + bytes: 100, + exp: true, + }, + { + desc: "overflow", + bytes: 101, + exp: false, + }, + } { + t.Run(tc.desc, func(t *testing.T) { + require.Equal(t, shard.SpaceFor(&IndexStatsResponse{Bytes: tc.bytes}, target), tc.exp) + }) + } +} diff --git a/pkg/logproto/indexgateway.pb.go b/pkg/logproto/indexgateway.pb.go index 86b2665e86b17..1229caebbb604 100644 --- a/pkg/logproto/indexgateway.pb.go +++ b/pkg/logproto/indexgateway.pb.go @@ -6,11 +6,18 @@ package logproto import ( context "context" fmt "fmt" + _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" + stats "github.com/grafana/loki/pkg/logqlmodel/stats" + github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" + io "io" math "math" + math_bits "math/bits" + reflect "reflect" + strings "strings" ) // Reference imports to suppress errors if they are not otherwise used. @@ -24,33 +31,453 @@ var _ = math.Inf // proto package needs to be updated. 
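(Aside on Shard.SpaceFor above, as a worked example: with a target of 100 bytes and a shard currently holding 50, adding stats of 50 bytes shrinks the distance to the target from 50 to 0 and is accepted; adding 100 bytes overshoots to 150 but leaves the distance unchanged at 50, so it is still accepted; adding 101 bytes widens the distance to 51 and is rejected. In other words, a shard keeps accepting stream stats only while doing so does not move its size further from target_bytes_per_shard, which is exactly what the cases in extensions_test.go exercise.)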
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package +type ShardsRequest struct { + From github_com_prometheus_common_model.Time `protobuf:"varint,1,opt,name=from,proto3,customtype=github.com/prometheus/common/model.Time" json:"from"` + Through github_com_prometheus_common_model.Time `protobuf:"varint,2,opt,name=through,proto3,customtype=github.com/prometheus/common/model.Time" json:"through"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query"` + TargetBytesPerShard uint64 `protobuf:"varint,4,opt,name=target_bytes_per_shard,json=targetBytesPerShard,proto3" json:"targetBytesPerShard"` +} + +func (m *ShardsRequest) Reset() { *m = ShardsRequest{} } +func (*ShardsRequest) ProtoMessage() {} +func (*ShardsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{0} +} +func (m *ShardsRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ShardsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ShardsRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ShardsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShardsRequest.Merge(m, src) +} +func (m *ShardsRequest) XXX_Size() int { + return m.Size() +} +func (m *ShardsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ShardsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ShardsRequest proto.InternalMessageInfo + +func (m *ShardsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *ShardsRequest) GetTargetBytesPerShard() uint64 { + if m != nil { + return m.TargetBytesPerShard + } + return 0 +} + +type ShardsResponse struct { + Shards []Shard `protobuf:"bytes,1,rep,name=shards,proto3" json:"shards"` + Statistics stats.Result `protobuf:"bytes,2,opt,name=statistics,proto3" json:"statistics"` +} + +func (m *ShardsResponse) Reset() { *m = ShardsResponse{} } +func (*ShardsResponse) ProtoMessage() {} +func (*ShardsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{1} +} +func (m *ShardsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ShardsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ShardsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ShardsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShardsResponse.Merge(m, src) +} +func (m *ShardsResponse) XXX_Size() int { + return m.Size() +} +func (m *ShardsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ShardsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ShardsResponse proto.InternalMessageInfo + +func (m *ShardsResponse) GetShards() []Shard { + if m != nil { + return m.Shards + } + return nil +} + +func (m *ShardsResponse) GetStatistics() stats.Result { + if m != nil { + return m.Statistics + } + return stats.Result{} +} + +type Shard struct { + Bounds FPBounds `protobuf:"bytes,1,opt,name=bounds,proto3" json:"bounds"` + Stats *IndexStatsResponse `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats"` +} + +func (m *Shard) Reset() { *m = Shard{} } +func (*Shard) ProtoMessage() {} +func (*Shard) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{2} +} +func (m *Shard) 
XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Shard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Shard.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Shard) XXX_Merge(src proto.Message) { + xxx_messageInfo_Shard.Merge(m, src) +} +func (m *Shard) XXX_Size() int { + return m.Size() +} +func (m *Shard) XXX_DiscardUnknown() { + xxx_messageInfo_Shard.DiscardUnknown(m) +} + +var xxx_messageInfo_Shard proto.InternalMessageInfo + +func (m *Shard) GetBounds() FPBounds { + if m != nil { + return m.Bounds + } + return FPBounds{} +} + +func (m *Shard) GetStats() *IndexStatsResponse { + if m != nil { + return m.Stats + } + return nil +} + +// FPBounds is identical to the definition in `pkg/storage/bloom/v1/bounds.FingerprintBounds` +// which ensures we can cast between them without allocations. +type FPBounds struct { + Min github_com_prometheus_common_model.Fingerprint `protobuf:"varint,1,opt,name=min,proto3,casttype=github.com/prometheus/common/model.Fingerprint" json:"min"` + Max github_com_prometheus_common_model.Fingerprint `protobuf:"varint,2,opt,name=max,proto3,casttype=github.com/prometheus/common/model.Fingerprint" json:"max"` +} + +func (m *FPBounds) Reset() { *m = FPBounds{} } +func (*FPBounds) ProtoMessage() {} +func (*FPBounds) Descriptor() ([]byte, []int) { + return fileDescriptor_d27585148d0a52c8, []int{3} +} +func (m *FPBounds) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *FPBounds) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_FPBounds.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *FPBounds) XXX_Merge(src proto.Message) { + xxx_messageInfo_FPBounds.Merge(m, src) +} +func (m *FPBounds) XXX_Size() int { + return m.Size() +} +func (m *FPBounds) XXX_DiscardUnknown() { + xxx_messageInfo_FPBounds.DiscardUnknown(m) +} + +var xxx_messageInfo_FPBounds proto.InternalMessageInfo + +func (m *FPBounds) GetMin() github_com_prometheus_common_model.Fingerprint { + if m != nil { + return m.Min + } + return 0 +} + +func (m *FPBounds) GetMax() github_com_prometheus_common_model.Fingerprint { + if m != nil { + return m.Max + } + return 0 +} + +func init() { + proto.RegisterType((*ShardsRequest)(nil), "indexgatewaypb.ShardsRequest") + proto.RegisterType((*ShardsResponse)(nil), "indexgatewaypb.ShardsResponse") + proto.RegisterType((*Shard)(nil), "indexgatewaypb.Shard") + proto.RegisterType((*FPBounds)(nil), "indexgatewaypb.FPBounds") +} + func init() { proto.RegisterFile("pkg/logproto/indexgateway.proto", fileDescriptor_d27585148d0a52c8) } var fileDescriptor_d27585148d0a52c8 = []byte{ - // 361 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x4e, 0xc2, 0x40, - 0x18, 0xc7, 0xef, 0x12, 0x63, 0xf4, 0x34, 0x0e, 0xb7, 0x40, 0x40, 0xcf, 0xc4, 0x38, 0xe8, 0x42, - 0x8d, 0xbe, 0x80, 0xd1, 0x84, 0x86, 0x04, 0x4d, 0xc4, 0x84, 0x81, 0xc1, 0x78, 0xc5, 0x8f, 0xd2, - 0x50, 0x7a, 0xb5, 0xbd, 0x46, 0xd9, 0x7c, 0x04, 0x1f, 0xc3, 0x87, 0xf0, 0x01, 0x1c, 0x19, 0x19, - 0xe5, 0x58, 0x1c, 0x79, 0x04, 0xc3, 0x35, 0x85, 0x03, 0x4b, 0xe2, 0x04, 0xfd, 0xfd, 0x7f, 0xdf, - 0xff, 0x4b, 0xef, 0x4a, 0x0e, 0xc3, 0x9e, 0x6b, 0xf9, 0xc2, 0x0d, 0x23, 0x21, 0x85, 
0xe5, 0x05, - 0x4f, 0xf0, 0xea, 0x72, 0x09, 0x2f, 0x7c, 0x50, 0xd1, 0x88, 0xee, 0x99, 0x2c, 0x74, 0x4a, 0xe5, - 0xa5, 0x81, 0xec, 0x4f, 0x2a, 0x9f, 0x7f, 0x6e, 0x90, 0xdd, 0xda, 0xcc, 0xb7, 0x53, 0x9f, 0xd6, - 0x08, 0xb9, 0x4b, 0x20, 0x1a, 0x68, 0x48, 0xcb, 0x95, 0xb9, 0xbf, 0xa0, 0x0d, 0x78, 0x4e, 0x20, - 0x96, 0xa5, 0xfd, 0xfc, 0x30, 0x0e, 0x45, 0x10, 0xc3, 0x19, 0xa6, 0x75, 0xb2, 0x63, 0x83, 0xbc, - 0xee, 0x26, 0x41, 0xaf, 0x01, 0x1d, 0x6a, 0xe8, 0x06, 0xce, 0xca, 0x0e, 0xd6, 0xa4, 0x69, 0xdb, - 0x11, 0xa2, 0x55, 0xb2, 0x6d, 0x83, 0xbc, 0x87, 0xc8, 0x83, 0x98, 0x96, 0x96, 0xec, 0x14, 0x66, - 0x4d, 0xe5, 0xdc, 0x6c, 0xde, 0xf3, 0x40, 0x0a, 0x75, 0xee, 0x80, 0x7f, 0xcb, 0xfb, 0x10, 0x57, - 0x45, 0x74, 0x03, 0x32, 0xf2, 0xda, 0xb3, 0x27, 0x7a, 0xb2, 0x98, 0x5c, 0xa3, 0x64, 0x3b, 0x0a, - 0x2b, 0xa6, 0xd1, 0xff, 0x48, 0x8a, 0x1a, 0x35, 0xb9, 0x9f, 0xac, 0x2e, 0x38, 0x5d, 0x19, 0xcb, - 0x71, 0xfe, 0xb1, 0xc1, 0x26, 0x5b, 0xb3, 0x17, 0x93, 0x5c, 0xc6, 0xe6, 0x05, 0xe9, 0xe3, 0xd7, - 0x34, 0xe7, 0x82, 0xcc, 0x70, 0x5e, 0x74, 0xa9, 0x8f, 0xb4, 0x29, 0xfc, 0xa4, 0x0f, 0xd4, 0x58, - 0x98, 0x92, 0xac, 0xa5, 0xf8, 0x37, 0xc8, 0x1a, 0xae, 0x5a, 0xc3, 0x31, 0x43, 0xa3, 0x31, 0x43, - 0xd3, 0x31, 0xc3, 0x6f, 0x8a, 0xe1, 0x0f, 0xc5, 0xf0, 0x97, 0x62, 0x78, 0xa8, 0x18, 0xfe, 0x56, - 0x0c, 0xff, 0x28, 0x86, 0xa6, 0x8a, 0xe1, 0xf7, 0x09, 0x43, 0xc3, 0x09, 0x43, 0xa3, 0x09, 0x43, - 0xad, 0x63, 0xd7, 0x93, 0xdd, 0xc4, 0xa9, 0xb4, 0x45, 0xdf, 0x72, 0x23, 0xde, 0xe1, 0x01, 0xb7, - 0x7c, 0xd1, 0xf3, 0x2c, 0xf3, 0x4b, 0x75, 0x36, 0xf5, 0xcf, 0xc5, 0x6f, 0x00, 0x00, 0x00, 0xff, - 0xff, 0x7a, 0x1a, 0x28, 0xb4, 0xf1, 0x02, 0x00, 0x00, + // 734 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xbf, 0x4f, 0xdb, 0x4c, + 0x18, 0xf6, 0x91, 0x84, 0x0f, 0x8e, 0x0f, 0x86, 0xfb, 0x7e, 0x60, 0x05, 0xb0, 0xa3, 0xa8, 0x52, + 0xd3, 0xc5, 0xae, 0xe8, 0x50, 0xb5, 0x12, 0x12, 0x75, 0xa5, 0x44, 0x88, 0xb4, 0xa2, 0x06, 0x31, + 0x30, 0x94, 0x3a, 0xe1, 0x70, 0x2c, 0x6c, 0x5f, 0xb8, 0x3b, 0xab, 0x61, 0xeb, 0x58, 0x75, 0xaa, + 0xfa, 0x1f, 0x54, 0xea, 0xd0, 0x3f, 0x85, 0x91, 0x11, 0x75, 0xb0, 0x4a, 0x58, 0xaa, 0x4c, 0xcc, + 0x9d, 0x2a, 0xdf, 0xd9, 0x89, 0x09, 0x41, 0xa2, 0x5d, 0x72, 0xe7, 0xe7, 0x7d, 0xde, 0xe7, 0xfd, + 0x79, 0x0a, 0xd4, 0xbb, 0x47, 0xae, 0xe9, 0x13, 0xb7, 0x4b, 0x09, 0x27, 0xa6, 0x17, 0x1e, 0xe0, + 0x9e, 0xeb, 0x70, 0xfc, 0xd6, 0x39, 0x31, 0x04, 0x84, 0x16, 0xf2, 0x58, 0xb7, 0x55, 0xfe, 0xd7, + 0x25, 0x2e, 0x91, 0xec, 0xe4, 0x26, 0x59, 0xe5, 0xa5, 0x6b, 0x32, 0xd9, 0x25, 0x35, 0x56, 0x52, + 0xe3, 0xb1, 0x1f, 0x90, 0x03, 0xec, 0x9b, 0x8c, 0x3b, 0x9c, 0xc9, 0x5f, 0xc9, 0xa8, 0x7e, 0x9e, + 0x82, 0xf3, 0xdb, 0x1d, 0x87, 0x1e, 0x30, 0x1b, 0x1f, 0x47, 0x98, 0x71, 0xb4, 0x09, 0x8b, 0x87, + 0x94, 0x04, 0x2a, 0xa8, 0x80, 0x5a, 0xc1, 0x7a, 0x7c, 0x1a, 0xeb, 0xca, 0xb7, 0x58, 0xbf, 0xef, + 0x7a, 0xbc, 0x13, 0xb5, 0x8c, 0x36, 0x09, 0xcc, 0x2e, 0x25, 0x01, 0xe6, 0x1d, 0x1c, 0x31, 0xb3, + 0x4d, 0x82, 0x80, 0x84, 0xa6, 0x50, 0x37, 0x76, 0xbc, 0x00, 0x0f, 0x62, 0x5d, 0xb8, 0xdb, 0xe2, + 0x17, 0xed, 0xc0, 0xbf, 0x78, 0x87, 0x92, 0xc8, 0xed, 0xa8, 0x53, 0x42, 0xef, 0xe9, 0xef, 0xeb, + 0x65, 0x0a, 0x76, 0x76, 0x41, 0x3a, 0x2c, 0x1d, 0x47, 0x98, 0x9e, 0xa8, 0x85, 0x0a, 0xa8, 0xcd, + 0x5a, 0xb3, 0x83, 0x58, 0x97, 0x80, 0x2d, 0x0f, 0xd4, 0x84, 0xff, 0x73, 0x87, 0xba, 0x98, 0xef, + 0xb7, 0x4e, 0x38, 0x66, 0xfb, 0x5d, 0x4c, 0xf7, 0x59, 0x52, 0xa5, 0x5a, 0xac, 0x80, 0x5a, 0xd1, + 0x5a, 0x1c, 0xc4, 0xfa, 0x3f, 0x92, 0x61, 0x25, 0x84, 0x2d, 0x4c, 0x45, 0x13, 0xec, 0x49, 0x60, + 0xf5, 
0x13, 0x80, 0x0b, 0x59, 0x8f, 0x58, 0x97, 0x84, 0x0c, 0xa3, 0x35, 0x38, 0x2d, 0xf4, 0x98, + 0x0a, 0x2a, 0x85, 0xda, 0xdc, 0xea, 0x7f, 0xc6, 0xf5, 0x61, 0x19, 0x82, 0x6f, 0x2d, 0x24, 0xd5, + 0x0e, 0x62, 0x3d, 0x25, 0xdb, 0xe9, 0x89, 0x9e, 0x41, 0x98, 0x0c, 0xc1, 0x63, 0xdc, 0x6b, 0x33, + 0xd1, 0x99, 0xb9, 0xd5, 0x79, 0x43, 0xce, 0xc5, 0xc6, 0x2c, 0xf2, 0xb9, 0x85, 0x52, 0xd7, 0x1c, + 0xd1, 0xce, 0xdd, 0xab, 0xef, 0x01, 0x2c, 0x89, 0x20, 0x68, 0x1d, 0x4e, 0xb7, 0x48, 0x14, 0x8a, + 0x5c, 0x12, 0x21, 0x75, 0x3c, 0x97, 0xfa, 0x96, 0x25, 0xec, 0xa3, 0x74, 0x24, 0xdf, 0x4e, 0x4f, + 0xb4, 0x06, 0x4b, 0x22, 0x76, 0x9a, 0xc9, 0xb2, 0x31, 0x5c, 0xa3, 0x8d, 0x44, 0x69, 0x3b, 0xb1, + 0x65, 0xa5, 0xcb, 0x6e, 0x0b, 0xba, 0x2d, 0x8f, 0xea, 0x17, 0x00, 0x67, 0xb2, 0x18, 0x68, 0x13, + 0x16, 0x02, 0x2f, 0x14, 0xa9, 0x14, 0xad, 0x27, 0x83, 0x58, 0x4f, 0x3e, 0x7f, 0xc6, 0xba, 0x71, + 0x87, 0x81, 0xd7, 0xbd, 0xd0, 0xc5, 0xb4, 0x4b, 0xbd, 0x90, 0xdb, 0x89, 0x9b, 0x10, 0x73, 0x7a, + 0x22, 0xad, 0x4c, 0xcc, 0xe9, 0xfd, 0x91, 0x98, 0xd3, 0x5b, 0xfd, 0x50, 0x82, 0x7f, 0x8b, 0x7a, + 0x1a, 0xb2, 0x33, 0x68, 0x03, 0xc2, 0x57, 0xc9, 0xba, 0x08, 0x10, 0x2d, 0x8d, 0xaa, 0x1e, 0xa1, + 0xe9, 0xa3, 0x28, 0x2f, 0x4f, 0x36, 0xca, 0x96, 0x3c, 0x04, 0xa8, 0x09, 0xe7, 0x1a, 0x98, 0x3f, + 0xef, 0x44, 0xe1, 0x91, 0x8d, 0x0f, 0x51, 0x8e, 0x9e, 0x83, 0x33, 0xb1, 0x95, 0x5b, 0xac, 0x52, + 0xad, 0xaa, 0xa0, 0x3a, 0x9c, 0x6d, 0x60, 0xbe, 0x8d, 0xa9, 0x87, 0x19, 0x2a, 0x5f, 0x63, 0x4b, + 0x30, 0x53, 0x5a, 0x9a, 0x68, 0x1b, 0xea, 0xbc, 0x86, 0x8b, 0x4d, 0xa7, 0x85, 0xfd, 0x97, 0x4e, + 0x80, 0x59, 0x9d, 0xd0, 0x17, 0x98, 0x53, 0xaf, 0x9d, 0x7c, 0xa1, 0xda, 0xc8, 0xf3, 0x16, 0x4a, + 0x16, 0x63, 0x71, 0x8c, 0x99, 0xd3, 0x7f, 0x03, 0x55, 0x01, 0xed, 0x3a, 0x7e, 0x34, 0x1e, 0xe0, + 0xc1, 0x98, 0xdb, 0x04, 0xce, 0x1d, 0x22, 0x34, 0xe0, 0x4c, 0x52, 0x58, 0xb2, 0x66, 0xf9, 0x01, + 0xe5, 0xd7, 0xf2, 0xc6, 0x80, 0x6e, 0xee, 0x6c, 0x55, 0x41, 0xeb, 0xa2, 0xa5, 0xbb, 0xc4, 0x8f, + 0x02, 0x8c, 0x72, 0x01, 0x25, 0x92, 0xa9, 0xa8, 0x37, 0x0d, 0x43, 0x85, 0xa6, 0x1c, 0x8a, 0x7c, + 0xc0, 0x2b, 0x13, 0xdf, 0xfb, 0x30, 0x1b, 0xed, 0x36, 0x73, 0xb6, 0x30, 0xd6, 0xde, 0xd9, 0x85, + 0xa6, 0x9c, 0x5f, 0x68, 0xca, 0xd5, 0x85, 0x06, 0xde, 0xf5, 0x35, 0xf0, 0xb5, 0xaf, 0x81, 0xd3, + 0xbe, 0x06, 0xce, 0xfa, 0x1a, 0xf8, 0xde, 0xd7, 0xc0, 0x8f, 0xbe, 0xa6, 0x5c, 0xf5, 0x35, 0xf0, + 0xf1, 0x52, 0x53, 0xce, 0x2e, 0x35, 0xe5, 0xfc, 0x52, 0x53, 0xf6, 0xee, 0xe5, 0xf6, 0xde, 0xa5, + 0xce, 0xa1, 0x13, 0x3a, 0xa6, 0x4f, 0x8e, 0x3c, 0x33, 0xff, 0x27, 0xd0, 0x9a, 0x16, 0xc7, 0xa3, + 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xbd, 0xfc, 0xaf, 0x62, 0x06, 0x00, 0x00, +} + +func (this *ShardsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*ShardsRequest) + if !ok { + that2, ok := that.(ShardsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.From.Equal(that1.From) { + return false + } + if !this.Through.Equal(that1.Through) { + return false + } + if this.Query != that1.Query { + return false + } + if this.TargetBytesPerShard != that1.TargetBytesPerShard { + return false + } + return true +} +func (this *ShardsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*ShardsResponse) + if !ok { + that2, ok := that.(ShardsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + 
return false + } + if len(this.Shards) != len(that1.Shards) { + return false + } + for i := range this.Shards { + if !this.Shards[i].Equal(&that1.Shards[i]) { + return false + } + } + if !this.Statistics.Equal(&that1.Statistics) { + return false + } + return true +} +func (this *Shard) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*Shard) + if !ok { + that2, ok := that.(Shard) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.Bounds.Equal(&that1.Bounds) { + return false + } + if !this.Stats.Equal(that1.Stats) { + return false + } + return true +} +func (this *FPBounds) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*FPBounds) + if !ok { + that2, ok := that.(FPBounds) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.Min != that1.Min { + return false + } + if this.Max != that1.Max { + return false + } + return true +} +func (this *ShardsRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 8) + s = append(s, "&logproto.ShardsRequest{") + s = append(s, "From: "+fmt.Sprintf("%#v", this.From)+",\n") + s = append(s, "Through: "+fmt.Sprintf("%#v", this.Through)+",\n") + s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "TargetBytesPerShard: "+fmt.Sprintf("%#v", this.TargetBytesPerShard)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *ShardsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&logproto.ShardsResponse{") + if this.Shards != nil { + vs := make([]*Shard, len(this.Shards)) + for i := range vs { + vs[i] = &this.Shards[i] + } + s = append(s, "Shards: "+fmt.Sprintf("%#v", vs)+",\n") + } + s = append(s, "Statistics: "+strings.Replace(this.Statistics.GoString(), `&`, ``, 1)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *Shard) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&logproto.Shard{") + s = append(s, "Bounds: "+strings.Replace(this.Bounds.GoString(), `&`, ``, 1)+",\n") + if this.Stats != nil { + s = append(s, "Stats: "+fmt.Sprintf("%#v", this.Stats)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *FPBounds) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&logproto.FPBounds{") + s = append(s, "Min: "+fmt.Sprintf("%#v", this.Min)+",\n") + s = append(s, "Max: "+fmt.Sprintf("%#v", this.Max)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func valueToGoStringIndexgateway(v interface{}, typ string) string { + rv := reflect.ValueOf(v) + if rv.IsNil() { + return "nil" + } + pv := reflect.Indirect(rv).Interface() + return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv) } // Reference imports to suppress errors if they are not otherwise used. @@ -79,6 +506,9 @@ type IndexGatewayClient interface { // Note: this MUST be the same as the variant defined in // logproto.proto on the Querier service. GetVolume(ctx context.Context, in *VolumeRequest, opts ...grpc.CallOption) (*VolumeResponse, error) + // GetShards is an optimized implemented shard-planning implementation + // on the index gateway and not on the ingester. 
+ GetShards(ctx context.Context, in *ShardsRequest, opts ...grpc.CallOption) (IndexGateway_GetShardsClient, error) } type indexGatewayClient struct { @@ -175,6 +605,38 @@ func (c *indexGatewayClient) GetVolume(ctx context.Context, in *VolumeRequest, o return out, nil } +func (c *indexGatewayClient) GetShards(ctx context.Context, in *ShardsRequest, opts ...grpc.CallOption) (IndexGateway_GetShardsClient, error) { + stream, err := c.cc.NewStream(ctx, &_IndexGateway_serviceDesc.Streams[1], "/indexgatewaypb.IndexGateway/GetShards", opts...) + if err != nil { + return nil, err + } + x := &indexGatewayGetShardsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type IndexGateway_GetShardsClient interface { + Recv() (*ShardsResponse, error) + grpc.ClientStream +} + +type indexGatewayGetShardsClient struct { + grpc.ClientStream +} + +func (x *indexGatewayGetShardsClient) Recv() (*ShardsResponse, error) { + m := new(ShardsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + // IndexGatewayServer is the server API for IndexGateway service. type IndexGatewayServer interface { /// QueryIndex reads the indexes required for given query & sends back the batch of rows @@ -191,6 +653,9 @@ type IndexGatewayServer interface { // Note: this MUST be the same as the variant defined in // logproto.proto on the Querier service. GetVolume(context.Context, *VolumeRequest) (*VolumeResponse, error) + // GetShards is an optimized implemented shard-planning implementation + // on the index gateway and not on the ingester. + GetShards(*ShardsRequest, IndexGateway_GetShardsServer) error } // UnimplementedIndexGatewayServer can be embedded to have forward compatible implementations. 
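For orientation, a minimal, hypothetical sketch (not part of this patch) of how a caller could drive the new server-streaming GetShards RPC through the generated client above. The package and function names, the example query, and the byte target are illustrative assumptions; it presumes an established *grpc.ClientConn and a context that already carries the tenant/org ID.

package shardplanning // hypothetical package name for this sketch

import (
	"context"
	"io"
	"time"

	"github.com/prometheus/common/model"
	"google.golang.org/grpc"

	"github.com/grafana/loki/pkg/logproto"
)

// fetchShards opens the stream, drains every ShardsResponse, and returns the
// accumulated shard plan.
func fetchShards(ctx context.Context, conn *grpc.ClientConn) ([]logproto.Shard, error) {
	client := logproto.NewIndexGatewayClient(conn)
	stream, err := client.GetShards(ctx, &logproto.ShardsRequest{
		From:                model.TimeFromUnix(time.Now().Add(-time.Hour).Unix()),
		Through:             model.TimeFromUnix(time.Now().Unix()),
		Query:               `{app="foo"} |= "error"`, // illustrative query
		TargetBytesPerShard: 600 << 20,                // assumed ~600MiB-per-shard target
	})
	if err != nil {
		return nil, err
	}
	var shards []logproto.Shard
	for {
		resp, err := stream.Recv()
		if err == io.EOF { // server closed the stream: the plan is complete
			return shards, nil
		}
		if err != nil {
			return nil, err
		}
		shards = append(shards, resp.Shards...)
	}
}

Streaming presumably lets the gateway flush shard groups as it plans rather than buffering the whole response; note that each ShardsResponse also carries a stats.Result in its Statistics field for the usual query-stats aggregation.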
@@ -218,6 +683,9 @@ func (*UnimplementedIndexGatewayServer) GetStats(ctx context.Context, req *Index func (*UnimplementedIndexGatewayServer) GetVolume(ctx context.Context, req *VolumeRequest) (*VolumeResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetVolume not implemented") } +func (*UnimplementedIndexGatewayServer) GetShards(req *ShardsRequest, srv IndexGateway_GetShardsServer) error { + return status.Errorf(codes.Unimplemented, "method GetShards not implemented") +} func RegisterIndexGatewayServer(s *grpc.Server, srv IndexGatewayServer) { s.RegisterService(&_IndexGateway_serviceDesc, srv) @@ -352,6 +820,27 @@ func _IndexGateway_GetVolume_Handler(srv interface{}, ctx context.Context, dec f return interceptor(ctx, in, info, handler) } +func _IndexGateway_GetShards_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ShardsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(IndexGatewayServer).GetShards(m, &indexGatewayGetShardsServer{stream}) +} + +type IndexGateway_GetShardsServer interface { + Send(*ShardsResponse) error + grpc.ServerStream +} + +type indexGatewayGetShardsServer struct { + grpc.ServerStream +} + +func (x *indexGatewayGetShardsServer) Send(m *ShardsResponse) error { + return x.ServerStream.SendMsg(m) +} + var _IndexGateway_serviceDesc = grpc.ServiceDesc{ ServiceName: "indexgatewaypb.IndexGateway", HandlerType: (*IndexGatewayServer)(nil), @@ -387,6 +876,912 @@ var _IndexGateway_serviceDesc = grpc.ServiceDesc{ Handler: _IndexGateway_QueryIndex_Handler, ServerStreams: true, }, + { + StreamName: "GetShards", + Handler: _IndexGateway_GetShards_Handler, + ServerStreams: true, + }, }, Metadata: "pkg/logproto/indexgateway.proto", } + +func (m *ShardsRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ShardsRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ShardsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.TargetBytesPerShard != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.TargetBytesPerShard)) + i-- + dAtA[i] = 0x20 + } + if len(m.Query) > 0 { + i -= len(m.Query) + copy(dAtA[i:], m.Query) + i = encodeVarintIndexgateway(dAtA, i, uint64(len(m.Query))) + i-- + dAtA[i] = 0x1a + } + if m.Through != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.Through)) + i-- + dAtA[i] = 0x10 + } + if m.From != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.From)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *ShardsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ShardsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + { + size, err := m.Statistics.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + if len(m.Shards) > 0 { + for iNdEx := len(m.Shards) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := 
m.Shards[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *Shard) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Shard) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Shard) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Stats != nil { + { + size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + { + size, err := m.Bounds.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintIndexgateway(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *FPBounds) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *FPBounds) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *FPBounds) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Max != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.Max)) + i-- + dAtA[i] = 0x10 + } + if m.Min != 0 { + i = encodeVarintIndexgateway(dAtA, i, uint64(m.Min)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintIndexgateway(dAtA []byte, offset int, v uint64) int { + offset -= sovIndexgateway(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *ShardsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.From != 0 { + n += 1 + sovIndexgateway(uint64(m.From)) + } + if m.Through != 0 { + n += 1 + sovIndexgateway(uint64(m.Through)) + } + l = len(m.Query) + if l > 0 { + n += 1 + l + sovIndexgateway(uint64(l)) + } + if m.TargetBytesPerShard != 0 { + n += 1 + sovIndexgateway(uint64(m.TargetBytesPerShard)) + } + return n +} + +func (m *ShardsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Shards) > 0 { + for _, e := range m.Shards { + l = e.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + } + } + l = m.Statistics.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + return n +} + +func (m *Shard) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.Bounds.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + if m.Stats != nil { + l = m.Stats.Size() + n += 1 + l + sovIndexgateway(uint64(l)) + } + return n +} + +func (m *FPBounds) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Min != 0 { + n += 1 + sovIndexgateway(uint64(m.Min)) + } + if m.Max != 0 { + n += 1 + sovIndexgateway(uint64(m.Max)) + } + return n +} + +func sovIndexgateway(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozIndexgateway(x uint64) (n int) { + return sovIndexgateway(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (this *ShardsRequest) String() string { + if this == nil { + return "nil" + } + s := 
strings.Join([]string{`&ShardsRequest{`, + `From:` + fmt.Sprintf("%v", this.From) + `,`, + `Through:` + fmt.Sprintf("%v", this.Through) + `,`, + `Query:` + fmt.Sprintf("%v", this.Query) + `,`, + `TargetBytesPerShard:` + fmt.Sprintf("%v", this.TargetBytesPerShard) + `,`, + `}`, + }, "") + return s +} +func (this *ShardsResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForShards := "[]Shard{" + for _, f := range this.Shards { + repeatedStringForShards += strings.Replace(strings.Replace(f.String(), "Shard", "Shard", 1), `&`, ``, 1) + "," + } + repeatedStringForShards += "}" + s := strings.Join([]string{`&ShardsResponse{`, + `Shards:` + repeatedStringForShards + `,`, + `Statistics:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Statistics), "Result", "stats.Result", 1), `&`, ``, 1) + `,`, + `}`, + }, "") + return s +} +func (this *Shard) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&Shard{`, + `Bounds:` + strings.Replace(strings.Replace(this.Bounds.String(), "FPBounds", "FPBounds", 1), `&`, ``, 1) + `,`, + `Stats:` + strings.Replace(fmt.Sprintf("%v", this.Stats), "IndexStatsResponse", "IndexStatsResponse", 1) + `,`, + `}`, + }, "") + return s +} +func (this *FPBounds) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&FPBounds{`, + `Min:` + fmt.Sprintf("%v", this.Min) + `,`, + `Max:` + fmt.Sprintf("%v", this.Max) + `,`, + `}`, + }, "") + return s +} +func valueToStringIndexgateway(v interface{}) string { + rv := reflect.ValueOf(v) + if rv.IsNil() { + return "nil" + } + pv := reflect.Indirect(rv).Interface() + return fmt.Sprintf("*%v", pv) +} +func (m *ShardsRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ShardsRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ShardsRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field From", wireType) + } + m.From = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.From |= github_com_prometheus_common_model.Time(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Through", wireType) + } + m.Through = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Through |= github_com_prometheus_common_model.Time(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + 
break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Query = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field TargetBytesPerShard", wireType) + } + m.TargetBytesPerShard = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.TargetBytesPerShard |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ShardsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ShardsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ShardsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Shards", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Shards = append(m.Shards, Shard{}) + if err := m.Shards[len(m.Shards)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Statistics", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Statistics.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return 
io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Shard) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Shard: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Shard: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Bounds", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Bounds.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Stats", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthIndexgateway + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthIndexgateway + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Stats == nil { + m.Stats = &IndexStatsResponse{} + } + if err := m.Stats.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *FPBounds) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: FPBounds: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: FPBounds: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Min", wireType) + } + m.Min = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := 
dAtA[iNdEx] + iNdEx++ + m.Min |= github_com_prometheus_common_model.Fingerprint(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Max", wireType) + } + m.Max = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Max |= github_com_prometheus_common_model.Fingerprint(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipIndexgateway(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthIndexgateway + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipIndexgateway(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + return iNdEx, nil + case 1: + iNdEx += 8 + return iNdEx, nil + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthIndexgateway + } + iNdEx += length + if iNdEx < 0 { + return 0, ErrInvalidLengthIndexgateway + } + return iNdEx, nil + case 3: + for { + var innerWire uint64 + var start int = iNdEx + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowIndexgateway + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + innerWire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + innerWireType := int(innerWire & 0x7) + if innerWireType == 4 { + break + } + next, err := skipIndexgateway(dAtA[start:]) + if err != nil { + return 0, err + } + iNdEx = start + next + if iNdEx < 0 { + return 0, ErrInvalidLengthIndexgateway + } + } + return iNdEx, nil + case 4: + return iNdEx, nil + case 5: + iNdEx += 4 + return iNdEx, nil + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + } + panic("unreachable") +} + +var ( + ErrInvalidLengthIndexgateway = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowIndexgateway = fmt.Errorf("proto: integer overflow") +) diff --git a/pkg/logproto/indexgateway.proto b/pkg/logproto/indexgateway.proto index af34e03a279cb..33cfbec317d57 100644 --- a/pkg/logproto/indexgateway.proto +++ b/pkg/logproto/indexgateway.proto @@ -2,7 +2,9 @@ syntax = "proto3"; package indexgatewaypb; +import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; +import "pkg/logqlmodel/stats/stats.proto"; option go_package = "github.com/grafana/loki/pkg/logproto"; @@ -25,4 +27,55 @@ service IndexGateway { // Note: this MUST be the same as the 
variant defined in // logproto.proto on the Querier service. rpc GetVolume(logproto.VolumeRequest) returns (logproto.VolumeResponse) {} + + // GetShards is an optimized implemented shard-planning implementation + // on the index gateway and not on the ingester. + rpc GetShards(ShardsRequest) returns (stream ShardsResponse); +} + +message ShardsRequest { + int64 from = 1 [ + (gogoproto.customtype) = "github.com/prometheus/common/model.Time", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "from" + ]; + int64 through = 2 [ + (gogoproto.customtype) = "github.com/prometheus/common/model.Time", + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "through" + ]; + string query = 3 [(gogoproto.jsontag) = "query"]; + uint64 target_bytes_per_shard = 4 [(gogoproto.jsontag) = "targetBytesPerShard"]; +} + +message ShardsResponse { + repeated Shard shards = 1 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "shards" + ]; + stats.Result statistics = 2 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "statistics" + ]; +} + +message Shard { + FPBounds bounds = 1 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "bounds" + ]; + logproto.IndexStatsResponse stats = 2 [(gogoproto.jsontag) = "stats"]; +} + +// FPBounds is identical to the definition in `pkg/storage/bloom/v1/bounds.FingerprintBounds` +// which ensures we can cast between them without allocations. +message FPBounds { + uint64 min = 1 [ + (gogoproto.casttype) = "github.com/prometheus/common/model.Fingerprint", + (gogoproto.jsontag) = "min" + ]; + uint64 max = 2 [ + (gogoproto.casttype) = "github.com/prometheus/common/model.Fingerprint", + (gogoproto.jsontag) = "max" + ]; } diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index d50ae7d1e5db4..7302df0cfb212 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -386,6 +386,7 @@ func (m *SampleQueryRequest) GetDeletes() []*Delete { return nil } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. type Plan struct { Raw []byte `protobuf:"bytes,1,opt,name=raw,proto3" json:"raw,omitempty"` } @@ -1740,6 +1741,7 @@ func (m *LabelNamesForMetricNameRequest) GetMetricName() string { return "" } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. type LineFilter struct { Raw []byte `protobuf:"bytes,1,opt,name=raw,proto3" json:"raw,omitempty"` } diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index bf175168cfd93..c50246a1b57b5 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -83,6 +83,7 @@ message SampleQueryRequest { Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. message Plan { bytes raw = 1; } @@ -297,6 +298,7 @@ message LabelNamesForMetricNameRequest { ]; } +// TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. 
message LineFilter { bytes raw = 1; } diff --git a/pkg/logql/downstream.go b/pkg/logql/downstream.go index 11f94132a4bf6..5dea1144d9a18 100644 --- a/pkg/logql/downstream.go +++ b/pkg/logql/downstream.go @@ -16,7 +16,6 @@ import ( "github.com/grafana/loki/pkg/logqlmodel" "github.com/grafana/loki/pkg/logqlmodel/metadata" "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/util" util_log "github.com/grafana/loki/pkg/util/log" ) @@ -75,7 +74,7 @@ func (ng *DownstreamEngine) Query(ctx context.Context, p Params) Query { // DownstreamSampleExpr is a SampleExpr which signals downstream computation type DownstreamSampleExpr struct { - shard *astmapper.ShardAnnotation + shard *Shard syntax.SampleExpr } @@ -108,7 +107,7 @@ func (d DownstreamSampleExpr) Pretty(level int) string { // DownstreamLogSelectorExpr is a LogSelectorExpr which signals downstream computation type DownstreamLogSelectorExpr struct { - shard *astmapper.ShardAnnotation + shard *Shard syntax.LogSelectorExpr } @@ -302,50 +301,6 @@ func (e *QuantileSketchMergeExpr) Walk(f syntax.WalkFn) { } } -type Shards []astmapper.ShardAnnotation - -func (xs Shards) Encode() (encoded []string) { - for _, shard := range xs { - encoded = append(encoded, shard.String()) - } - - return encoded -} - -// ParseShards parses a list of string encoded shards -func ParseShards(strs []string) (Shards, error) { - if len(strs) == 0 { - return nil, nil - } - shards := make([]astmapper.ShardAnnotation, 0, len(strs)) - - for _, str := range strs { - shard, err := astmapper.ParseShard(str) - if err != nil { - return nil, err - } - shards = append(shards, shard) - } - return shards, nil -} - -func ParseShardCount(strs []string) int { - if len(strs) == 0 { - return 0 - } - - for _, str := range strs { - shard, err := astmapper.ParseShard(str) - if err != nil { - continue - } - - return shard.Of - } - - return 0 -} - type Downstreamable interface { Downstreamer(context.Context) Downstreamer } @@ -435,7 +390,7 @@ func (ev *DownstreamEvaluator) NewStepEvaluator( case DownstreamSampleExpr: // downstream to a querier - var shards []astmapper.ShardAnnotation + var shards Shards if e.shard != nil { shards = append(shards, *e.shard) } @@ -443,7 +398,7 @@ func (ev *DownstreamEvaluator) NewStepEvaluator( results, err := ev.Downstream(ctx, []DownstreamQuery{{ Params: ParamsWithShardsOverride{ Params: ParamsWithExpressionOverride{Params: params, ExpressionOverride: e.SampleExpr}, - ShardsOverride: Shards(shards).Encode(), + ShardsOverride: shards.Encode(), }, }}, acc) if err != nil { diff --git a/pkg/logql/downstream_test.go b/pkg/logql/downstream_test.go index b3b8c6f37e48a..68afe83cceead 100644 --- a/pkg/logql/downstream_test.go +++ b/pkg/logql/downstream_test.go @@ -96,7 +96,8 @@ func TestMappingEquivalence(t *testing.T) { qry := regular.Query(params) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{}) // TODO (callum) refactor this test so that we won't need to set every // possible sharding config option to true when we have multiple in the future if tc.approximate { @@ -166,7 +167,8 @@ func TestMappingEquivalenceSketches(t *testing.T) { qry := regular.Query(params) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, 
[]string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mapped, err := mapper.Parse(params.GetExpression()) require.NoError(t, err) @@ -200,7 +202,8 @@ func TestMappingEquivalenceSketches(t *testing.T) { qry := regular.Query(params) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mapped, err := mapper.Parse(params.GetExpression()) require.NoError(t, err) @@ -265,7 +268,8 @@ func TestShardCounter(t *testing.T) { require.NoError(t, err) ctx := user.InjectOrgID(context.Background(), "fake") - mapper := NewShardMapper(ConstantShards(shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(shards)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) noop, _, mapped, err := mapper.Parse(params.GetExpression()) require.NoError(t, err) @@ -620,10 +624,10 @@ func TestFormat_ShardedExpr(t *testing.T) { name: "ConcatSampleExpr", in: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 3, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -636,10 +640,10 @@ func TestFormat_ShardedExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 3, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -652,10 +656,10 @@ func TestFormat_ShardedExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 3, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -701,7 +705,8 @@ func TestPrettierWithoutShards(t *testing.T) { q := `((quantile_over_time(0.5,{foo="bar"} | json | unwrap bytes[1d]) by (cluster) > 42) and (count by (cluster)(max_over_time({foo="baz"} |= "error" | json | unwrap bytes[1d]) by (cluster,namespace)) > 10))` e := syntax.MustParseExpr(q) - mapper := NewShardMapper(ConstantShards(4), nilShardMetrics, []string{}) + strategy := NewPowerOfTwoStrategy(ConstantShards(4)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{}) _, _, mapped, err := mapper.Parse(e) require.NoError(t, err) got := syntax.Prettify(mapped) @@ -738,42 +743,3 @@ and 10` assert.Equal(t, expected, got) } - -func TestParseShardCount(t *testing.T) { - for _, st := range []struct { - name string - shards []string - expected int - }{ - { - name: "empty shards", - shards: []string{}, - expected: 0, - }, - { - name: "single shard", - shards: []string{"0_of_3"}, - expected: 3, - }, - { - name: "single shard with error", - shards: []string{"0_of_"}, - expected: 0, - }, - { - name: "multiple shards", - shards: []string{"0_of_3", "0_of_4"}, - expected: 3, - }, - { - name: "multiple shards with errors", - shards: []string{"_of_3", "0_of_4"}, - expected: 4, - }, - } { - 
t.Run(st.name, func(t *testing.T) { - require.Equal(t, st.expected, ParseShardCount(st.shards)) - }) - - } -} diff --git a/pkg/logql/explain_test.go b/pkg/logql/explain_test.go index 307aa10cfa98d..84364b633c228 100644 --- a/pkg/logql/explain_test.go +++ b/pkg/logql/explain_test.go @@ -28,7 +28,8 @@ func TestExplain(t *testing.T) { defaultEv := NewDefaultEvaluator(querier, 30*time.Second) downEv := &DownstreamEvaluator{Downstreamer: MockDownstreamer{regular}, defaultEvaluator: defaultEv} - mapper := NewShardMapper(ConstantShards(4), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(4)) + mapper := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, expr, err := mapper.Parse(syntax.MustParseExpr(query)) require.NoError(t, err) diff --git a/pkg/logql/mapper_metrics.go b/pkg/logql/mapper_metrics.go index 3588231700a63..4ec8cb8454c78 100644 --- a/pkg/logql/mapper_metrics.go +++ b/pkg/logql/mapper_metrics.go @@ -42,9 +42,10 @@ func newMapperMetrics(registerer prometheus.Registerer, mapper string) *MapperMe ConstLabels: prometheus.Labels{"mapper": mapper}, }, []string{"type"}), DownstreamFactor: promauto.With(registerer).NewHistogram(prometheus.HistogramOpts{ - Namespace: constants.Loki, - Name: "query_frontend_shard_factor", - Help: "Number of downstream queries per request", + Namespace: constants.Loki, + Name: "query_frontend_shard_factor", + Help: "Number of downstream queries per request", + // 1 -> 65k shards Buckets: prometheus.ExponentialBuckets(1, 4, 8), ConstLabels: prometheus.Labels{"mapper": mapper}, }), diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index 1399ac15ef9f5..bd5bfec77db5e 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -2,10 +2,12 @@ package logql import ( "context" + "fmt" "strconv" "strings" "time" + "github.com/c2h5oh/datasize" "github.com/dustin/go-humanize" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -32,6 +34,7 @@ const ( QueryTypeLabels = "labels" QueryTypeSeries = "series" QueryTypeStats = "stats" + QueryTypeShards = "shards" QueryTypeVolume = "volume" latencyTypeSlow = "slow" @@ -128,6 +131,11 @@ func RecordRangeAndInstantQueryMetrics( logValues := make([]interface{}, 0, 50) + var bloomRatio float64 // what % are filtered + if stats.Index.TotalChunks > 0 { + bloomRatio = float64(stats.Index.TotalChunks-stats.Index.PostFilterChunks) / float64(stats.Index.TotalChunks) + } + logValues = append(logValues, []interface{}{ "latency", latencyType, // this can be used to filter log lines. "query", query, @@ -191,6 +199,9 @@ func RecordRangeAndInstantQueryMetrics( "ingester_post_filter_lines", stats.Ingester.Store.Chunk.GetPostFilterLines(), // Time spent being blocked on congestion control. "congestion_control_latency", stats.CongestionControlLatency(), + "index_total_chunks", stats.Index.TotalChunks, + "index_post_bloom_filter_chunks", stats.Index.PostFilterChunks, + "index_bloom_filter_ratio", fmt.Sprintf("%.2f", bloomRatio), }...) logValues = append(logValues, tagsToKeyValues(queryTags)...) 
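To make the new log fields concrete: if the index reports index_total_chunks=1000 and index_post_bloom_filter_chunks=250, the logged index_bloom_filter_ratio is (1000 - 250) / 1000 = 0.75, i.e. bloom filtering removed 75% of the candidate chunks; when TotalChunks is zero the ratio is left at 0 to avoid dividing by zero.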
@@ -370,6 +381,58 @@ func RecordStatsQueryMetrics(ctx context.Context, log log.Logger, start, end tim execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) } +func RecordShardsQueryMetrics( + ctx context.Context, + log log.Logger, + start, + end time.Time, + query string, + targetBytesPerShard uint64, + status string, + shards int, + stats logql_stats.Result, +) { + var ( + logger = fixLogger(ctx, log) + latencyType = latencyTypeFast + queryType = QueryTypeShards + ) + + // Tag throughput metric by latency type based on a threshold. + // Latency below the threshold is fast, above is slow. + if stats.Summary.ExecTime > slowQueryThresholdSecond { + latencyType = latencyTypeSlow + } + + var bloomRatio float64 // what % are filtered + if stats.Index.TotalChunks > 0 { + bloomRatio = float64(stats.Index.TotalChunks-stats.Index.PostFilterChunks) / float64(stats.Index.TotalChunks) + } + logValues := make([]interface{}, 0, 15) + logValues = append(logValues, + "latency", latencyType, + "query_type", queryType, + "start", start.Format(time.RFC3339Nano), + "end", end.Format(time.RFC3339Nano), + "start_delta", time.Since(start), + "end_delta", time.Since(end), + "length", end.Sub(start), + "duration", time.Duration(int64(stats.Summary.ExecTime*float64(time.Second))), + "status", status, + "query", query, + "query_hash", util.HashedQuery(query), + "target_bytes_per_shard", datasize.ByteSize(targetBytesPerShard).HumanReadable(), + "shards", shards, + "index_total_chunks", stats.Index.TotalChunks, + "index_post_bloom_filter_chunks", stats.Index.PostFilterChunks, + "index_bloom_filter_ratio", fmt.Sprintf("%.2f", bloomRatio), + ) + + level.Info(logger).Log(logValues...) + + execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) +} + func RecordVolumeQueryMetrics(ctx context.Context, log log.Logger, start, end time.Time, query string, limit uint32, step time.Duration, status string, stats logql_stats.Result) { var ( logger = fixLogger(ctx, log) diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go index a8fb04c826a6a..a1c17c86da036 100644 --- a/pkg/logql/shardmapper.go +++ b/pkg/logql/shardmapper.go @@ -7,35 +7,22 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/stores/index/stats" util_log "github.com/grafana/loki/pkg/util/log" ) -type ShardResolver interface { - Shards(expr syntax.Expr) (int, uint64, error) - GetStats(e syntax.Expr) (stats.Stats, error) -} - -type ConstantShards int - -func (s ConstantShards) Shards(_ syntax.Expr) (int, uint64, error) { return int(s), 0, nil } -func (s ConstantShards) GetStats(_ syntax.Expr) (stats.Stats, error) { return stats.Stats{}, nil } - const ( ShardQuantileOverTime = "quantile_over_time" ) type ShardMapper struct { - shards ShardResolver + shards ShardingStrategy metrics *MapperMetrics quantileOverTimeSharding bool } -func NewShardMapper(resolver ShardResolver, metrics *MapperMetrics, shardAggregation []string) ShardMapper { +func NewShardMapper(strategy ShardingStrategy, metrics *MapperMetrics, shardAggregation []string) ShardMapper { quantileOverTimeSharding := false for _, a := range shardAggregation { if a == ShardQuantileOverTime { @@ -43,7 +30,7 @@ func NewShardMapper(resolver ShardResolver, metrics *MapperMetrics, shardAggrega } } return ShardMapper{ - shards: resolver, + shards: 
strategy, metrics: metrics, quantileOverTimeSharding: quantileOverTimeSharding, } @@ -148,71 +135,70 @@ func (m ShardMapper) mapBinOpExpr(e *syntax.BinOpExpr, r *downstreamRecorder, to e.RHS = rhsSampleExpr // We take the maximum bytes per shard of both sides of the operation - bytesPerShard := uint64(math.Max(int(lhsBytesPerShard), int(rhsBytesPerShard))) + bytesPerShard := uint64(max(int(lhsBytesPerShard), int(rhsBytesPerShard))) return e, bytesPerShard, nil } func (m ShardMapper) mapLogSelectorExpr(expr syntax.LogSelectorExpr, r *downstreamRecorder) (syntax.LogSelectorExpr, uint64, error) { var head *ConcatLogSelectorExpr - shards, bytesPerShard, err := m.shards.Shards(expr) + shards, maxBytesPerShard, err := m.shards.Shards(expr) if err != nil { return nil, 0, err } - if shards == 0 { + if len(shards) == 0 { return &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ shard: nil, LogSelectorExpr: expr, }, - }, bytesPerShard, nil + }, maxBytesPerShard, nil } - for i := shards - 1; i >= 0; i-- { + + for i := len(shards) - 1; i >= 0; i-- { head = &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: i, - Of: shards, - }, + shard: &shards[i], LogSelectorExpr: expr, }, next: head, } } - r.Add(shards, StreamsKey) - return head, bytesPerShard, nil + r.Add(len(shards), StreamsKey) + return head, maxBytesPerShard, nil } func (m ShardMapper) mapSampleExpr(expr syntax.SampleExpr, r *downstreamRecorder) (syntax.SampleExpr, uint64, error) { var head *ConcatSampleExpr - shards, bytesPerShard, err := m.shards.Shards(expr) + shards, maxBytesPerShard, err := m.shards.Shards(expr) + if err != nil { return nil, 0, err } - if shards == 0 { + + if len(shards) == 0 { return &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ shard: nil, SampleExpr: expr, }, - }, bytesPerShard, nil + }, maxBytesPerShard, nil } - for shard := shards - 1; shard >= 0; shard-- { + + for i := len(shards) - 1; i >= 0; i-- { head = &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: shard, - Of: shards, - }, + shard: &shards[i], SampleExpr: expr, }, next: head, } } - r.Add(shards, MetricsKey) - return head, bytesPerShard, nil + r.Add(len(shards), MetricsKey) + + return head, maxBytesPerShard, nil } // turn a vector aggr into a wrapped+sharded variant, @@ -273,7 +259,7 @@ func (m ShardMapper) mapVectorAggregationExpr(expr *syntax.VectorAggregationExpr } // We take the maximum bytes per shard of both sides of the operation - bytesPerShard := uint64(math.Max(int(lhsBytesPerShard), int(rhsBytesPerShard))) + bytesPerShard := uint64(max(int(lhsBytesPerShard), int(rhsBytesPerShard))) return &syntax.BinOpExpr{ SampleExpr: lhs, @@ -307,7 +293,7 @@ func (m ShardMapper) mapVectorAggregationExpr(expr *syntax.VectorAggregationExpr "msg", "unexpected operation which appears shardable, ignoring", "operation", expr.Operation, ) - exprStats, err := m.shards.GetStats(expr) + exprStats, err := m.shards.Resolver().GetStats(expr) if err != nil { return nil, 0, err } @@ -366,7 +352,7 @@ var rangeMergeMap = map[string]string{ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, r *downstreamRecorder, topLevel bool) (syntax.SampleExpr, uint64, error) { if !expr.Shardable(topLevel) { - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } switch expr.Operation { @@ -442,7 +428,7 @@ func (m ShardMapper) mapRangeAggregationExpr(expr 
*syntax.RangeAggregationExpr, } // We take the maximum bytes per shard of both sides of the operation - bytesPerShard := uint64(math.Max(int(lhsBytesPerShard), int(rhsBytesPerShard))) + bytesPerShard := uint64(max(int(lhsBytesPerShard), int(rhsBytesPerShard))) return &syntax.BinOpExpr{ SampleExpr: lhs, @@ -452,7 +438,7 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, case syntax.OpRangeTypeQuantile: if !m.quantileOverTimeSharding { - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } potentialConflict := syntax.ReducesLabels(expr) @@ -460,12 +446,15 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, return m.mapSampleExpr(expr, r) } - shards, bytesPerShard, err := m.shards.Shards(expr) + // TODO(owen-d): integrate bounded sharding with quantile over time + // I'm not doing this now because it uses a separate code path and may not handle + // bounded shards in the same way + shards, bytesPerShard, err := m.shards.Resolver().Shards(expr) if err != nil { return nil, 0, err } if shards == 0 { - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } // quantile_over_time() by (foo) -> @@ -475,11 +464,12 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, downstreams := make([]DownstreamSampleExpr, 0, shards) expr.Operation = syntax.OpRangeTypeQuantileSketch for shard := shards - 1; shard >= 0; shard-- { + s := NewPowerOfTwoShard(astmapper.ShardAnnotation{ + Shard: shard, + Of: shards, + }) downstreams = append(downstreams, DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: shard, - Of: shards, - }, + shard: &s, SampleExpr: expr, }) } @@ -493,7 +483,7 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, default: // don't shard if there's not an appropriate optimization - return noOp(expr, m.shards) + return noOp(expr, m.shards.Resolver()) } } diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index e7fbb9d5204c3..472bc51806041 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -20,20 +20,20 @@ func TestShardedStringer(t *testing.T) { { in: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, @@ -51,7 +51,9 @@ func TestShardedStringer(t *testing.T) { } func TestMapSampleExpr(t *testing.T) { - m := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{ShardQuantileOverTime}) + + strategy := NewPowerOfTwoStrategy(ConstantShards(2)) + m := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) for _, tc := range []struct { in syntax.SampleExpr @@ -69,10 +71,10 @@ func TestMapSampleExpr(t *testing.T) { }, out: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: 
syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -85,10 +87,10 @@ func TestMapSampleExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -113,7 +115,8 @@ func TestMapSampleExpr(t *testing.T) { } func TestMappingStrings(t *testing.T) { - m := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(2)) + m := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) for _, tc := range []struct { in string out string @@ -452,7 +455,8 @@ func TestMappingStrings_NoProbabilisticSharding(t *testing.T) { }, } { t.Run(tc.in, func(t *testing.T) { - shardedMapper := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{ShardQuantileOverTime}) + + shardedMapper := NewShardMapper(NewPowerOfTwoStrategy(ConstantShards(2)), nilShardMetrics, []string{ShardQuantileOverTime}) ast, err := syntax.ParseExpr(tc.in) require.Nil(t, err) @@ -462,7 +466,7 @@ func TestMappingStrings_NoProbabilisticSharding(t *testing.T) { require.Equal(t, removeWhiteSpace(tc.out), removeWhiteSpace(sharded.String())) - unshardedMapper := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{}) + unshardedMapper := NewShardMapper(NewPowerOfTwoStrategy(ConstantShards(2)), nilShardMetrics, []string{}) ast, err = syntax.ParseExpr(tc.in) require.Nil(t, err) @@ -476,7 +480,8 @@ func TestMappingStrings_NoProbabilisticSharding(t *testing.T) { } func TestMapping(t *testing.T) { - m := NewShardMapper(ConstantShards(2), nilShardMetrics, []string{}) + strategy := NewPowerOfTwoStrategy(ConstantShards(2)) + m := NewShardMapper(strategy, nilShardMetrics, []string{}) for _, tc := range []struct { in string @@ -487,20 +492,20 @@ func TestMapping(t *testing.T) { in: `{foo="bar"}`, expr: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, }, @@ -513,10 +518,10 @@ func TestMapping(t *testing.T) { in: `{foo="bar"} |= "error"`, expr: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.PipelineExpr{ Left: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, @@ -534,10 +539,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), LogSelectorExpr: &syntax.PipelineExpr{ Left: &syntax.MatchersExpr{ Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, @@ -561,10 +566,10 @@ func 
TestMapping(t *testing.T) { in: `rate({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -577,10 +582,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -599,10 +604,10 @@ func TestMapping(t *testing.T) { in: `count_over_time({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeCount, Left: &syntax.LogRange{ @@ -615,10 +620,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeCount, Left: &syntax.LogRange{ @@ -640,10 +645,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -660,10 +665,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -691,10 +696,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeTopK, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -707,10 +712,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.RangeAggregationExpr{ Operation: syntax.OpRangeTypeRate, Left: &syntax.LogRange{ @@ -733,10 +738,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -753,10 +758,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + 
}).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -785,10 +790,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -805,10 +810,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -832,10 +837,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -852,10 +857,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -892,10 +897,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -914,10 +919,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -954,10 +959,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -974,10 +979,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeSum, @@ -1009,10 +1014,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1029,10 +1034,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: 
DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1071,10 +1076,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1093,10 +1098,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1123,10 +1128,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1143,10 +1148,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1185,10 +1190,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1207,10 +1212,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1236,10 +1241,10 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1256,10 +1261,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{}, Operation: syntax.OpTypeCount, @@ -1291,10 +1296,10 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: 
&syntax.Grouping{ Groups: []string{"cluster"}, @@ -1316,10 +1321,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1350,10 +1355,10 @@ func TestMapping(t *testing.T) { }, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1372,10 +1377,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Grouping: &syntax.Grouping{ Groups: []string{"cluster"}, @@ -1461,10 +1466,10 @@ func TestMapping(t *testing.T) { RHS: &syntax.VectorAggregationExpr{ Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 0, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Left: &syntax.RangeAggregationExpr{ Left: &syntax.LogRange{ @@ -1484,10 +1489,10 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: &astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ Shard: 1, Of: 2, - }, + }).Ptr(), SampleExpr: &syntax.VectorAggregationExpr{ Left: &syntax.RangeAggregationExpr{ Left: &syntax.LogRange{ @@ -1661,7 +1666,8 @@ func TestStringTrimming(t *testing.T) { }, } { t.Run(tc.expr.String(), func(t *testing.T) { - m := NewShardMapper(ConstantShards(tc.shards), nilShardMetrics, []string{ShardQuantileOverTime}) + strategy := NewPowerOfTwoStrategy(ConstantShards(tc.shards)) + m := NewShardMapper(strategy, nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mappedExpr, err := m.Parse(tc.expr) require.Nil(t, err) require.Equal(t, removeWhiteSpace(tc.expected), removeWhiteSpace(mappedExpr.String())) @@ -1680,7 +1686,7 @@ func TestShardTopk(t *testing.T) { sum_over_time({job="foo"} | json | unwrap bytes(bytes)[1m]) ) )` - m := NewShardMapper(ConstantShards(5), nilShardMetrics, []string{ShardQuantileOverTime}) + m := NewShardMapper(NewPowerOfTwoStrategy(ConstantShards(5)), nilShardMetrics, []string{ShardQuantileOverTime}) _, _, mappedExpr, err := m.Parse(syntax.MustParseExpr(expr)) require.NoError(t, err) diff --git a/pkg/logql/shards.go b/pkg/logql/shards.go new file mode 100644 index 0000000000000..7ca7f67cb367d --- /dev/null +++ b/pkg/logql/shards.go @@ -0,0 +1,248 @@ +package logql + +import ( + "encoding/json" + + "github.com/grafana/dskit/multierror" + "github.com/pkg/errors" + "github.com/prometheus/common/model" + + "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/pkg/querier/astmapper" + v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" +) + +type Shards []Shard + +type ShardVersion uint8 + +const ( + PowerOfTwoVersion ShardVersion = iota + BoundedVersion 
+) + +func (v ShardVersion) Strategy(resolver ShardResolver, defaultTargetShardBytes uint64) ShardingStrategy { + switch v { + case BoundedVersion: + return NewDynamicBoundsStrategy(resolver, defaultTargetShardBytes) + default: + // TODO(owen-d): refactor, ugly, etc, but the power of two strategy already populated + // the default target shard bytes through it's resolver + return NewPowerOfTwoStrategy(resolver) + } +} + +func (v ShardVersion) String() string { + switch v { + case PowerOfTwoVersion: + return "power_of_two" + case BoundedVersion: + return "bounded" + default: + return "unknown" + } +} + +var validStrategies = map[string]ShardVersion{ + PowerOfTwoVersion.String(): PowerOfTwoVersion, + BoundedVersion.String(): BoundedVersion, +} + +func ParseShardVersion(s string) (ShardVersion, error) { + v, ok := validStrategies[s] + if !ok { + return PowerOfTwoVersion, errors.Errorf("invalid shard version %s", s) + } + return v, nil +} + +type ShardResolver interface { + Shards(expr syntax.Expr) (int, uint64, error) + ShardingRanges(expr syntax.Expr, targetBytesPerShard uint64) ([]logproto.Shard, error) + GetStats(e syntax.Expr) (stats.Stats, error) +} + +type ConstantShards int + +func (s ConstantShards) Shards(_ syntax.Expr) (int, uint64, error) { return int(s), 0, nil } +func (s ConstantShards) ShardingRanges(_ syntax.Expr, _ uint64) ([]logproto.Shard, error) { + return sharding.LinearShards(int(s), 0), nil +} +func (s ConstantShards) GetStats(_ syntax.Expr) (stats.Stats, error) { return stats.Stats{}, nil } + +type ShardingStrategy interface { + Shards(expr syntax.Expr) (shards Shards, maxBytesPerShard uint64, err error) + Resolver() ShardResolver +} + +type DynamicBoundsStrategy struct { + resolver ShardResolver + targetBytesPerShard uint64 +} + +func (s DynamicBoundsStrategy) Shards(expr syntax.Expr) (Shards, uint64, error) { + shards, err := s.resolver.ShardingRanges(expr, s.targetBytesPerShard) + if err != nil { + return nil, 0, err + } + + var maxBytes uint64 + res := make(Shards, 0, len(shards)) + for _, shard := range shards { + if shard.Stats != nil { + maxBytes = max(maxBytes, shard.Stats.Bytes) + } + res = append(res, NewBoundedShard(shard)) + } + + return res, maxBytes, nil +} + +func (s DynamicBoundsStrategy) Resolver() ShardResolver { + return s.resolver +} + +func NewDynamicBoundsStrategy(resolver ShardResolver, targetBytesPerShard uint64) DynamicBoundsStrategy { + return DynamicBoundsStrategy{resolver: resolver, targetBytesPerShard: targetBytesPerShard} +} + +type PowerOfTwoStrategy struct { + resolver ShardResolver +} + +func NewPowerOfTwoStrategy(resolver ShardResolver) PowerOfTwoStrategy { + return PowerOfTwoStrategy{resolver: resolver} +} + +func (s PowerOfTwoStrategy) Resolver() ShardResolver { + return s.resolver +} + +func (s PowerOfTwoStrategy) Shards(expr syntax.Expr) (Shards, uint64, error) { + factor, bytesPerShard, err := s.resolver.Shards(expr) + if err != nil { + return nil, 0, err + } + + if factor == 0 { + return nil, bytesPerShard, nil + } + + res := make(Shards, 0, factor) + for i := 0; i < factor; i++ { + res = append(res, NewPowerOfTwoShard(astmapper.ShardAnnotation{Of: factor, Shard: i})) + } + return res, bytesPerShard, nil +} + +// Shard represents a shard annotation +// It holds either a power of two shard (legacy) or a bounded shard +type Shard struct { + PowerOfTwo *astmapper.ShardAnnotation + Bounded *logproto.Shard +} + +func (s *Shard) Variant() ShardVersion { + if s.Bounded != nil { + return BoundedVersion + } + + return PowerOfTwoVersion +} 
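For orientation, a sketch (not part of this diff) of how the two shard encodings round-trip through the constructors and `ParseShard` defined further down in this file. The string forms match the expectations in `shards_test.go`; the import paths assume the pre-v3 `pkg/` layout used throughout this patch:

```go
package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logproto"
	"github.com/grafana/loki/pkg/logql"
	"github.com/grafana/loki/pkg/querier/astmapper"
)

func main() {
	// Legacy power-of-two shard, encoded as "1_of_2".
	legacy := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 2})

	// Bounded shard, encoded as JSON with fingerprint bounds (and optional stats).
	bounded := logql.NewBoundedShard(logproto.Shard{
		Bounds: logproto.FPBounds{Min: 1, Max: 2},
	})

	for _, s := range logql.Shards{legacy, bounded} {
		str := s.String()
		parsed, version, err := logql.ParseShard(str)
		// Each encoding parses back to the variant it was produced from.
		fmt.Println(str, version, parsed.Variant() == version, err)
	}
}
```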
+ +// implement FingerprintFilter +func (s *Shard) Match(fp model.Fingerprint) bool { + if s.Bounded != nil { + return v1.BoundsFromProto(s.Bounded.Bounds).Match(fp) + } + + return s.PowerOfTwo.Match(fp) +} + +func (s *Shard) GetFromThrough() (model.Fingerprint, model.Fingerprint) { + if s.Bounded != nil { + return v1.BoundsFromProto(s.Bounded.Bounds).GetFromThrough() + } + + return s.PowerOfTwo.TSDB().GetFromThrough() +} + +// convenience method for unaddressability concerns using constructors in literals (tests) +func (s Shard) Ptr() *Shard { + return &s +} + +func NewBoundedShard(shard logproto.Shard) Shard { + return Shard{Bounded: &shard} +} + +func NewPowerOfTwoShard(shard astmapper.ShardAnnotation) Shard { + return Shard{PowerOfTwo: &shard} +} + +func (s Shard) String() string { + if s.Bounded != nil { + b, err := json.Marshal(s.Bounded) + if err != nil { + panic(err) + } + return string(b) + } + + return s.PowerOfTwo.String() +} + +func (xs Shards) Encode() (encoded []string) { + for _, shard := range xs { + encoded = append(encoded, shard.String()) + } + + return encoded +} + +// ParseShards parses a list of string encoded shards +func ParseShards(strs []string) (Shards, ShardVersion, error) { + if len(strs) == 0 { + return nil, PowerOfTwoVersion, nil + } + shards := make(Shards, 0, len(strs)) + + var prevVersion ShardVersion + for i, str := range strs { + shard, version, err := ParseShard(str) + if err != nil { + return nil, PowerOfTwoVersion, err + } + + if i == 0 { + prevVersion = version + } else if prevVersion != version { + return nil, PowerOfTwoVersion, errors.New("shards must be of the same version") + } + shards = append(shards, shard) + } + return shards, prevVersion, nil +} + +func ParseShard(s string) (Shard, ShardVersion, error) { + + var bounded logproto.Shard + v2Err := json.Unmarshal([]byte(s), &bounded) + if v2Err == nil { + return Shard{Bounded: &bounded}, BoundedVersion, nil + } + + old, v1Err := astmapper.ParseShard(s) + if v1Err == nil { + return Shard{PowerOfTwo: &old}, PowerOfTwoVersion, nil + } + + err := errors.Wrap( + multierror.New(v1Err, v2Err).Err(), + "failed to parse shard", + ) + return Shard{}, PowerOfTwoVersion, err +} diff --git a/pkg/logql/shards_test.go b/pkg/logql/shards_test.go new file mode 100644 index 0000000000000..fd0adb35f881f --- /dev/null +++ b/pkg/logql/shards_test.go @@ -0,0 +1,188 @@ +package logql + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/querier/astmapper" +) + +func TestShardString(t *testing.T) { + for _, rc := range []struct { + shard Shard + exp string + }{ + { + shard: Shard{ + PowerOfTwo: &astmapper.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + }, + exp: "1_of_2", + }, + { + shard: Shard{ + Bounded: &logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + exp: `{"bounds":{"min":1,"max":2},"stats":null}`, + }, + { + shard: Shard{ + Bounded: &logproto.Shard{ + Stats: &logproto.IndexStatsResponse{ + Bytes: 1, + }, + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + exp: `{"bounds":{"min":1,"max":2},"stats":{"streams":0,"chunks":0,"bytes":1,"entries":0}}`, + }, + { + // when more than one are present, + // return the newest successful version (v2) + shard: Shard{ + Bounded: &logproto.Shard{ + Stats: &logproto.IndexStatsResponse{ + Bytes: 1, + }, + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + PowerOfTwo: &astmapper.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + }, + exp: 
`{"bounds":{"min":1,"max":2},"stats":{"streams":0,"chunks":0,"bytes":1,"entries":0}}`, + }, + } { + t.Run(fmt.Sprintf("%+v", rc.shard), func(t *testing.T) { + require.Equal(t, rc.exp, rc.shard.String()) + }) + } +} + +func TestParseShard(t *testing.T) { + for _, rc := range []struct { + str string + version ShardVersion + exp Shard + }{ + { + str: "1_of_2", + version: PowerOfTwoVersion, + exp: Shard{ + PowerOfTwo: &astmapper.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + }, + }, + { + str: `{"bounds":{"min":1,"max":2},"stats":null}`, + version: BoundedVersion, + exp: Shard{ + Bounded: &logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + }, + { + str: `{"bounds":{"min":1,"max":2},"stats":{"streams":0,"chunks":0,"bytes":1,"entries":0}}`, + version: BoundedVersion, + exp: Shard{ + Bounded: &logproto.Shard{ + Stats: &logproto.IndexStatsResponse{ + Bytes: 1, + }, + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }, + }, + }, + } { + t.Run(rc.str, func(t *testing.T) { + shard, version, err := ParseShard(rc.str) + require.NoError(t, err) + require.Equal(t, rc.version, version) + require.Equal(t, rc.exp, shard) + }) + } +} + +func TestParseShards(t *testing.T) { + for _, rc := range []struct { + strs []string + version ShardVersion + exp Shards + err bool + }{ + { + strs: []string{"1_of_2", "1_of_2"}, + version: PowerOfTwoVersion, + exp: Shards{ + NewPowerOfTwoShard(astmapper.ShardAnnotation{ + Shard: 1, + Of: 2, + }), + NewPowerOfTwoShard(astmapper.ShardAnnotation{ + Shard: 1, + Of: 2, + }), + }, + }, + { + strs: []string{`{"bounds":{"min":1,"max":2},"stats":null}`, `{"bounds":{"min":1,"max":2},"stats":null}`}, + version: BoundedVersion, + exp: Shards{ + NewBoundedShard(logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }), + NewBoundedShard(logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: 1, + Max: 2, + }, + }), + }, + }, + { + strs: []string{`{"bounds":{"min":1,"max":2},"stats":null}`, "1_of_2"}, + version: PowerOfTwoVersion, + err: true, + }, + } { + t.Run(fmt.Sprintf("%+v", rc.strs), func(t *testing.T) { + shards, version, err := ParseShards(rc.strs) + if rc.err { + require.Error(t, err) + return + } + require.NoError(t, err) + require.Equal(t, rc.version, version) + require.Equal(t, rc.exp, shards) + }) + } +} diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 72b8429e11bf9..7f41e45be60d2 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -34,6 +34,20 @@ type MockQuerier struct { streams []logproto.Stream } +func (q MockQuerier) extractOldShard(xs []string) (*astmapper.ShardAnnotation, error) { + parsed, version, err := ParseShards(xs) + if err != nil { + return nil, err + } + + if version != PowerOfTwoVersion { + return nil, fmt.Errorf("unsupported shard version: %d", version) + } + + return parsed[0].PowerOfTwo, nil + +} + func (q MockQuerier) SelectLogs(_ context.Context, req SelectLogParams) (iter.EntryIterator, error) { expr, err := req.LogSelector() if err != nil { @@ -48,11 +62,10 @@ func (q MockQuerier) SelectLogs(_ context.Context, req SelectLogParams) (iter.En var shard *astmapper.ShardAnnotation if len(req.Shards) > 0 { - shards, err := ParseShards(req.Shards) + shard, err = q.extractOldShard(req.Shards) if err != nil { return nil, err } - shard = &shards[0] } var matched []logproto.Stream @@ -174,11 +187,10 @@ func (q MockQuerier) SelectSamples(_ context.Context, req SelectSampleParams) (i var shard *astmapper.ShardAnnotation if len(req.Shards) > 0 { - shards, err := 
ParseShards(req.Shards) + shard, err = q.extractOldShard(req.Shards) if err != nil { return nil, err } - shard = &shards[0] } var matched []logproto.Stream diff --git a/pkg/logqlmodel/stats/context.go b/pkg/logqlmodel/stats/context.go index 31344b01c585d..7f2b9cfaf40bb 100644 --- a/pkg/logqlmodel/stats/context.go +++ b/pkg/logqlmodel/stats/context.go @@ -43,6 +43,7 @@ type Context struct { querier Querier ingester Ingester caches Caches + index Index // store is the store statistics collected across the query path store Store @@ -115,6 +116,11 @@ func (c *Context) Caches() Caches { } } +// Index returns the index statistics accumulated so far. +func (c *Context) Index() Index { + return c.index +} + // Reset clears the statistics. func (c *Context) Reset() { c.mtx.Lock() @@ -125,6 +131,7 @@ func (c *Context) Reset() { c.ingester.Reset() c.result.Reset() c.caches.Reset() + c.index.Reset() } // Result calculates the summary based on store and ingester data. @@ -137,6 +144,7 @@ func (c *Context) Result(execTime time.Duration, queueTime time.Duration, totalE }, Ingester: c.ingester, Caches: c.caches, + Index: c.index, }) r.ComputeSummary(execTime, queueTime, totalEntriesReturned) @@ -162,6 +170,15 @@ func JoinIngesters(ctx context.Context, inc Ingester) { stats.ingester.Merge(inc) } +// JoinIndex joins the index statistics in a concurrency-safe manner. +func JoinIndex(ctx context.Context, index Index) { + stats := FromContext(ctx) + stats.mtx.Lock() + defer stats.mtx.Unlock() + + stats.index.Merge(index) +} + // ComputeSummary compute the summary of the statistics. func (r *Result) ComputeSummary(execTime time.Duration, queueTime time.Duration, totalEntriesReturned int) { r.Summary.TotalBytesProcessed = r.Querier.Store.Chunk.DecompressedBytes + r.Querier.Store.Chunk.HeadChunkBytes + @@ -226,6 +243,11 @@ func (i *Ingester) Merge(m Ingester) { i.TotalReached += m.TotalReached } +func (i *Index) Merge(m Index) { + i.TotalChunks += m.TotalChunks + i.PostFilterChunks += m.PostFilterChunks +} + func (c *Caches) Merge(m Caches) { c.Chunk.Merge(m.Chunk) c.Index.Merge(m.Index) @@ -267,6 +289,7 @@ func (r *Result) Merge(m Result) { r.Ingester.Merge(m.Ingester) r.Caches.Merge(m.Caches) r.Summary.Merge(m.Summary) + r.Index.Merge(m.Index) r.ComputeSummary(ConvertSecondsToNanoseconds(r.Summary.ExecTime+m.Summary.ExecTime), ConvertSecondsToNanoseconds(r.Summary.QueueTime+m.Summary.QueueTime), int(r.Summary.TotalEntriesReturned)) } diff --git a/pkg/logqlmodel/stats/stats.pb.go b/pkg/logqlmodel/stats/stats.pb.go index f25143272599b..e26bedf55d33b 100644 --- a/pkg/logqlmodel/stats/stats.pb.go +++ b/pkg/logqlmodel/stats/stats.pb.go @@ -32,6 +32,7 @@ type Result struct { Querier Querier `protobuf:"bytes,2,opt,name=querier,proto3" json:"querier"` Ingester Ingester `protobuf:"bytes,3,opt,name=ingester,proto3" json:"ingester"` Caches Caches `protobuf:"bytes,4,opt,name=caches,proto3" json:"cache"` + Index Index `protobuf:"bytes,5,opt,name=index,proto3" json:"index"` } func (m *Result) Reset() { *m = Result{} } @@ -94,6 +95,13 @@ func (m *Result) GetCaches() Caches { return Caches{} } +func (m *Result) GetIndex() Index { + if m != nil { + return m.Index + } + return Index{} +} + type Caches struct { Chunk Cache `protobuf:"bytes,1,opt,name=chunk,proto3" json:"chunk"` Index Cache `protobuf:"bytes,2,opt,name=index,proto3" json:"index"` @@ -342,6 +350,62 @@ func (m *Summary) GetTotalStructuredMetadataBytesProcessed() int64 { return 0 } +// Statistics from Index queries +// TODO(owen-d): include bytes. 
+// Needs some index methods added to return _sized_ chunk refs to know +type Index struct { + // Total chunks + TotalChunks int64 `protobuf:"varint,1,opt,name=totalChunks,proto3" json:"totalChunks"` + // Post-filtered chunks + PostFilterChunks int64 `protobuf:"varint,2,opt,name=postFilterChunks,proto3" json:"postFilterChunks"` +} + +func (m *Index) Reset() { *m = Index{} } +func (*Index) ProtoMessage() {} +func (*Index) Descriptor() ([]byte, []int) { + return fileDescriptor_6cdfe5d2aea33ebb, []int{3} +} +func (m *Index) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Index.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Index) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index.Merge(m, src) +} +func (m *Index) XXX_Size() int { + return m.Size() +} +func (m *Index) XXX_DiscardUnknown() { + xxx_messageInfo_Index.DiscardUnknown(m) +} + +var xxx_messageInfo_Index proto.InternalMessageInfo + +func (m *Index) GetTotalChunks() int64 { + if m != nil { + return m.TotalChunks + } + return 0 +} + +func (m *Index) GetPostFilterChunks() int64 { + if m != nil { + return m.PostFilterChunks + } + return 0 +} + type Querier struct { Store Store `protobuf:"bytes,1,opt,name=store,proto3" json:"store"` } @@ -349,7 +413,7 @@ type Querier struct { func (m *Querier) Reset() { *m = Querier{} } func (*Querier) ProtoMessage() {} func (*Querier) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{3} + return fileDescriptor_6cdfe5d2aea33ebb, []int{4} } func (m *Querier) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -400,7 +464,7 @@ type Ingester struct { func (m *Ingester) Reset() { *m = Ingester{} } func (*Ingester) ProtoMessage() {} func (*Ingester) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{4} + return fileDescriptor_6cdfe5d2aea33ebb, []int{5} } func (m *Ingester) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -483,7 +547,7 @@ type Store struct { func (m *Store) Reset() { *m = Store{} } func (*Store) ProtoMessage() {} func (*Store) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{5} + return fileDescriptor_6cdfe5d2aea33ebb, []int{6} } func (m *Store) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -585,7 +649,7 @@ type Chunk struct { func (m *Chunk) Reset() { *m = Chunk{} } func (*Chunk) ProtoMessage() {} func (*Chunk) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{6} + return fileDescriptor_6cdfe5d2aea33ebb, []int{7} } func (m *Chunk) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -691,7 +755,7 @@ type Cache struct { func (m *Cache) Reset() { *m = Cache{} } func (*Cache) ProtoMessage() {} func (*Cache) Descriptor() ([]byte, []int) { - return fileDescriptor_6cdfe5d2aea33ebb, []int{7} + return fileDescriptor_6cdfe5d2aea33ebb, []int{8} } func (m *Cache) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -780,6 +844,7 @@ func init() { proto.RegisterType((*Result)(nil), "stats.Result") proto.RegisterType((*Caches)(nil), "stats.Caches") proto.RegisterType((*Summary)(nil), "stats.Summary") + proto.RegisterType((*Index)(nil), "stats.Index") proto.RegisterType((*Querier)(nil), "stats.Querier") proto.RegisterType((*Ingester)(nil), "stats.Ingester") proto.RegisterType((*Store)(nil), "stats.Store") 
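The new `Index` message carries the chunk counters that `JoinIndex` (added in `pkg/logqlmodel/stats/context.go` above) accumulates across the query path. A small sketch of how two partial index results combine via the `Merge` method shown earlier; the counts are hypothetical:

```go
package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logqlmodel/stats"
)

func main() {
	// Two index lookups report their chunk counts; Merge simply sums the
	// TotalChunks and PostFilterChunks fields of the message defined above.
	var total stats.Index
	total.Merge(stats.Index{TotalChunks: 600, PostFilterChunks: 200})
	total.Merge(stats.Index{TotalChunks: 400, PostFilterChunks: 50})

	fmt.Println(total.TotalChunks, total.PostFilterChunks) // 1000 250
}
```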
@@ -790,87 +855,90 @@ func init() { func init() { proto.RegisterFile("pkg/logqlmodel/stats/stats.proto", fileDescriptor_6cdfe5d2aea33ebb) } var fileDescriptor_6cdfe5d2aea33ebb = []byte{ - // 1274 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0x4b, 0x6f, 0xe3, 0x54, - 0x14, 0x8e, 0x27, 0x75, 0xd2, 0xde, 0x3e, 0xe7, 0xb6, 0xc3, 0x64, 0x18, 0x64, 0x97, 0xc0, 0x88, - 0x22, 0x50, 0x23, 0x1e, 0x12, 0x02, 0x31, 0x12, 0x72, 0x87, 0x4a, 0x95, 0x5a, 0x51, 0x4e, 0x40, - 0x42, 0xb0, 0x72, 0xec, 0xdb, 0xc4, 0xaa, 0x63, 0xa7, 0xf6, 0x75, 0x99, 0xae, 0xe0, 0x27, 0xb0, - 0x62, 0xc3, 0x1f, 0x60, 0xc3, 0x8a, 0x15, 0x6b, 0x36, 0xb3, 0xec, 0x72, 0x56, 0x16, 0x4d, 0x37, - 0xc8, 0xab, 0x91, 0xf8, 0x03, 0xe8, 0x3e, 0xe2, 0x57, 0xec, 0x99, 0x6c, 0xea, 0x7b, 0xbe, 0xf3, - 0x7d, 0xe7, 0x3e, 0x72, 0xce, 0xb9, 0xb7, 0x68, 0x77, 0x72, 0x3e, 0xec, 0xb9, 0xfe, 0xf0, 0xc2, - 0x1d, 0xfb, 0x36, 0x71, 0x7b, 0x21, 0x35, 0x69, 0x28, 0xfe, 0xee, 0x4f, 0x02, 0x9f, 0xfa, 0x58, - 0xe5, 0xc6, 0xeb, 0x3b, 0x43, 0x7f, 0xe8, 0x73, 0xa4, 0xc7, 0x46, 0xc2, 0xd9, 0xfd, 0x4f, 0x41, - 0x2d, 0x20, 0x61, 0xe4, 0x52, 0xfc, 0x29, 0x6a, 0x87, 0xd1, 0x78, 0x6c, 0x06, 0x57, 0x1d, 0x65, - 0x57, 0xd9, 0x5b, 0xfd, 0x70, 0x63, 0x5f, 0x84, 0xe9, 0x0b, 0xd4, 0xd8, 0x7c, 0x16, 0xeb, 0x8d, - 0x24, 0xd6, 0x67, 0x34, 0x98, 0x0d, 0x98, 0xf4, 0x22, 0x22, 0x81, 0x43, 0x82, 0xce, 0x9d, 0x82, - 0xf4, 0x6b, 0x81, 0x66, 0x52, 0x49, 0x83, 0xd9, 0x00, 0x3f, 0x46, 0xcb, 0x8e, 0x37, 0x24, 0x21, - 0x25, 0x41, 0xa7, 0xc9, 0xb5, 0x9b, 0x52, 0x7b, 0x24, 0x61, 0x63, 0x4b, 0x8a, 0x53, 0x22, 0xa4, - 0x23, 0xfc, 0x31, 0x6a, 0x59, 0xa6, 0x35, 0x22, 0x61, 0x67, 0x89, 0x8b, 0xd7, 0xa5, 0xf8, 0x80, - 0x83, 0xc6, 0xba, 0x94, 0xaa, 0x9c, 0x04, 0x92, 0xdb, 0xfd, 0x6d, 0x09, 0xb5, 0x04, 0x03, 0x7f, - 0x80, 0x54, 0x6b, 0x14, 0x79, 0xe7, 0x72, 0xcf, 0x6b, 0x79, 0x7d, 0x4e, 0xce, 0x28, 0x20, 0x3e, - 0x4c, 0xe2, 0x78, 0x36, 0x79, 0x2a, 0xf7, 0x5a, 0x23, 0xe1, 0x14, 0x10, 0x1f, 0xb6, 0xcc, 0x80, - 0x9f, 0xb2, 0xdc, 0x63, 0x51, 0xb3, 0x21, 0x35, 0x92, 0x03, 0xf2, 0x8b, 0x0f, 0xd0, 0x2a, 0xa7, - 0x89, 0x1f, 0x48, 0xee, 0xb0, 0x28, 0xdd, 0x96, 0xd2, 0x3c, 0x11, 0xf2, 0x06, 0x3e, 0x44, 0x6b, - 0x97, 0xbe, 0x1b, 0x8d, 0x89, 0x8c, 0xa2, 0x56, 0x44, 0xd9, 0x91, 0x51, 0x0a, 0x4c, 0x28, 0x58, - 0x2c, 0x4e, 0xc8, 0x7e, 0xb2, 0xd9, 0x6a, 0x5a, 0x2f, 0x8b, 0x93, 0x67, 0x42, 0xc1, 0x62, 0x9b, - 0x72, 0xcd, 0x01, 0x71, 0x65, 0x98, 0xf6, 0xcb, 0x36, 0x95, 0x23, 0x42, 0xde, 0xc0, 0x3f, 0xa0, - 0x6d, 0xc7, 0x0b, 0xa9, 0xe9, 0xd1, 0x13, 0x42, 0x03, 0xc7, 0x92, 0xc1, 0x96, 0x2b, 0x82, 0x3d, - 0x94, 0xc1, 0xaa, 0x04, 0x50, 0x05, 0x76, 0xff, 0x6a, 0xa1, 0xb6, 0xcc, 0x79, 0xfc, 0x2d, 0xba, - 0x3f, 0xb8, 0xa2, 0x24, 0x3c, 0x0d, 0x7c, 0x8b, 0x84, 0x21, 0xb1, 0x4f, 0x49, 0xd0, 0x27, 0x96, - 0xef, 0xd9, 0x3c, 0x61, 0x9a, 0xc6, 0xc3, 0x24, 0xd6, 0xeb, 0x28, 0x50, 0xe7, 0x60, 0x61, 0x5d, - 0xc7, 0xab, 0x0c, 0x7b, 0x27, 0x0b, 0x5b, 0x43, 0x81, 0x3a, 0x07, 0x3e, 0x42, 0xdb, 0xd4, 0xa7, - 0xa6, 0x6b, 0x14, 0xa6, 0xe5, 0x39, 0xd7, 0x34, 0xee, 0xb3, 0x43, 0xa8, 0x70, 0x43, 0x15, 0x98, - 0x86, 0x3a, 0x2e, 0x4c, 0xc5, 0x73, 0x30, 0x1f, 0xaa, 0xe8, 0x86, 0x2a, 0x10, 0xef, 0xa1, 0x65, - 0xf2, 0x94, 0x58, 0xdf, 0x38, 0x63, 0xc2, 0xb3, 0x4f, 0x31, 0xd6, 0x58, 0x35, 0xcf, 0x30, 0x48, - 0x47, 0xf8, 0x3d, 0xb4, 0x72, 0x11, 0x91, 0x88, 0x70, 0x6a, 0x8b, 0x53, 0xd7, 0x93, 0x58, 0xcf, - 0x40, 0xc8, 0x86, 0x78, 0x1f, 0xa1, 0x30, 0x1a, 0x88, 0x3e, 0x12, 0xf2, 0x3c, 0x6a, 0x1a, 0x1b, - 0x49, 0xac, 0xe7, 0x50, 0xc8, 0x8d, 0xf1, 0x31, 0xda, 0xe1, 0xab, 0xfb, 0xd2, 0xa3, 
0x22, 0x1d, - 0x69, 0x14, 0x78, 0xc4, 0xe6, 0x49, 0xd3, 0x34, 0x3a, 0x49, 0xac, 0x57, 0xfa, 0xa1, 0x12, 0xc5, - 0x5d, 0xd4, 0x0a, 0x27, 0xae, 0x43, 0xc3, 0xce, 0x0a, 0xd7, 0x23, 0x56, 0xbf, 0x02, 0x01, 0xf9, - 0xe5, 0x9c, 0x91, 0x19, 0xd8, 0x61, 0x07, 0xe5, 0x38, 0x1c, 0x01, 0xf9, 0x4d, 0x57, 0x75, 0xea, - 0x87, 0xf4, 0xd0, 0x71, 0x29, 0x09, 0xf8, 0xe9, 0x75, 0x56, 0x4b, 0xab, 0x2a, 0xf9, 0xa1, 0x12, - 0xc5, 0x3f, 0xa1, 0x47, 0x1c, 0xef, 0xd3, 0x20, 0xb2, 0x68, 0x14, 0x10, 0xfb, 0x84, 0x50, 0xd3, - 0x36, 0xa9, 0x59, 0x4a, 0x89, 0x35, 0x1e, 0xfe, 0xdd, 0x24, 0xd6, 0x17, 0x13, 0xc0, 0x62, 0xb4, - 0xee, 0xe7, 0xa8, 0x2d, 0x7b, 0x3e, 0x6b, 0x93, 0x21, 0xf5, 0x03, 0x52, 0xea, 0xac, 0x7d, 0x86, - 0x65, 0x6d, 0x92, 0x53, 0x40, 0x7c, 0xba, 0x7f, 0xdc, 0x41, 0xcb, 0x47, 0x59, 0x6b, 0x5f, 0xe3, - 0x73, 0x02, 0x61, 0x75, 0x2c, 0xea, 0x4d, 0x35, 0xb6, 0x58, 0x7b, 0xc9, 0xe3, 0x50, 0xb0, 0xf0, - 0x21, 0xc2, 0xdc, 0x3e, 0x60, 0xad, 0x3a, 0x3c, 0x31, 0x29, 0xd7, 0x8a, 0xa2, 0x7a, 0x2d, 0x89, - 0xf5, 0x0a, 0x2f, 0x54, 0x60, 0xe9, 0xec, 0x06, 0xb7, 0x43, 0x59, 0x43, 0xd9, 0xec, 0x12, 0x87, - 0x82, 0x85, 0x3f, 0x43, 0x1b, 0x59, 0x05, 0xf4, 0x89, 0x47, 0x65, 0xc1, 0xe0, 0x24, 0xd6, 0x4b, - 0x1e, 0x28, 0xd9, 0xd9, 0x79, 0xa9, 0x0b, 0x9f, 0xd7, 0xaf, 0x4b, 0x48, 0xe5, 0xfe, 0x74, 0x62, - 0xb1, 0x09, 0x20, 0x67, 0xb2, 0x3d, 0x65, 0x13, 0xa7, 0x1e, 0x28, 0xd9, 0xf8, 0x2b, 0x74, 0x2f, - 0x87, 0x3c, 0xf1, 0x7f, 0xf4, 0x5c, 0xdf, 0xb4, 0xd3, 0x53, 0x7b, 0x90, 0xc4, 0x7a, 0x35, 0x01, - 0xaa, 0x61, 0xf6, 0x1b, 0x58, 0x05, 0x8c, 0xd7, 0x73, 0x33, 0xfb, 0x0d, 0xe6, 0xbd, 0x50, 0x81, - 0x61, 0x0b, 0x3d, 0x60, 0xc5, 0x7b, 0x05, 0xe4, 0x8c, 0x04, 0xc4, 0xb3, 0x88, 0x9d, 0xe5, 0x5f, - 0x67, 0x7d, 0x57, 0xd9, 0x5b, 0x36, 0x1e, 0x25, 0xb1, 0xfe, 0x66, 0x2d, 0x69, 0x96, 0xa4, 0x50, - 0x1f, 0x27, 0x7b, 0x00, 0x94, 0xae, 0x57, 0x86, 0xd5, 0x3c, 0x00, 0x66, 0xfb, 0x03, 0x72, 0x16, - 0x1e, 0x12, 0x6a, 0x8d, 0xd2, 0xd6, 0x96, 0xdf, 0x5f, 0xc1, 0x0b, 0x15, 0x18, 0xfe, 0x0e, 0x75, - 0x2c, 0x9f, 0xa7, 0xbb, 0xe3, 0x7b, 0x07, 0xbe, 0x47, 0x03, 0xdf, 0x3d, 0x36, 0x29, 0xf1, 0xac, - 0x2b, 0xde, 0xfd, 0x9a, 0xc6, 0x1b, 0x49, 0xac, 0xd7, 0x72, 0xa0, 0xd6, 0xd3, 0xfd, 0x53, 0x45, - 0x2a, 0xdf, 0x01, 0x4b, 0x8c, 0x11, 0x31, 0x6d, 0xb1, 0x1d, 0x56, 0xab, 0xf9, 0x8c, 0x2c, 0x7a, - 0xa0, 0x64, 0x17, 0xb4, 0xa2, 0x2b, 0xa9, 0x15, 0x5a, 0xd1, 0x8f, 0x4a, 0x36, 0x3e, 0x40, 0x77, - 0x6d, 0x62, 0xf9, 0xe3, 0x49, 0xc0, 0x1b, 0x83, 0x98, 0x5a, 0x6c, 0xea, 0x5e, 0x12, 0xeb, 0xf3, - 0x4e, 0x98, 0x87, 0xca, 0x41, 0xc4, 0x1a, 0xda, 0xd5, 0x41, 0xc4, 0x32, 0xe6, 0x21, 0xfc, 0x18, - 0x6d, 0x96, 0xd7, 0x21, 0x5a, 0xfe, 0x76, 0x12, 0xeb, 0x65, 0x17, 0x94, 0x01, 0x26, 0xe7, 0x59, - 0xfe, 0x24, 0x9a, 0xb8, 0x8e, 0x65, 0x32, 0xf9, 0x4a, 0x26, 0x2f, 0xb9, 0xa0, 0x0c, 0x30, 0xf9, - 0xa4, 0xd4, 0xda, 0x51, 0x26, 0x2f, 0xb9, 0xa0, 0x0c, 0xe0, 0x09, 0xda, 0x4d, 0x0f, 0xb6, 0xa6, - 0xf9, 0xca, 0xab, 0xe2, 0xed, 0x24, 0xd6, 0x5f, 0xc9, 0x85, 0x57, 0x32, 0xf0, 0x15, 0x7a, 0x2b, - 0x7f, 0x86, 0x75, 0x93, 0x8a, 0x0b, 0xe4, 0x9d, 0x24, 0xd6, 0x17, 0xa1, 0xc3, 0x22, 0xa4, 0xee, - 0xdf, 0x4d, 0xa4, 0xf2, 0x47, 0x1b, 0xeb, 0xbe, 0x44, 0x5c, 0xb8, 0x87, 0x7e, 0xe4, 0x15, 0x7a, - 0x7f, 0x1e, 0x87, 0x82, 0x85, 0xbf, 0x40, 0x5b, 0x64, 0x76, 0x4d, 0x5f, 0x44, 0xec, 0x16, 0x11, - 0x3d, 0x4c, 0x35, 0x76, 0x92, 0x58, 0x9f, 0xf3, 0xc1, 0x1c, 0x82, 0x3f, 0x41, 0xeb, 0x12, 0xe3, - 0x6d, 0x55, 0x3c, 0x9d, 0x54, 0xe3, 0x6e, 0x12, 0xeb, 0x45, 0x07, 0x14, 0x4d, 0x26, 0xe4, 0x6f, - 0x3d, 0x20, 0x16, 0x71, 0x2e, 0xd3, 0x87, 0x12, 0x17, 0x16, 0x1c, 0x50, 0x34, 0xd9, 0x93, 0x87, - 0x03, 0xfc, 
0xb2, 0x10, 0xe5, 0xc5, 0x9f, 0x3c, 0x29, 0x08, 0xd9, 0x90, 0xbd, 0xa4, 0x02, 0xb1, - 0x56, 0x51, 0x4b, 0xaa, 0x78, 0x49, 0xcd, 0x30, 0x48, 0x47, 0xec, 0x00, 0xed, 0x7c, 0xf3, 0x6d, - 0x67, 0xd7, 0x57, 0x1e, 0x87, 0x82, 0xc5, 0xea, 0x8d, 0x37, 0xca, 0x63, 0xe2, 0x0d, 0xe9, 0xa8, - 0x4f, 0x82, 0xcb, 0xf4, 0x7d, 0xc4, 0xeb, 0x6d, 0xce, 0x09, 0xf3, 0x90, 0x31, 0xb8, 0xbe, 0xd1, - 0x1a, 0xcf, 0x6f, 0xb4, 0xc6, 0x8b, 0x1b, 0x4d, 0xf9, 0x79, 0xaa, 0x29, 0xbf, 0x4f, 0x35, 0xe5, - 0xd9, 0x54, 0x53, 0xae, 0xa7, 0x9a, 0xf2, 0xcf, 0x54, 0x53, 0xfe, 0x9d, 0x6a, 0x8d, 0x17, 0x53, - 0x4d, 0xf9, 0xe5, 0x56, 0x6b, 0x5c, 0xdf, 0x6a, 0x8d, 0xe7, 0xb7, 0x5a, 0xe3, 0xfb, 0xf7, 0x87, - 0x0e, 0x1d, 0x45, 0x83, 0x7d, 0xcb, 0x1f, 0xf7, 0x86, 0x81, 0x79, 0x66, 0x7a, 0x66, 0xcf, 0xf5, - 0xcf, 0x9d, 0x5e, 0xd5, 0xff, 0xb7, 0x83, 0x16, 0xff, 0xef, 0xf5, 0xa3, 0xff, 0x03, 0x00, 0x00, - 0xff, 0xff, 0x39, 0xf0, 0xb8, 0xbf, 0xfe, 0x0e, 0x00, 0x00, + // 1325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0xcf, 0x8f, 0xdb, 0x44, + 0x14, 0x4e, 0x36, 0x75, 0xb2, 0x9d, 0xcd, 0xee, 0xb6, 0xb3, 0x5b, 0x9a, 0x52, 0x64, 0x2f, 0x81, + 0x8a, 0x22, 0xd0, 0x46, 0x05, 0x24, 0x04, 0xa2, 0x12, 0x72, 0xca, 0x4a, 0x95, 0xb6, 0xa2, 0xbc, + 0x80, 0x84, 0xe0, 0xe4, 0xd8, 0xb3, 0x89, 0x55, 0xc7, 0xce, 0xda, 0xe3, 0xd2, 0x95, 0x90, 0xe0, + 0x4f, 0xe0, 0xc4, 0x85, 0x3b, 0xe2, 0xc2, 0x89, 0x13, 0x67, 0x2e, 0x3d, 0xf6, 0xd8, 0x93, 0x45, + 0xd3, 0x0b, 0xf2, 0xa9, 0x7f, 0x02, 0x9a, 0x37, 0x13, 0xff, 0x8a, 0xd3, 0xee, 0x25, 0x9e, 0xf7, + 0xbd, 0xef, 0x7b, 0xf3, 0xf3, 0xbd, 0x99, 0x90, 0x83, 0xf9, 0x83, 0xc9, 0xc0, 0x0b, 0x26, 0xa7, + 0xde, 0x2c, 0x70, 0x98, 0x37, 0x88, 0xb8, 0xc5, 0x23, 0xf9, 0x7b, 0x38, 0x0f, 0x03, 0x1e, 0x50, + 0x0d, 0x8d, 0xd7, 0xf7, 0x27, 0xc1, 0x24, 0x40, 0x64, 0x20, 0x5a, 0xd2, 0xd9, 0xff, 0x7d, 0x83, + 0xb4, 0x81, 0x45, 0xb1, 0xc7, 0xe9, 0x27, 0xa4, 0x13, 0xc5, 0xb3, 0x99, 0x15, 0x9e, 0xf5, 0x9a, + 0x07, 0xcd, 0x9b, 0x5b, 0x1f, 0xec, 0x1c, 0xca, 0x30, 0x23, 0x89, 0x9a, 0xbb, 0x8f, 0x13, 0xa3, + 0x91, 0x26, 0xc6, 0x92, 0x06, 0xcb, 0x86, 0x90, 0x9e, 0xc6, 0x2c, 0x74, 0x59, 0xd8, 0xdb, 0x28, + 0x49, 0xbf, 0x92, 0x68, 0x2e, 0x55, 0x34, 0x58, 0x36, 0xe8, 0x6d, 0xb2, 0xe9, 0xfa, 0x13, 0x16, + 0x71, 0x16, 0xf6, 0x5a, 0xa8, 0xdd, 0x55, 0xda, 0xbb, 0x0a, 0x36, 0x2f, 0x29, 0x71, 0x46, 0x84, + 0xac, 0x45, 0x3f, 0x22, 0x6d, 0xdb, 0xb2, 0xa7, 0x2c, 0xea, 0x5d, 0x40, 0xf1, 0xb6, 0x12, 0x0f, + 0x11, 0x34, 0xb7, 0x95, 0x54, 0x43, 0x12, 0x28, 0x2e, 0xbd, 0x45, 0x34, 0xd7, 0x77, 0xd8, 0xa3, + 0x9e, 0x86, 0xa2, 0x6e, 0xd6, 0xa3, 0xc3, 0x1e, 0xe5, 0x1a, 0xa4, 0x80, 0xfc, 0xf4, 0x7f, 0xbb, + 0x40, 0xda, 0xc3, 0x4c, 0x6d, 0x4f, 0x63, 0xff, 0x81, 0x5a, 0xa6, 0x6e, 0xb1, 0xcb, 0x42, 0x8f, + 0x82, 0x02, 0xf2, 0x93, 0x77, 0xb8, 0xf1, 0x32, 0x49, 0xb1, 0x43, 0x31, 0xb3, 0x10, 0x37, 0x46, + 0x2d, 0x4b, 0x59, 0xb3, 0xa3, 0x34, 0x8a, 0x03, 0xea, 0x4b, 0x87, 0x64, 0x0b, 0x69, 0x72, 0x4f, + 0xd5, 0xa2, 0x94, 0xa5, 0x7b, 0x4a, 0x5a, 0x24, 0x42, 0xd1, 0xa0, 0x47, 0xa4, 0xfb, 0x30, 0xf0, + 0xe2, 0x19, 0x53, 0x51, 0xb4, 0x9a, 0x28, 0xfb, 0x2a, 0x4a, 0x89, 0x09, 0x25, 0x4b, 0xc4, 0x89, + 0xc4, 0x2e, 0x2f, 0x47, 0xd3, 0x7e, 0x59, 0x9c, 0x22, 0x13, 0x4a, 0x96, 0x98, 0x94, 0x67, 0x8d, + 0x99, 0xa7, 0xc2, 0x74, 0x5e, 0x36, 0xa9, 0x02, 0x11, 0x8a, 0x06, 0xfd, 0x9e, 0xec, 0xb9, 0x7e, + 0xc4, 0x2d, 0x9f, 0xdf, 0x63, 0x3c, 0x74, 0x6d, 0x15, 0x6c, 0xb3, 0x26, 0xd8, 0x75, 0x15, 0xac, + 0x4e, 0x00, 0x75, 0x60, 0xff, 0xef, 0x36, 0xe9, 0xa8, 0x34, 0xa1, 0xdf, 0x90, 0xab, 0xe3, 0x33, + 0xce, 0xa2, 0xfb, 0x61, 
0x60, 0xb3, 0x28, 0x62, 0xce, 0x7d, 0x16, 0x8e, 0x98, 0x1d, 0xf8, 0x0e, + 0x1e, 0x98, 0x96, 0x79, 0x3d, 0x4d, 0x8c, 0x75, 0x14, 0x58, 0xe7, 0x10, 0x61, 0x3d, 0xd7, 0xaf, + 0x0d, 0xbb, 0x91, 0x87, 0x5d, 0x43, 0x81, 0x75, 0x0e, 0x7a, 0x97, 0xec, 0xf1, 0x80, 0x5b, 0x9e, + 0x59, 0xea, 0x16, 0xcf, 0x5c, 0xcb, 0xbc, 0x2a, 0x16, 0xa1, 0xc6, 0x0d, 0x75, 0x60, 0x16, 0xea, + 0xb8, 0xd4, 0x15, 0x9e, 0xc1, 0x62, 0xa8, 0xb2, 0x1b, 0xea, 0x40, 0x7a, 0x93, 0x6c, 0xb2, 0x47, + 0xcc, 0xfe, 0xda, 0x9d, 0x31, 0x3c, 0x7d, 0x4d, 0xb3, 0x2b, 0x0a, 0xc0, 0x12, 0x83, 0xac, 0x45, + 0xdf, 0x23, 0x17, 0x4f, 0x63, 0x16, 0x33, 0xa4, 0xb6, 0x91, 0xba, 0x9d, 0x26, 0x46, 0x0e, 0x42, + 0xde, 0xa4, 0x87, 0x84, 0x44, 0xf1, 0x58, 0x96, 0x9e, 0x08, 0xcf, 0x51, 0xcb, 0xdc, 0x49, 0x13, + 0xa3, 0x80, 0x42, 0xa1, 0x4d, 0x8f, 0xc9, 0x3e, 0x8e, 0xee, 0x0b, 0x9f, 0xcb, 0xe3, 0xc8, 0xe3, + 0xd0, 0x67, 0x0e, 0x1e, 0x9a, 0x96, 0xd9, 0x4b, 0x13, 0xa3, 0xd6, 0x0f, 0xb5, 0x28, 0xed, 0x93, + 0x76, 0x34, 0xf7, 0x5c, 0x1e, 0xf5, 0x2e, 0xa2, 0x9e, 0x88, 0xfc, 0x95, 0x08, 0xa8, 0x2f, 0x72, + 0xa6, 0x56, 0xe8, 0x44, 0x3d, 0x52, 0xe0, 0x20, 0x02, 0xea, 0x9b, 0x8d, 0xea, 0x7e, 0x10, 0xf1, + 0x23, 0xd7, 0xe3, 0x2c, 0xc4, 0xd5, 0xeb, 0x6d, 0x55, 0x46, 0x55, 0xf1, 0x43, 0x2d, 0x4a, 0x7f, + 0x22, 0x37, 0x10, 0x1f, 0xf1, 0x30, 0xb6, 0x79, 0x1c, 0x32, 0xe7, 0x1e, 0xe3, 0x96, 0x63, 0x71, + 0xab, 0x72, 0x24, 0xba, 0x18, 0xfe, 0xdd, 0x34, 0x31, 0xce, 0x27, 0x80, 0xf3, 0xd1, 0xfa, 0x3f, + 0x12, 0x0d, 0x0b, 0x2f, 0xbd, 0x45, 0xb6, 0x50, 0x31, 0x14, 0x25, 0x33, 0x52, 0xc9, 0xb2, 0x2b, + 0x92, 0xba, 0x00, 0x43, 0xd1, 0xa0, 0x9f, 0x93, 0x4b, 0xf3, 0x6c, 0x3e, 0x4a, 0x27, 0xb3, 0x61, + 0x3f, 0x4d, 0x8c, 0x15, 0x1f, 0xac, 0x20, 0xfd, 0xcf, 0x48, 0x47, 0x5d, 0x52, 0xa2, 0x48, 0x47, + 0x3c, 0x08, 0x59, 0xa5, 0xae, 0x8f, 0x04, 0x96, 0x17, 0x69, 0xa4, 0x80, 0xfc, 0xf4, 0xff, 0xdc, + 0x20, 0x9b, 0x77, 0xf3, 0xbb, 0xa8, 0x8b, 0x63, 0x03, 0x26, 0xaa, 0x88, 0xcc, 0x76, 0xcd, 0xbc, + 0x24, 0x8a, 0x5b, 0x11, 0x87, 0x92, 0x45, 0x8f, 0x08, 0x2d, 0xcc, 0xe8, 0x9e, 0xc5, 0x51, 0x2b, + 0x27, 0xf1, 0x5a, 0x9a, 0x18, 0x35, 0x5e, 0xa8, 0xc1, 0xb2, 0xde, 0x4d, 0xb4, 0x23, 0x95, 0xc1, + 0x79, 0xef, 0x0a, 0x87, 0x92, 0x45, 0x3f, 0x25, 0x3b, 0x79, 0xfe, 0x8d, 0x98, 0xcf, 0x55, 0xba, + 0xd2, 0x34, 0x31, 0x2a, 0x1e, 0xa8, 0xd8, 0xf9, 0x7a, 0x69, 0xe7, 0x5e, 0xaf, 0x5f, 0x2f, 0x10, + 0x0d, 0xfd, 0x59, 0xc7, 0x6a, 0x63, 0xd8, 0x89, 0xda, 0xef, 0xbc, 0xe3, 0xcc, 0x03, 0x15, 0x9b, + 0x7e, 0x49, 0xae, 0x14, 0x90, 0x3b, 0xc1, 0x0f, 0xbe, 0x17, 0x58, 0x4e, 0xb6, 0x6a, 0xd7, 0xd2, + 0xc4, 0xa8, 0x27, 0x40, 0x3d, 0x2c, 0xf6, 0xc0, 0x2e, 0x61, 0x58, 0x4d, 0x5a, 0xf9, 0x1e, 0xac, + 0x7a, 0xa1, 0x06, 0xa3, 0x36, 0xb9, 0x26, 0x4a, 0xc7, 0x19, 0xb0, 0x13, 0x16, 0x32, 0xdf, 0x66, + 0x4e, 0x7e, 0xfa, 0x7b, 0xdb, 0x07, 0xcd, 0x9b, 0x9b, 0xe6, 0x8d, 0x34, 0x31, 0xde, 0x5c, 0x4b, + 0x5a, 0xa6, 0x08, 0xac, 0x8f, 0x93, 0x3f, 0x3f, 0x2a, 0x97, 0xbb, 0xc0, 0xd6, 0x3c, 0x3f, 0x96, + 0xf3, 0x03, 0x76, 0x12, 0x1d, 0x31, 0x6e, 0x4f, 0xb3, 0xc2, 0x5a, 0x9c, 0x5f, 0xc9, 0x0b, 0x35, + 0x18, 0xfd, 0x96, 0xf4, 0xec, 0x00, 0x8f, 0xbb, 0x1b, 0xf8, 0xc3, 0xc0, 0xe7, 0x61, 0xe0, 0x1d, + 0x5b, 0x9c, 0xf9, 0xf6, 0x19, 0xd6, 0xde, 0x96, 0xf9, 0x46, 0x9a, 0x18, 0x6b, 0x39, 0xb0, 0xd6, + 0xd3, 0xff, 0x4b, 0x23, 0x1a, 0xce, 0x40, 0x1c, 0x8c, 0x29, 0xb3, 0x1c, 0x39, 0x1d, 0x51, 0x29, + 0x8a, 0x27, 0xb2, 0xec, 0x81, 0x8a, 0x5d, 0xd2, 0xca, 0x9a, 0xa8, 0xd5, 0x68, 0x65, 0x35, 0xac, + 0xd8, 0x74, 0x48, 0x2e, 0x3b, 0xcc, 0x0e, 0x66, 0xf3, 0x10, 0xcb, 0x92, 0xec, 0x5a, 0x4e, 0xea, + 0x4a, 0x9a, 0x18, 0xab, 0x4e, 0x58, 0x85, 0xaa, 
0x41, 0xe4, 0x18, 0x3a, 0xf5, 0x41, 0xe4, 0x30, + 0x56, 0x21, 0x7a, 0x9b, 0xec, 0x56, 0xc7, 0x21, 0x2f, 0x9c, 0xbd, 0x34, 0x31, 0xaa, 0x2e, 0xa8, + 0x02, 0x42, 0x8e, 0xa7, 0xfc, 0x4e, 0x3c, 0xf7, 0x5c, 0xdb, 0x12, 0xf2, 0x8b, 0xb9, 0xbc, 0xe2, + 0x82, 0x2a, 0x20, 0xe4, 0xf3, 0xca, 0xc5, 0x42, 0x72, 0x79, 0xc5, 0x05, 0x55, 0x80, 0xce, 0xc9, + 0x41, 0xb6, 0xb0, 0x6b, 0x4a, 0xbf, 0xba, 0xa8, 0xde, 0x4e, 0x13, 0xe3, 0x95, 0x5c, 0x78, 0x25, + 0x83, 0x9e, 0x91, 0xb7, 0x8a, 0x6b, 0xb8, 0xae, 0x53, 0x79, 0x7d, 0xbd, 0x93, 0x26, 0xc6, 0x79, + 0xe8, 0x70, 0x1e, 0x52, 0xff, 0x9f, 0x16, 0xd1, 0xf0, 0xc9, 0x28, 0xaa, 0x2f, 0x93, 0xd7, 0xfd, + 0x51, 0x10, 0xfb, 0xa5, 0xda, 0x5f, 0xc4, 0xa1, 0x64, 0x89, 0xeb, 0x8b, 0x2d, 0x1f, 0x09, 0xa7, + 0xb1, 0xb8, 0x45, 0x64, 0x0d, 0xd3, 0xe4, 0xf5, 0x55, 0xf5, 0xc1, 0x0a, 0x42, 0x3f, 0x26, 0xdb, + 0x0a, 0xc3, 0xb2, 0x2a, 0x1f, 0x6e, 0x9a, 0x79, 0x39, 0x4d, 0x8c, 0xb2, 0x03, 0xca, 0xa6, 0x10, + 0xe2, 0x4b, 0x13, 0x98, 0xcd, 0xdc, 0x87, 0xd9, 0x33, 0x0d, 0x85, 0x25, 0x07, 0x94, 0x4d, 0xf1, + 0xe0, 0x42, 0x00, 0x2f, 0x0b, 0x99, 0x5e, 0xf8, 0xe0, 0xca, 0x40, 0xc8, 0x9b, 0xe2, 0x1d, 0x17, + 0xca, 0xb1, 0xca, 0x5c, 0xd2, 0xe4, 0x3b, 0x6e, 0x89, 0x41, 0xd6, 0x12, 0x0b, 0xe8, 0x14, 0x8b, + 0x6f, 0x27, 0xbf, 0xbe, 0x8a, 0x38, 0x94, 0x2c, 0x91, 0x6f, 0x58, 0x28, 0x8f, 0x99, 0x3f, 0xe1, + 0xd3, 0x11, 0x0b, 0x1f, 0x66, 0xaf, 0x33, 0xcc, 0xb7, 0x15, 0x27, 0xac, 0x42, 0xe6, 0xf8, 0xc9, + 0x33, 0xbd, 0xf1, 0xf4, 0x99, 0xde, 0x78, 0xf1, 0x4c, 0x6f, 0xfe, 0xbc, 0xd0, 0x9b, 0x7f, 0x2c, + 0xf4, 0xe6, 0xe3, 0x85, 0xde, 0x7c, 0xb2, 0xd0, 0x9b, 0xff, 0x2e, 0xf4, 0xe6, 0x7f, 0x0b, 0xbd, + 0xf1, 0x62, 0xa1, 0x37, 0x7f, 0x79, 0xae, 0x37, 0x9e, 0x3c, 0xd7, 0x1b, 0x4f, 0x9f, 0xeb, 0x8d, + 0xef, 0xde, 0x9f, 0xb8, 0x7c, 0x1a, 0x8f, 0x0f, 0xed, 0x60, 0x36, 0x98, 0x84, 0xd6, 0x89, 0xe5, + 0x5b, 0x03, 0x2f, 0x78, 0xe0, 0x0e, 0xea, 0xfe, 0x90, 0x8f, 0xdb, 0xf8, 0x77, 0xfb, 0xc3, 0xff, + 0x03, 0x00, 0x00, 0xff, 0xff, 0x22, 0x3f, 0x1a, 0x51, 0xaf, 0x0f, 0x00, 0x00, } func (this *Result) Equal(that interface{}) bool { @@ -904,6 +972,9 @@ func (this *Result) Equal(that interface{}) bool { if !this.Caches.Equal(&that1.Caches) { return false } + if !this.Index.Equal(&that1.Index) { + return false + } return true } func (this *Caches) Equal(that interface{}) bool { @@ -1008,6 +1079,33 @@ func (this *Summary) Equal(that interface{}) bool { } return true } +func (this *Index) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*Index) + if !ok { + that2, ok := that.(Index) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.TotalChunks != that1.TotalChunks { + return false + } + if this.PostFilterChunks != that1.PostFilterChunks { + return false + } + return true +} func (this *Querier) Equal(that interface{}) bool { if that == nil { return this == nil @@ -1207,12 +1305,13 @@ func (this *Result) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 8) + s := make([]string, 0, 9) s = append(s, "&stats.Result{") s = append(s, "Summary: "+strings.Replace(this.Summary.GoString(), `&`, ``, 1)+",\n") s = append(s, "Querier: "+strings.Replace(this.Querier.GoString(), `&`, ``, 1)+",\n") s = append(s, "Ingester: "+strings.Replace(this.Ingester.GoString(), `&`, ``, 1)+",\n") s = append(s, "Caches: "+strings.Replace(this.Caches.GoString(), `&`, ``, 1)+",\n") + s = append(s, "Index: "+strings.Replace(this.Index.GoString(), `&`, ``, 1)+",\n") s = 
append(s, "}") return strings.Join(s, "") } @@ -1254,6 +1353,17 @@ func (this *Summary) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *Index) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&stats.Index{") + s = append(s, "TotalChunks: "+fmt.Sprintf("%#v", this.TotalChunks)+",\n") + s = append(s, "PostFilterChunks: "+fmt.Sprintf("%#v", this.PostFilterChunks)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func (this *Querier) GoString() string { if this == nil { return "nil" @@ -1357,6 +1467,16 @@ func (m *Result) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + { + size, err := m.Index.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintStats(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x2a { size, err := m.Caches.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -1588,6 +1708,39 @@ func (m *Summary) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *Index) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Index) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Index) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.PostFilterChunks != 0 { + i = encodeVarintStats(dAtA, i, uint64(m.PostFilterChunks)) + i-- + dAtA[i] = 0x10 + } + if m.TotalChunks != 0 { + i = encodeVarintStats(dAtA, i, uint64(m.TotalChunks)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + func (m *Querier) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -1898,6 +2051,8 @@ func (m *Result) Size() (n int) { n += 1 + l + sovStats(uint64(l)) l = m.Caches.Size() n += 1 + l + sovStats(uint64(l)) + l = m.Index.Size() + n += 1 + l + sovStats(uint64(l)) return n } @@ -1971,6 +2126,21 @@ func (m *Summary) Size() (n int) { return n } +func (m *Index) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.TotalChunks != 0 { + n += 1 + sovStats(uint64(m.TotalChunks)) + } + if m.PostFilterChunks != 0 { + n += 1 + sovStats(uint64(m.PostFilterChunks)) + } + return n +} + func (m *Querier) Size() (n int) { if m == nil { return 0 @@ -2118,6 +2288,7 @@ func (this *Result) String() string { `Querier:` + strings.Replace(strings.Replace(this.Querier.String(), "Querier", "Querier", 1), `&`, ``, 1) + `,`, `Ingester:` + strings.Replace(strings.Replace(this.Ingester.String(), "Ingester", "Ingester", 1), `&`, ``, 1) + `,`, `Caches:` + strings.Replace(strings.Replace(this.Caches.String(), "Caches", "Caches", 1), `&`, ``, 1) + `,`, + `Index:` + strings.Replace(strings.Replace(this.Index.String(), "Index", "Index", 1), `&`, ``, 1) + `,`, `}`, }, "") return s @@ -2160,6 +2331,17 @@ func (this *Summary) String() string { }, "") return s } +func (this *Index) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&Index{`, + `TotalChunks:` + fmt.Sprintf("%v", this.TotalChunks) + `,`, + `PostFilterChunks:` + fmt.Sprintf("%v", this.PostFilterChunks) + `,`, + `}`, + }, "") + return s +} func (this *Querier) String() string { if this == nil { return "nil" @@ -2404,6 +2586,39 @@ func (m *Result) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 5: + if wireType != 2 { 
+ return fmt.Errorf("proto: wrong wireType = %d for field Index", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthStats + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthStats + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Index.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipStats(dAtA[iNdEx:]) @@ -3010,6 +3225,97 @@ func (m *Summary) Unmarshal(dAtA []byte) error { } return nil } +func (m *Index) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Index: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Index: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field TotalChunks", wireType) + } + m.TotalChunks = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.TotalChunks |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field PostFilterChunks", wireType) + } + m.PostFilterChunks = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.PostFilterChunks |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipStats(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthStats + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthStats + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *Querier) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 diff --git a/pkg/logqlmodel/stats/stats.proto b/pkg/logqlmodel/stats/stats.proto index 20ad6077392b6..84219f11510be 100644 --- a/pkg/logqlmodel/stats/stats.proto +++ b/pkg/logqlmodel/stats/stats.proto @@ -26,6 +26,10 @@ message Result { (gogoproto.nullable) = false, (gogoproto.jsontag) = "cache" ]; + Index index = 5 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "index" + ]; } message Caches { @@ -96,6 +100,16 @@ message Summary { int64 totalStructuredMetadataBytesProcessed = 12 [(gogoproto.jsontag) = "totalStructuredMetadataBytesProcessed"]; } +// Statistics from Index queries +// TODO(owen-d): include bytes. 
+// Needs some index methods added to return _sized_ chunk refs to know +message Index { + // Total chunks + int64 totalChunks = 1 [(gogoproto.jsontag) = "totalChunks"]; + // Post-filtered chunks + int64 postFilterChunks = 2 [(gogoproto.jsontag) = "postFilterChunks"]; +} + message Querier { Store store = 1 [ (gogoproto.nullable) = false, diff --git a/pkg/loki/loki.go b/pkg/loki/loki.go index eb513910f1707..5eab58e357c53 100644 --- a/pkg/loki/loki.go +++ b/pkg/loki/loki.go @@ -312,7 +312,7 @@ type Loki struct { querierAPI *querier.QuerierAPI ingesterQuerier *querier.IngesterQuerier Store storage.Store - BloomStore bloomshipper.Store + BloomStore bloomshipper.StoreWithMetrics tableManager *index.TableManager frontend Frontend ruler *base_ruler.Ruler diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index d3a9a4842adc7..1c9a8c189a5b3 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -414,6 +414,7 @@ func (t *Loki) initQuerier() (services.Service, error) { t.querierAPI = querier.NewQuerierAPI(t.Cfg.Querier, t.Querier, t.Overrides, logger) indexStatsHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.IndexStats", t.Overrides) + indexShardsHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.IndexShards", t.Overrides) volumeHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.VolumeInstant", t.Overrides) volumeRangeHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.VolumeRange", t.Overrides) seriesHTTPMiddleware := querier.WrapQuerySpanAndTimeout("query.Series", t.Overrides) @@ -465,6 +466,7 @@ func (t *Loki) initQuerier() (services.Service, error) { if querierWorkerServiceConfig.QuerierRunningStandalone() { labelsHTTPMiddleware = middleware.Merge(httpMiddleware, labelsHTTPMiddleware) indexStatsHTTPMiddleware = middleware.Merge(httpMiddleware, indexStatsHTTPMiddleware) + indexShardsHTTPMiddleware = middleware.Merge(httpMiddleware, indexShardsHTTPMiddleware) volumeHTTPMiddleware = middleware.Merge(httpMiddleware, volumeHTTPMiddleware) volumeRangeHTTPMiddleware = middleware.Merge(httpMiddleware, volumeRangeHTTPMiddleware) seriesHTTPMiddleware = middleware.Merge(httpMiddleware, seriesHTTPMiddleware) @@ -495,6 +497,7 @@ func (t *Loki) initQuerier() (services.Service, error) { router.Path("/loki/api/v1/series").Methods("GET", "POST").Handler(seriesHTTPMiddleware.Wrap(httpHandler)) router.Path("/loki/api/v1/index/stats").Methods("GET", "POST").Handler(indexStatsHTTPMiddleware.Wrap(httpHandler)) + router.Path("/loki/api/v1/index/shards").Methods("GET", "POST").Handler(indexShardsHTTPMiddleware.Wrap(httpHandler)) router.Path("/loki/api/v1/index/volume").Methods("GET", "POST").Handler(volumeHTTPMiddleware.Wrap(httpHandler)) router.Path("/loki/api/v1/index/volume_range").Methods("GET", "POST").Handler(volumeRangeHTTPMiddleware.Wrap(httpHandler)) @@ -1038,6 +1041,7 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { t.Server.HTTP.Path("/loki/api/v1/label/{name}/values").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/series").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/stats").Methods("GET", "POST").Handler(frontendHandler) + t.Server.HTTP.Path("/loki/api/v1/index/shards").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/volume").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/volume_range").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/api/prom/query").Methods("GET", 
"POST").Handler(frontendHandler) diff --git a/pkg/querier/handler.go b/pkg/querier/handler.go index 47a4c15e07511..3e57d61396945 100644 --- a/pkg/querier/handler.go +++ b/pkg/querier/handler.go @@ -93,6 +93,18 @@ func (h *Handler) Do(ctx context.Context, req queryrangebase.Request) (queryrang return nil, err } return &queryrange.IndexStatsResponse{Response: result}, nil + case *logproto.ShardsRequest: + request := loghttp.NewRangeQueryWithDefaults() + request.Start = concrete.From.Time() + request.End = concrete.Through.Time() + request.Query = concrete.GetQuery() + request.UpdateStep() + result, err := h.api.IndexShardsHandler(ctx, request, concrete.TargetBytesPerShard) + if err != nil { + return nil, err + } + return &queryrange.ShardsResponse{Response: result}, nil + case *logproto.VolumeRequest: result, err := h.api.VolumeHandler(ctx, concrete) if err != nil { diff --git a/pkg/querier/http.go b/pkg/querier/http.go index a508bf9f7286b..664e3b2ba4c9e 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -309,6 +309,39 @@ func (q *QuerierAPI) IndexStatsHandler(ctx context.Context, req *loghttp.RangeQu return resp, err } +func (q *QuerierAPI) IndexShardsHandler(ctx context.Context, req *loghttp.RangeQuery, targetBytesPerShard uint64) (*logproto.ShardsResponse, error) { + timer := prometheus.NewTimer(logql.QueryTime.WithLabelValues(logql.QueryTypeShards)) + defer timer.ObserveDuration() + + start := time.Now() + statsCtx, ctx := stats.NewContext(ctx) + + resp, err := q.querier.IndexShards(ctx, req, targetBytesPerShard) + queueTime, _ := ctx.Value(httpreq.QueryQueueTimeHTTPHeader).(time.Duration) + + resLength := 0 + if resp != nil { + resLength = len(resp.Shards) + stats.JoinResults(ctx, resp.Statistics) + } + + statResult := statsCtx.Result(time.Since(start), queueTime, resLength) + + log := spanlogger.FromContext(ctx) + statResult.Log(level.Debug(log)) + + status := 200 + if err != nil { + status, _ = serverutil.ClientHTTPStatusAndError(err) + } + + logql.RecordShardsQueryMetrics( + ctx, log, req.Start, req.End, req.Query, targetBytesPerShard, strconv.Itoa(status), resLength, statResult, + ) + + return resp, err +} + // TODO(trevorwhitney): add test for the handler split // VolumeHandler queries the index label volumes related to the passed matchers and given time range. 
diff --git a/pkg/querier/multi_tenant_querier.go b/pkg/querier/multi_tenant_querier.go index 2849830141167..6338b51e978a6 100644 --- a/pkg/querier/multi_tenant_querier.go +++ b/pkg/querier/multi_tenant_querier.go @@ -199,6 +199,44 @@ func (q *MultiTenantQuerier) IndexStats(ctx context.Context, req *loghttp.RangeQ return &merged, nil } +func (q *MultiTenantQuerier) IndexShards( + ctx context.Context, + req *loghttp.RangeQuery, + targetBytesPerShard uint64, +) (*logproto.ShardsResponse, error) { + tenantIDs, err := tenant.TenantIDs(ctx) + if err != nil { + return nil, err + } + + if len(tenantIDs) == 1 { + return q.Querier.IndexShards(ctx, req, targetBytesPerShard) + } + + responses := make([]*logproto.ShardsResponse, len(tenantIDs)) + for i, id := range tenantIDs { + singleContext := user.InjectOrgID(ctx, id) + resp, err := q.Querier.IndexShards(singleContext, req, targetBytesPerShard) + if err != nil { + return nil, err + } + + responses[i] = resp + } + + // TODO(owen-d): better merging + var highestIdx int + var highestVal int + for i, resp := range responses { + if len(resp.Shards) > highestVal { + highestIdx = i + highestVal = len(resp.Shards) + } + } + + return responses[highestIdx], nil +} + func (q *MultiTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { tenantIDs, err := tenant.TenantIDs(ctx) if err != nil { diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index a91293c977968..d368900b21e68 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -11,6 +11,7 @@ import ( "github.com/grafana/loki/pkg/storage/stores/index" "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" "github.com/go-kit/log/level" "github.com/grafana/dskit/httpgrpc" @@ -92,6 +93,7 @@ type Querier interface { Series(ctx context.Context, req *logproto.SeriesRequest) (*logproto.SeriesResponse, error) Tail(ctx context.Context, req *logproto.TailRequest, categorizedLabels bool) (*Tailer, error) IndexStats(ctx context.Context, req *loghttp.RangeQuery) (*stats.Stats, error) + IndexShards(ctx context.Context, req *loghttp.RangeQuery, targetBytesPerShard uint64) (*logproto.ShardsResponse, error) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) } @@ -772,6 +774,45 @@ func (q *SingleTenantQuerier) IndexStats(ctx context.Context, req *loghttp.Range ) } +func (q *SingleTenantQuerier) IndexShards( + ctx context.Context, + req *loghttp.RangeQuery, + targetBytesPerShard uint64, +) (*logproto.ShardsResponse, error) { + userID, err := tenant.TenantID(ctx) + if err != nil { + return nil, err + } + + start, end, err := validateQueryTimeRangeLimits(ctx, userID, q.limits, req.Start, req.End) + if err != nil { + return nil, err + } + + // Enforce the query timeout while querying backends + queryTimeout := q.limits.QueryTimeout(ctx, userID) + ctx, cancel := context.WithDeadline(ctx, time.Now().Add(queryTimeout)) + defer cancel() + + p, err := indexgateway.ExtractShardRequestMatchersAndAST(req.Query) + if err != nil { + return nil, err + } + + shards, err := q.store.GetShards( + ctx, + userID, + model.TimeFromUnixNano(start.UnixNano()), + model.TimeFromUnixNano(end.UnixNano()), + targetBytesPerShard, + p, + ) + if err != nil { + return nil, err + } + return shards, nil +} + func (q *SingleTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { sp, ctx := 
opentracing.StartSpanFromContext(ctx, "Querier.Volume") defer sp.Finish() diff --git a/pkg/querier/querier_mock_test.go b/pkg/querier/querier_mock_test.go index 268e05528f781..ed34a91bcaf7b 100644 --- a/pkg/querier/querier_mock_test.go +++ b/pkg/querier/querier_mock_test.go @@ -31,6 +31,7 @@ import ( "github.com/grafana/loki/pkg/storage/chunk/fetcher" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util" "github.com/grafana/loki/pkg/validation" ) @@ -371,6 +372,14 @@ func (s *storeMock) Stats(_ context.Context, _ string, _, _ model.Time, _ ...*la return nil, nil } +func (s *storeMock) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (s *storeMock) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (s *storeMock) Volume(ctx context.Context, userID string, from, through model.Time, _ int32, targetLabels []string, _ string, matchers ...*labels.Matcher) (*logproto.VolumeResponse, error) { args := s.Called(ctx, userID, from, through, targetLabels, matchers) return args.Get(0).(*logproto.VolumeResponse), args.Error(1) @@ -547,6 +556,18 @@ func (q *querierMock) IndexStats(_ context.Context, _ *loghttp.RangeQuery) (*sta return nil, nil } +func (q *querierMock) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) ([]logproto.Shard, error) { + return nil, nil +} + +func (q *querierMock) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + +func (q *querierMock) IndexShards(_ context.Context, _ *loghttp.RangeQuery, _ uint64) (*logproto.ShardsResponse, error) { + return nil, errors.New("unimplemented") +} + func (q *querierMock) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { args := q.MethodCalled("Volume", ctx, req) diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index 44de02408b4df..76d726c240fa8 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -346,6 +346,18 @@ func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (quer Through: through, Matchers: req.Query, }, err + case IndexShardsOp: + req, targetBytes, err := loghttp.ParseIndexShardsQuery(r) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + from, through := util.RoundToMilliseconds(req.Start, req.End) + return &logproto.ShardsRequest{ + From: from, + Through: through, + Query: req.Query, + TargetBytesPerShard: targetBytes.Bytes(), + }, err case VolumeOp: req, err := loghttp.ParseVolumeInstantQuery(r) if err != nil { @@ -521,6 +533,19 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) Through: through, Matchers: req.Query, }, ctx, err + case IndexShardsOp: + req, targetBytes, err := loghttp.ParseIndexShardsQuery(httpReq) + if err != nil { + return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + from, through := util.RoundToMilliseconds(req.Start, req.End) + return &logproto.ShardsRequest{ + From: from, + Through: through, + Query: req.Query, + TargetBytesPerShard: targetBytes.Bytes(), + }, ctx, nil + case VolumeOp: req, err := loghttp.ParseVolumeInstantQuery(httpReq) if err != nil { @@ -789,6 +814,25 @@ func (c Codec) EncodeRequest(ctx context.Context, r 
queryrangebase.Request) (*ht Header: header, } return req.WithContext(ctx), nil + case *logproto.ShardsRequest: + params := url.Values{ + "start": []string{fmt.Sprintf("%d", request.From.Time().UnixNano())}, + "end": []string{fmt.Sprintf("%d", request.Through.Time().UnixNano())}, + "query": []string{request.GetQuery()}, + "targetBytesPerShard": []string{fmt.Sprintf("%d", request.TargetBytesPerShard)}, + } + u := &url.URL{ + Path: "/loki/api/v1/index/shards", + RawQuery: params.Encode(), + } + req := &http.Request{ + Method: "GET", + RequestURI: u.String(), // This is what the httpgrpc code looks at. + URL: u, + Body: http.NoBody, + Header: header, + } + return req.WithContext(ctx), nil default: return nil, httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid request format, got (%T)", r)) } @@ -897,6 +941,15 @@ func decodeResponseJSONFrom(buf []byte, req queryrangebase.Request, headers http Response: &resp, Headers: httpResponseHeadersToPromResponseHeaders(headers), }, nil + case *logproto.ShardsRequest: + var resp logproto.ShardsResponse + if err := json.Unmarshal(buf, &resp); err != nil { + return nil, httpgrpc.Errorf(http.StatusInternalServerError, "error decoding response: %v", err) + } + return &ShardsResponse{ + Response: &resp, + Headers: httpResponseHeadersToPromResponseHeaders(headers), + }, nil case *logproto.VolumeRequest: var resp logproto.VolumeResponse if err := json.Unmarshal(buf, &resp); err != nil { @@ -1013,6 +1066,8 @@ func decodeResponseProtobuf(r *http.Response, req queryrangebase.Request) (query return resp.GetLabels().WithHeaders(headers), nil case *logproto.IndexStatsRequest: return resp.GetStats().WithHeaders(headers), nil + case *logproto.ShardsRequest: + return resp.GetShardsResponse().WithHeaders(headers), nil default: switch concrete := resp.Response.(type) { case *QueryResponse_Prom: @@ -1110,6 +1165,10 @@ func encodeResponseJSONTo(version loghttp.Version, res queryrangebase.Response, if err := marshal.WriteIndexStatsResponseJSON(response.Response, w); err != nil { return err } + case *ShardsResponse: + if err := marshal.WriteIndexShardsResponseJSON(response.Response, w); err != nil { + return err + } case *VolumeResponse: if err := marshal.WriteVolumeResponseJSON(response.Response, w); err != nil { return err diff --git a/pkg/querier/queryrange/codec_test.go b/pkg/querier/queryrange/codec_test.go index fa6fa9e036711..b615d73b09c01 100644 --- a/pkg/querier/queryrange/codec_test.go +++ b/pkg/querier/queryrange/codec_test.go @@ -1593,6 +1593,10 @@ var ( "queryReferencedStructuredMetadata": true } }, + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "cache": { "chunk": { "entriesFound": 0, diff --git a/pkg/querier/queryrange/downstreamer_test.go b/pkg/querier/queryrange/downstreamer_test.go index cadfceeee20e3..8a305176b6870 100644 --- a/pkg/querier/queryrange/downstreamer_test.go +++ b/pkg/querier/queryrange/downstreamer_test.go @@ -22,6 +22,7 @@ import ( "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/logqlmodel" "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" ) @@ -290,7 +291,7 @@ func TestInstanceFor(t *testing.T) { Params: logql.ParamsWithShardsOverride{ Params: newParams(), ShardsOverride: logql.Shards{ - {Shard: 0, Of: 2}, + logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 2}), }.Encode(), }, }, @@ -298,7 +299,7 @@ func TestInstanceFor(t *testing.T) { Params: 
logql.ParamsWithShardsOverride{ Params: newParams(), ShardsOverride: logql.Shards{ - {Shard: 1, Of: 2}, + logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 2}), }.Encode(), }, }, @@ -363,8 +364,10 @@ func TestInstanceDownstream(t *testing.T) { queries := []logql.DownstreamQuery{ { Params: logql.ParamsWithShardsOverride{ - Params: logql.ParamsWithExpressionOverride{Params: params, ExpressionOverride: expr}, - ShardsOverride: logql.Shards{{Shard: 0, Of: 2}}.Encode(), + Params: logql.ParamsWithExpressionOverride{Params: params, ExpressionOverride: expr}, + ShardsOverride: logql.Shards{ + logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 2}), + }.Encode(), }, }, } diff --git a/pkg/querier/queryrange/extensions.go b/pkg/querier/queryrange/extensions.go index 75d4ce2cb4edd..46a8ebdc17084 100644 --- a/pkg/querier/queryrange/extensions.go +++ b/pkg/querier/queryrange/extensions.go @@ -220,3 +220,19 @@ func (m *QuantileSketchResponse) WithHeaders(h []queryrangebase.PrometheusRespon m.Headers = h return m } + +func (m *ShardsResponse) GetHeaders() []*queryrangebase.PrometheusResponseHeader { + if m != nil { + return convertPrometheusResponseHeadersToPointers(m.Headers) + } + return nil +} + +func (m *ShardsResponse) SetHeader(name, value string) { + m.Headers = setHeader(m.Headers, name, value) +} + +func (m *ShardsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader) queryrangebase.Response { + m.Headers = h + return m +} diff --git a/pkg/querier/queryrange/limits/definitions.go b/pkg/querier/queryrange/limits/definitions.go index 9e1232b750797..f2e55728a594e 100644 --- a/pkg/querier/queryrange/limits/definitions.go +++ b/pkg/querier/queryrange/limits/definitions.go @@ -27,6 +27,7 @@ type Limits interface { TSDBMaxQueryParallelism(context.Context, string) int // TSDBMaxBytesPerShard returns the limit to the number of bytes a single shard TSDBMaxBytesPerShard(string) int + TSDBShardingStrategy(userID string) string RequiredLabels(context.Context, string) []string RequiredNumberLabels(context.Context, string) int diff --git a/pkg/querier/queryrange/marshal.go b/pkg/querier/queryrange/marshal.go index 4480b06adcfac..473b3714464e2 100644 --- a/pkg/querier/queryrange/marshal.go +++ b/pkg/querier/queryrange/marshal.go @@ -200,6 +200,8 @@ func QueryResponseUnwrap(res *QueryResponse) (queryrangebase.Response, error) { return concrete.Labels, nil case *QueryResponse_Stats: return concrete.Stats, nil + case *QueryResponse_ShardsResponse: + return concrete.ShardsResponse, nil case *QueryResponse_Prom: return concrete.Prom, nil case *QueryResponse_Streams: @@ -243,6 +245,8 @@ func QueryResponseWrap(res queryrangebase.Response) (*QueryResponse, error) { p.Response = &QueryResponse_TopkSketches{response} case *QuantileSketchResponse: p.Response = &QueryResponse_QuantileSketches{response} + case *ShardsResponse: + p.Response = &QueryResponse_ShardsResponse{response} default: return nil, fmt.Errorf("invalid response format, got (%T)", res) } @@ -311,6 +315,8 @@ func (Codec) QueryRequestUnwrap(ctx context.Context, req *QueryRequest) (queryra return concrete.Instant, ctx, nil case *QueryRequest_Stats: return concrete.Stats, ctx, nil + case *QueryRequest_ShardsRequest: + return concrete.ShardsRequest, ctx, nil case *QueryRequest_Volume: return concrete.Volume, ctx, nil case *QueryRequest_Streams: @@ -330,7 +336,7 @@ func (Codec) QueryRequestUnwrap(ctx context.Context, req *QueryRequest) (queryra LabelRequest: *concrete.Labels, }, ctx, nil default: - return nil, ctx, 
fmt.Errorf("unsupported request type, got (%T)", req.Request) + return nil, ctx, fmt.Errorf("unsupported request type while unwrapping, got (%T)", req.Request) } } @@ -353,8 +359,10 @@ func (Codec) QueryRequestWrap(ctx context.Context, r queryrangebase.Request) (*Q result.Request = &QueryRequest_Instant{Instant: req} case *LokiRequest: result.Request = &QueryRequest_Streams{Streams: req} + case *logproto.ShardsRequest: + result.Request = &QueryRequest_ShardsRequest{ShardsRequest: req} default: - return nil, fmt.Errorf("unsupported request type, got (%T)", r) + return nil, fmt.Errorf("unsupported request type while wrapping, got (%T)", r) } // Add query tags diff --git a/pkg/querier/queryrange/prometheus_test.go b/pkg/querier/queryrange/prometheus_test.go index 80e4f5367afb0..624c8b5194eec 100644 --- a/pkg/querier/queryrange/prometheus_test.go +++ b/pkg/querier/queryrange/prometheus_test.go @@ -13,6 +13,10 @@ import ( ) var emptyStats = `"stats": { + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "ingester" : { "store": { "chunksDownloadTime": 0, diff --git a/pkg/querier/queryrange/queryrange.pb.go b/pkg/querier/queryrange/queryrange.pb.go index cbc541a044044..38477b8b83912 100644 --- a/pkg/querier/queryrange/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrange.pb.go @@ -831,6 +831,43 @@ func (m *QuantileSketchResponse) XXX_DiscardUnknown() { var xxx_messageInfo_QuantileSketchResponse proto.InternalMessageInfo +type ShardsResponse struct { + Response *github_com_grafana_loki_pkg_logproto.ShardsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.ShardsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` +} + +func (m *ShardsResponse) Reset() { *m = ShardsResponse{} } +func (*ShardsResponse) ProtoMessage() {} +func (*ShardsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_51b9d53b40d11902, []int{13} +} +func (m *ShardsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ShardsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ShardsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ShardsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShardsResponse.Merge(m, src) +} +func (m *ShardsResponse) XXX_Size() int { + return m.Size() +} +func (m *ShardsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ShardsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ShardsResponse proto.InternalMessageInfo + type QueryResponse struct { Status *rpc.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` // Types that are valid to be assigned to Response: @@ -842,13 +879,14 @@ type QueryResponse struct { // *QueryResponse_Volume // *QueryResponse_TopkSketches // *QueryResponse_QuantileSketches + // *QueryResponse_ShardsResponse Response isQueryResponse_Response `protobuf_oneof:"response"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } func (*QueryResponse) ProtoMessage() {} func (*QueryResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{13} + return 
fileDescriptor_51b9d53b40d11902, []int{14} } func (m *QueryResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -908,6 +946,9 @@ type QueryResponse_TopkSketches struct { type QueryResponse_QuantileSketches struct { QuantileSketches *QuantileSketchResponse `protobuf:"bytes,9,opt,name=quantileSketches,proto3,oneof"` } +type QueryResponse_ShardsResponse struct { + ShardsResponse *ShardsResponse `protobuf:"bytes,10,opt,name=shardsResponse,proto3,oneof"` +} func (*QueryResponse_Series) isQueryResponse_Response() {} func (*QueryResponse_Labels) isQueryResponse_Response() {} @@ -917,6 +958,7 @@ func (*QueryResponse_Streams) isQueryResponse_Response() {} func (*QueryResponse_Volume) isQueryResponse_Response() {} func (*QueryResponse_TopkSketches) isQueryResponse_Response() {} func (*QueryResponse_QuantileSketches) isQueryResponse_Response() {} +func (*QueryResponse_ShardsResponse) isQueryResponse_Response() {} func (m *QueryResponse) GetResponse() isQueryResponse_Response { if m != nil { @@ -988,6 +1030,13 @@ func (m *QueryResponse) GetQuantileSketches() *QuantileSketchResponse { return nil } +func (m *QueryResponse) GetShardsResponse() *ShardsResponse { + if x, ok := m.GetResponse().(*QueryResponse_ShardsResponse); ok { + return x.ShardsResponse + } + return nil +} + // XXX_OneofWrappers is for the internal use of the proto package. func (*QueryResponse) XXX_OneofWrappers() []interface{} { return []interface{}{ @@ -999,6 +1048,7 @@ func (*QueryResponse) XXX_OneofWrappers() []interface{} { (*QueryResponse_Volume)(nil), (*QueryResponse_TopkSketches)(nil), (*QueryResponse_QuantileSketches)(nil), + (*QueryResponse_ShardsResponse)(nil), } } @@ -1010,6 +1060,7 @@ type QueryRequest struct { // *QueryRequest_Instant // *QueryRequest_Streams // *QueryRequest_Volume + // *QueryRequest_ShardsRequest Request isQueryRequest_Request `protobuf_oneof:"request"` Metadata map[string]string `protobuf:"bytes,7,rep,name=metadata,proto3" json:"metadata" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } @@ -1017,7 +1068,7 @@ type QueryRequest struct { func (m *QueryRequest) Reset() { *m = QueryRequest{} } func (*QueryRequest) ProtoMessage() {} func (*QueryRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{14} + return fileDescriptor_51b9d53b40d11902, []int{15} } func (m *QueryRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1071,13 +1122,17 @@ type QueryRequest_Streams struct { type QueryRequest_Volume struct { Volume *logproto.VolumeRequest `protobuf:"bytes,6,opt,name=volume,proto3,oneof"` } +type QueryRequest_ShardsRequest struct { + ShardsRequest *logproto.ShardsRequest `protobuf:"bytes,8,opt,name=shardsRequest,proto3,oneof"` +} -func (*QueryRequest_Series) isQueryRequest_Request() {} -func (*QueryRequest_Labels) isQueryRequest_Request() {} -func (*QueryRequest_Stats) isQueryRequest_Request() {} -func (*QueryRequest_Instant) isQueryRequest_Request() {} -func (*QueryRequest_Streams) isQueryRequest_Request() {} -func (*QueryRequest_Volume) isQueryRequest_Request() {} +func (*QueryRequest_Series) isQueryRequest_Request() {} +func (*QueryRequest_Labels) isQueryRequest_Request() {} +func (*QueryRequest_Stats) isQueryRequest_Request() {} +func (*QueryRequest_Instant) isQueryRequest_Request() {} +func (*QueryRequest_Streams) isQueryRequest_Request() {} +func (*QueryRequest_Volume) isQueryRequest_Request() {} +func (*QueryRequest_ShardsRequest) isQueryRequest_Request() {} func (m *QueryRequest) GetRequest() 
isQueryRequest_Request { if m != nil { @@ -1128,6 +1183,13 @@ func (m *QueryRequest) GetVolume() *logproto.VolumeRequest { return nil } +func (m *QueryRequest) GetShardsRequest() *logproto.ShardsRequest { + if x, ok := m.GetRequest().(*QueryRequest_ShardsRequest); ok { + return x.ShardsRequest + } + return nil +} + func (m *QueryRequest) GetMetadata() map[string]string { if m != nil { return m.Metadata @@ -1144,6 +1206,7 @@ func (*QueryRequest) XXX_OneofWrappers() []interface{} { (*QueryRequest_Instant)(nil), (*QueryRequest_Streams)(nil), (*QueryRequest_Volume)(nil), + (*QueryRequest_ShardsRequest)(nil), } } @@ -1161,6 +1224,7 @@ func init() { proto.RegisterType((*VolumeResponse)(nil), "queryrange.VolumeResponse") proto.RegisterType((*TopKSketchesResponse)(nil), "queryrange.TopKSketchesResponse") proto.RegisterType((*QuantileSketchResponse)(nil), "queryrange.QuantileSketchResponse") + proto.RegisterType((*ShardsResponse)(nil), "queryrange.ShardsResponse") proto.RegisterType((*QueryResponse)(nil), "queryrange.QueryResponse") proto.RegisterType((*QueryRequest)(nil), "queryrange.QueryRequest") proto.RegisterMapType((map[string]string)(nil), "queryrange.QueryRequest.MetadataEntry") @@ -1171,102 +1235,107 @@ func init() { } var fileDescriptor_51b9d53b40d11902 = []byte{ - // 1514 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x5b, 0x6f, 0x1b, 0x45, - 0x1b, 0xf6, 0xfa, 0x18, 0x4f, 0x0e, 0x5f, 0xbe, 0x49, 0x94, 0xee, 0x97, 0xf6, 0xdb, 0xb5, 0x2c, - 0xd1, 0x06, 0x04, 0x6b, 0x9a, 0x94, 0x1e, 0x01, 0xd1, 0xa5, 0xad, 0x52, 0xd1, 0xa2, 0x76, 0x13, - 0x71, 0x81, 0xb8, 0x99, 0xd8, 0x13, 0x7b, 0xf1, 0x9e, 0xb2, 0x33, 0x0e, 0xcd, 0x1d, 0x3f, 0x00, - 0xa4, 0xfe, 0x0a, 0x84, 0x44, 0x55, 0x89, 0x5b, 0x2e, 0xb9, 0xa1, 0x97, 0xbd, 0xac, 0x2c, 0xb1, - 0x50, 0x97, 0x0b, 0x94, 0xab, 0xfe, 0x04, 0x34, 0x87, 0x5d, 0xef, 0xda, 0x6e, 0xeb, 0x14, 0x21, - 0xb5, 0x12, 0x37, 0xf6, 0x1c, 0xde, 0x67, 0xf6, 0xdd, 0xe7, 0x79, 0xdf, 0x77, 0x66, 0x16, 0x9c, - 0x0a, 0xba, 0xed, 0xc6, 0x5e, 0x0f, 0x87, 0x36, 0x0e, 0xf9, 0xff, 0x41, 0x88, 0xbc, 0x36, 0x4e, - 0x35, 0x8d, 0x20, 0xf4, 0xa9, 0x0f, 0xc1, 0x70, 0x64, 0x75, 0xbd, 0x6d, 0xd3, 0x4e, 0x6f, 0xc7, - 0x68, 0xfa, 0x6e, 0xa3, 0xed, 0xb7, 0xfd, 0x46, 0xdb, 0xf7, 0xdb, 0x0e, 0x46, 0x81, 0x4d, 0x64, - 0xb3, 0x11, 0x06, 0xcd, 0x06, 0xa1, 0x88, 0xf6, 0x88, 0xc0, 0xaf, 0x2e, 0x33, 0x43, 0xde, 0xe4, - 0x10, 0x39, 0xaa, 0x4b, 0x73, 0xde, 0xdb, 0xe9, 0xed, 0x36, 0xa8, 0xed, 0x62, 0x42, 0x91, 0x1b, - 0x48, 0x83, 0xe3, 0xcc, 0x3f, 0xc7, 0x6f, 0x0b, 0x64, 0xdc, 0x90, 0x93, 0xff, 0xcb, 0x4c, 0x92, - 0x2e, 0xa6, 0xcd, 0x8e, 0x9c, 0xaa, 0xc9, 0xa9, 0x3d, 0xc7, 0xf5, 0x5b, 0xd8, 0xe1, 0xbe, 0x10, - 0xf1, 0x2b, 0x2d, 0x96, 0x98, 0x45, 0xd0, 0x23, 0x1d, 0xfe, 0x23, 0x07, 0x3f, 0x7e, 0x21, 0x1d, - 0x3b, 0x88, 0xe0, 0x46, 0x0b, 0xef, 0xda, 0x9e, 0x4d, 0x6d, 0xdf, 0x23, 0xe9, 0xb6, 0x5c, 0xe4, - 0xec, 0x74, 0x8b, 0x8c, 0x52, 0x5c, 0xbf, 0x5f, 0x00, 0xb3, 0x37, 0xfc, 0xae, 0x6d, 0xe1, 0xbd, - 0x1e, 0x26, 0x14, 0x2e, 0x83, 0x12, 0xb7, 0x51, 0x95, 0x9a, 0xb2, 0x56, 0xb5, 0x44, 0x87, 0x8d, - 0x3a, 0xb6, 0x6b, 0x53, 0x35, 0x5f, 0x53, 0xd6, 0xe6, 0x2d, 0xd1, 0x81, 0x10, 0x14, 0x09, 0xc5, - 0x81, 0x5a, 0xa8, 0x29, 0x6b, 0x05, 0x8b, 0xb7, 0xe1, 0x2a, 0x98, 0xb1, 0x3d, 0x8a, 0xc3, 0x7d, - 0xe4, 0xa8, 0x55, 0x3e, 0x9e, 0xf4, 0xe1, 0x87, 0xa0, 0x42, 0x28, 0x0a, 0xe9, 0x36, 0x51, 0x8b, - 0x35, 0x65, 0x6d, 0x76, 0x7d, 0xd5, 0x10, 0x52, 0x18, 0xb1, 0x14, 0xc6, 0x76, 0x2c, 0x85, 0x39, - 0xf3, 0x20, 0xd2, 0x73, 0x77, 0x7f, 0xd3, 0x15, 0x2b, 0x06, 
0xc1, 0x8b, 0xa0, 0x84, 0xbd, 0xd6, - 0x36, 0x51, 0x4b, 0x47, 0x40, 0x0b, 0x08, 0x3c, 0x0d, 0xaa, 0x2d, 0x3b, 0xc4, 0x4d, 0xc6, 0x99, - 0x5a, 0xae, 0x29, 0x6b, 0x0b, 0xeb, 0x4b, 0x46, 0x22, 0xed, 0x95, 0x78, 0xca, 0x1a, 0x5a, 0xb1, - 0xd7, 0x0b, 0x10, 0xed, 0xa8, 0x15, 0xce, 0x04, 0x6f, 0xc3, 0x3a, 0x28, 0x93, 0x0e, 0x0a, 0x5b, - 0x44, 0x9d, 0xa9, 0x15, 0xd6, 0xaa, 0x26, 0x38, 0x8c, 0x74, 0x39, 0x62, 0xc9, 0x7f, 0xf8, 0x05, - 0x28, 0x06, 0x0e, 0xf2, 0x54, 0xc0, 0xbd, 0x5c, 0x34, 0x52, 0x9c, 0xdf, 0x72, 0x90, 0x67, 0x9e, - 0xed, 0x47, 0x7a, 0x26, 0x9a, 0x43, 0xb4, 0x8b, 0x3c, 0xd4, 0x70, 0xfc, 0xae, 0xdd, 0x48, 0xcb, - 0xc8, 0x56, 0x31, 0x6e, 0x33, 0x34, 0xc3, 0x59, 0x7c, 0xd5, 0xfa, 0x2f, 0x79, 0x00, 0x99, 0x60, - 0xd7, 0x3d, 0x42, 0x91, 0x47, 0x5f, 0x46, 0xb7, 0xf7, 0x41, 0x99, 0x85, 0xfc, 0x36, 0xe1, 0xca, - 0x4d, 0x4b, 0xa4, 0xc4, 0x64, 0x99, 0x2c, 0x1e, 0x89, 0xc9, 0xd2, 0x44, 0x26, 0xcb, 0x2f, 0x64, - 0xb2, 0xf2, 0x8f, 0x30, 0xa9, 0x82, 0x22, 0xeb, 0xc1, 0x45, 0x50, 0x08, 0xd1, 0x57, 0x9c, 0xb8, - 0x39, 0x8b, 0x35, 0xeb, 0x3f, 0x14, 0xc1, 0x9c, 0x48, 0x0a, 0x12, 0xf8, 0x1e, 0xc1, 0xcc, 0xd9, - 0x2d, 0x5e, 0x58, 0x04, 0xbd, 0xd2, 0x59, 0x3e, 0x62, 0xc9, 0x19, 0xf8, 0x11, 0x28, 0x5e, 0x41, - 0x14, 0x71, 0xaa, 0x67, 0xd7, 0x97, 0xd3, 0xce, 0xb2, 0xb5, 0xd8, 0x9c, 0xb9, 0xc2, 0xd8, 0x3c, - 0x8c, 0xf4, 0x85, 0x16, 0xa2, 0xe8, 0x6d, 0xdf, 0xb5, 0x29, 0x76, 0x03, 0x7a, 0x60, 0x71, 0x24, - 0x7c, 0x0f, 0x54, 0xaf, 0x86, 0xa1, 0x1f, 0x6e, 0x1f, 0x04, 0x98, 0x4b, 0x53, 0x35, 0x8f, 0x1d, - 0x46, 0xfa, 0x12, 0x8e, 0x07, 0x53, 0x88, 0xa1, 0x25, 0x7c, 0x13, 0x94, 0x78, 0x87, 0x8b, 0x51, - 0x35, 0x97, 0x0e, 0x23, 0xfd, 0x3f, 0x1c, 0x92, 0x32, 0x17, 0x16, 0x59, 0xed, 0x4a, 0x53, 0x69, - 0x97, 0x84, 0x50, 0x39, 0x1d, 0x42, 0x2a, 0xa8, 0xec, 0xe3, 0x90, 0xb0, 0x65, 0x2a, 0x7c, 0x3c, - 0xee, 0xc2, 0xcb, 0x00, 0x30, 0x62, 0x6c, 0x42, 0xed, 0x26, 0xcb, 0x12, 0x46, 0xc6, 0xbc, 0x21, - 0x8a, 0xa0, 0x85, 0x49, 0xcf, 0xa1, 0x26, 0x94, 0x2c, 0xa4, 0x0c, 0xad, 0x54, 0x1b, 0xde, 0x53, - 0x40, 0x65, 0x13, 0xa3, 0x16, 0x0e, 0x89, 0x5a, 0xad, 0x15, 0xd6, 0x66, 0xd7, 0xdf, 0x30, 0xd2, - 0x15, 0xef, 0x56, 0xe8, 0xbb, 0x98, 0x76, 0x70, 0x8f, 0xc4, 0x02, 0x09, 0x6b, 0xb3, 0xdb, 0x8f, - 0xf4, 0x9d, 0x69, 0xe2, 0x61, 0xaa, 0x2a, 0xfb, 0xcc, 0xe7, 0x1c, 0x46, 0xba, 0xf2, 0x8e, 0x15, - 0xbb, 0x58, 0xff, 0x55, 0x01, 0xff, 0x65, 0x0a, 0x6f, 0xb1, 0xb5, 0x49, 0x2a, 0x21, 0x5d, 0x44, - 0x9b, 0x1d, 0x55, 0x61, 0xe1, 0x6d, 0x89, 0x4e, 0xba, 0x04, 0xe6, 0xff, 0x56, 0x09, 0x2c, 0x1c, - 0xbd, 0x04, 0xc6, 0x59, 0x58, 0x9c, 0x98, 0x85, 0xa5, 0x67, 0x65, 0x61, 0xfd, 0x9b, 0x82, 0xa8, - 0x38, 0xf1, 0xfb, 0x1d, 0x21, 0x27, 0xae, 0x25, 0x39, 0x51, 0xe0, 0xde, 0x26, 0xa1, 0x26, 0xd6, - 0xba, 0xde, 0xc2, 0x1e, 0xb5, 0x77, 0x6d, 0x1c, 0xbe, 0x20, 0x33, 0x52, 0xe1, 0x56, 0xc8, 0x86, - 0x5b, 0x3a, 0x56, 0x8a, 0xaf, 0x7c, 0xac, 0x8c, 0x64, 0x47, 0xe9, 0x25, 0xb2, 0xa3, 0xfe, 0x34, - 0x0f, 0x56, 0x98, 0x1c, 0x37, 0xd0, 0x0e, 0x76, 0x3e, 0x45, 0xee, 0x11, 0x25, 0x39, 0x99, 0x92, - 0xa4, 0x6a, 0xc2, 0x7f, 0x29, 0x9f, 0x82, 0xf2, 0xef, 0x14, 0x30, 0x13, 0xd7, 0x70, 0x68, 0x00, - 0x20, 0x60, 0xbc, 0x4c, 0x0b, 0xa2, 0x17, 0x18, 0x38, 0x4c, 0x46, 0xad, 0x94, 0x05, 0xfc, 0x12, - 0x94, 0x45, 0x4f, 0x66, 0xc1, 0xb1, 0x54, 0x16, 0xd0, 0x10, 0x23, 0xf7, 0x72, 0x0b, 0x05, 0x14, - 0x87, 0xe6, 0x05, 0xe6, 0x45, 0x3f, 0xd2, 0x4f, 0x3d, 0x8f, 0x22, 0x7e, 0x6e, 0x14, 0x38, 0x26, - 0xae, 0x78, 0xa6, 0x25, 0x9f, 0x50, 0xff, 0x56, 0x01, 0x8b, 0xcc, 0x51, 0x46, 0x4d, 0x12, 0x15, - 0x57, 0xc0, 0x4c, 0x28, 0xdb, 0xdc, 0xdd, 0xd9, 0xf5, 0xba, 0x91, 0xa5, 0x75, 0x02, 
0x95, 0x66, - 0xf1, 0x41, 0xa4, 0x2b, 0x56, 0x82, 0x84, 0x1b, 0x19, 0x1a, 0xf3, 0x93, 0x68, 0x64, 0x90, 0x5c, - 0x86, 0xb8, 0x9f, 0xf2, 0x00, 0x5e, 0xf7, 0x5a, 0xf8, 0x0e, 0x0b, 0xbe, 0x61, 0x9c, 0xf6, 0xc6, - 0x3c, 0x3a, 0x31, 0x24, 0x65, 0xdc, 0xde, 0xbc, 0xd4, 0x8f, 0xf4, 0x73, 0xcf, 0x63, 0xe5, 0x39, - 0xe0, 0xd4, 0x2b, 0xa4, 0x03, 0x37, 0xff, 0xea, 0xef, 0x2b, 0xf7, 0xf3, 0x60, 0xe1, 0x33, 0xdf, - 0xe9, 0xb9, 0x38, 0x21, 0xce, 0x1d, 0x23, 0x4e, 0x1d, 0x12, 0x97, 0xb5, 0x35, 0xcf, 0xf5, 0x23, - 0x7d, 0x63, 0x2a, 0xd2, 0xb2, 0xc0, 0xd7, 0x97, 0xb0, 0x7b, 0x79, 0xb0, 0xbc, 0xed, 0x07, 0x9f, - 0x6c, 0xf1, 0x4b, 0x59, 0xaa, 0x2e, 0xe2, 0x31, 0xda, 0x96, 0x87, 0xb4, 0x31, 0xc4, 0x4d, 0x44, - 0x43, 0xfb, 0x8e, 0xb9, 0xd1, 0x8f, 0xf4, 0xc6, 0x54, 0x94, 0x0d, 0x41, 0xaf, 0x2f, 0x5d, 0x3f, - 0xe7, 0xc1, 0xca, 0xed, 0x1e, 0xf2, 0xa8, 0xed, 0x60, 0x41, 0x59, 0x42, 0xd8, 0xc1, 0x18, 0x61, - 0xda, 0x90, 0xb0, 0x2c, 0x46, 0x52, 0xf7, 0x41, 0x3f, 0xd2, 0x2f, 0x4c, 0x45, 0xdd, 0x24, 0xf8, - 0xeb, 0x4b, 0xe2, 0x8f, 0x45, 0x30, 0xcf, 0x2f, 0x16, 0x09, 0x77, 0x6f, 0x01, 0xb9, 0xe5, 0x4a, - 0xe6, 0x60, 0x7c, 0x46, 0x0b, 0x83, 0xa6, 0xb1, 0x25, 0x37, 0x63, 0x61, 0x01, 0xcf, 0x83, 0x32, - 0xe1, 0x27, 0x21, 0x59, 0x50, 0xb5, 0xd1, 0x5b, 0x43, 0xf6, 0xcc, 0xb5, 0x99, 0xb3, 0xa4, 0x3d, - 0xbb, 0xc3, 0x39, 0xec, 0x00, 0x10, 0x9f, 0x04, 0xeb, 0xa3, 0xc8, 0xf1, 0xe3, 0x01, 0x43, 0x0b, - 0x0c, 0x3c, 0x0b, 0x4a, 0xbc, 0x72, 0xcb, 0x7b, 0x78, 0xe6, 0xb1, 0xe3, 0x25, 0x74, 0x33, 0x67, - 0x09, 0x73, 0xb8, 0x0e, 0x8a, 0x41, 0xe8, 0xbb, 0x72, 0x17, 0x3d, 0x31, 0xfa, 0xcc, 0xf4, 0xb6, - 0xb3, 0x99, 0xb3, 0xb8, 0x2d, 0x3c, 0xc3, 0x8e, 0xbc, 0x6c, 0xbf, 0x22, 0xfc, 0x0a, 0xc1, 0x4a, - 0xd6, 0x08, 0x2c, 0x05, 0x89, 0x4d, 0xe1, 0x19, 0x50, 0xde, 0xe7, 0x65, 0x49, 0x5e, 0xfe, 0x56, - 0xd3, 0xa0, 0x6c, 0xc1, 0x62, 0xef, 0x25, 0x6c, 0xe1, 0x35, 0x30, 0x47, 0xfd, 0xa0, 0x1b, 0x17, - 0x00, 0x79, 0xfd, 0xa8, 0xa5, 0xb1, 0x93, 0x0a, 0xc4, 0x66, 0xce, 0xca, 0xe0, 0xe0, 0x2d, 0xb0, - 0xb8, 0x97, 0x09, 0x53, 0x4c, 0xf8, 0xd7, 0x8c, 0x11, 0x9e, 0x27, 0x67, 0xcf, 0x66, 0xce, 0x1a, - 0x43, 0x9b, 0x60, 0x98, 0x51, 0xf5, 0x3f, 0x0a, 0x60, 0x4e, 0xc6, 0x8c, 0xb8, 0x2b, 0x9c, 0x4b, - 0xc2, 0x40, 0x84, 0xcc, 0xff, 0x9f, 0x15, 0x06, 0xdc, 0x3c, 0x15, 0x05, 0xef, 0x26, 0x51, 0x20, - 0xe2, 0x67, 0x65, 0x98, 0xa5, 0x5c, 0xff, 0x14, 0x42, 0x2a, 0xbf, 0x11, 0x2b, 0x2f, 0xc2, 0xe6, - 0xf8, 0xe4, 0x7d, 0x37, 0x46, 0x49, 0xd9, 0x2f, 0x82, 0x8a, 0x2d, 0x3e, 0x37, 0x4c, 0x0a, 0x98, - 0xf1, 0xaf, 0x11, 0x4c, 0x48, 0x09, 0x80, 0x1b, 0x43, 0xf9, 0x45, 0xd4, 0x1c, 0x1b, 0x97, 0x3f, - 0x01, 0xc5, 0xea, 0x9f, 0x4e, 0xd4, 0x2f, 0x4b, 0xcc, 0xd8, 0x66, 0x95, 0xbc, 0x98, 0x94, 0x7e, - 0x13, 0xcc, 0xb8, 0x98, 0x22, 0x76, 0x96, 0x55, 0x2b, 0xbc, 0x6e, 0x9c, 0xcc, 0x4a, 0x35, 0xe4, - 0xdb, 0xb8, 0x29, 0x0d, 0xaf, 0x7a, 0x34, 0x3c, 0x90, 0xc7, 0x96, 0x04, 0xbd, 0x7a, 0x09, 0xcc, - 0x67, 0x0c, 0xe0, 0x22, 0x28, 0x74, 0x71, 0xfc, 0x65, 0x85, 0x35, 0xd9, 0xe5, 0x6e, 0x1f, 0x39, - 0x3d, 0xcc, 0x69, 0xaf, 0x5a, 0xa2, 0x73, 0x31, 0x7f, 0x5e, 0x31, 0xab, 0xa0, 0x12, 0x8a, 0xa7, - 0x98, 0xad, 0x87, 0x8f, 0xb5, 0xdc, 0xa3, 0xc7, 0x5a, 0xee, 0xe9, 0x63, 0x4d, 0xf9, 0x7a, 0xa0, - 0x29, 0xdf, 0x0f, 0x34, 0xe5, 0xc1, 0x40, 0x53, 0x1e, 0x0e, 0x34, 0xe5, 0xf7, 0x81, 0xa6, 0xfc, - 0x39, 0xd0, 0x72, 0x4f, 0x07, 0x9a, 0x72, 0xf7, 0x89, 0x96, 0x7b, 0xf8, 0x44, 0xcb, 0x3d, 0x7a, - 0xa2, 0xe5, 0x3e, 0x37, 0x8e, 0x56, 0xc2, 0x76, 0xca, 0x9c, 0x96, 0x8d, 0xbf, 0x02, 0x00, 0x00, - 0xff, 0xff, 0xe6, 0x4a, 0x9a, 0x06, 0x55, 0x15, 0x00, 0x00, + // 1586 bytes of a gzipped FileDescriptorProto + 
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0xcb, 0x6f, 0x1b, 0x45, + 0x18, 0xf7, 0xfa, 0x19, 0x4f, 0x9a, 0x10, 0x26, 0x51, 0xba, 0xa4, 0xed, 0xae, 0x65, 0x89, 0x36, + 0x20, 0x58, 0xd3, 0xa4, 0xf4, 0x09, 0x88, 0x2e, 0x69, 0xe5, 0x8a, 0x16, 0xb5, 0x9b, 0x88, 0x03, + 0xe2, 0x32, 0x89, 0x27, 0xce, 0x92, 0x7d, 0x65, 0x67, 0x9c, 0x36, 0x37, 0xfe, 0x00, 0x90, 0xfa, + 0x57, 0x20, 0x24, 0xaa, 0x9e, 0x38, 0x71, 0xe4, 0x42, 0x8f, 0x3d, 0x56, 0x91, 0x30, 0x34, 0xbd, + 0xa0, 0x9c, 0x7a, 0xe3, 0x8a, 0xe6, 0xb1, 0xeb, 0x59, 0xdb, 0x69, 0x9d, 0x22, 0xa4, 0x46, 0xe2, + 0x62, 0xcf, 0xe3, 0xfb, 0xcd, 0xce, 0xfe, 0xbe, 0xdf, 0xf7, 0xed, 0x37, 0x03, 0xce, 0x44, 0x9b, + 0xed, 0xc6, 0x56, 0x07, 0xc7, 0x2e, 0x8e, 0xf9, 0xff, 0x4e, 0x8c, 0x82, 0x36, 0x56, 0x9a, 0x56, + 0x14, 0x87, 0x34, 0x84, 0xa0, 0x37, 0x32, 0xb7, 0xd0, 0x76, 0xe9, 0x46, 0x67, 0xd5, 0x5a, 0x0b, + 0xfd, 0x46, 0x3b, 0x6c, 0x87, 0x8d, 0x76, 0x18, 0xb6, 0x3d, 0x8c, 0x22, 0x97, 0xc8, 0x66, 0x23, + 0x8e, 0xd6, 0x1a, 0x84, 0x22, 0xda, 0x21, 0x02, 0x3f, 0x37, 0xc3, 0x0c, 0x79, 0x93, 0x43, 0xe4, + 0xa8, 0x29, 0xcd, 0x79, 0x6f, 0xb5, 0xb3, 0xde, 0xa0, 0xae, 0x8f, 0x09, 0x45, 0x7e, 0x94, 0x18, + 0xb0, 0xfd, 0x79, 0x61, 0x5b, 0x20, 0xdd, 0xa0, 0x85, 0xef, 0xb5, 0x11, 0xc5, 0x77, 0xd1, 0x8e, + 0x34, 0x38, 0x91, 0x31, 0x48, 0x1a, 0x72, 0xf2, 0xad, 0xcc, 0x24, 0xd9, 0xc4, 0x74, 0x6d, 0x43, + 0x4e, 0xd5, 0xe4, 0xd4, 0x96, 0xe7, 0x87, 0x2d, 0xec, 0xf1, 0xcd, 0x12, 0xf1, 0x2b, 0x2d, 0xa6, + 0x99, 0x45, 0xd4, 0x21, 0x1b, 0xfc, 0x47, 0x0e, 0x7e, 0xf6, 0x52, 0xbe, 0x56, 0x11, 0xc1, 0x8d, + 0x16, 0x5e, 0x77, 0x03, 0x97, 0xba, 0x61, 0x40, 0xd4, 0xb6, 0x5c, 0xe4, 0xfc, 0x68, 0x8b, 0xf4, + 0xfb, 0xa0, 0xfe, 0xb0, 0x00, 0xc6, 0x6f, 0x86, 0x9b, 0xae, 0x83, 0xb7, 0x3a, 0x98, 0x50, 0x38, + 0x03, 0x4a, 0xdc, 0x46, 0xd7, 0x6a, 0xda, 0x7c, 0xd5, 0x11, 0x1d, 0x36, 0xea, 0xb9, 0xbe, 0x4b, + 0xf5, 0x7c, 0x4d, 0x9b, 0x9f, 0x70, 0x44, 0x07, 0x42, 0x50, 0x24, 0x14, 0x47, 0x7a, 0xa1, 0xa6, + 0xcd, 0x17, 0x1c, 0xde, 0x86, 0x73, 0x60, 0xcc, 0x0d, 0x28, 0x8e, 0xb7, 0x91, 0xa7, 0x57, 0xf9, + 0x78, 0xda, 0x87, 0x9f, 0x80, 0x0a, 0xa1, 0x28, 0xa6, 0x2b, 0x44, 0x2f, 0xd6, 0xb4, 0xf9, 0xf1, + 0x85, 0x39, 0x4b, 0xf8, 0xca, 0x4a, 0x7c, 0x65, 0xad, 0x24, 0xbe, 0xb2, 0xc7, 0x1e, 0x75, 0xcd, + 0xdc, 0xfd, 0x3f, 0x4c, 0xcd, 0x49, 0x40, 0xf0, 0x32, 0x28, 0xe1, 0xa0, 0xb5, 0x42, 0xf4, 0xd2, + 0x21, 0xd0, 0x02, 0x02, 0xcf, 0x82, 0x6a, 0xcb, 0x8d, 0xf1, 0x1a, 0xe3, 0x4c, 0x2f, 0xd7, 0xb4, + 0xf9, 0xc9, 0x85, 0x69, 0x2b, 0x75, 0xed, 0x52, 0x32, 0xe5, 0xf4, 0xac, 0xd8, 0xeb, 0x45, 0x88, + 0x6e, 0xe8, 0x15, 0xce, 0x04, 0x6f, 0xc3, 0x3a, 0x28, 0x93, 0x0d, 0x14, 0xb7, 0x88, 0x3e, 0x56, + 0x2b, 0xcc, 0x57, 0x6d, 0xb0, 0xdf, 0x35, 0xe5, 0x88, 0x23, 0xff, 0xe1, 0xd7, 0xa0, 0x18, 0x79, + 0x28, 0xd0, 0x01, 0xdf, 0xe5, 0x94, 0xa5, 0x70, 0x7e, 0xdb, 0x43, 0x81, 0x7d, 0x7e, 0xb7, 0x6b, + 0x66, 0xe4, 0x1e, 0xa3, 0x75, 0x14, 0xa0, 0x86, 0x17, 0x6e, 0xba, 0x0d, 0xd5, 0x8d, 0x6c, 0x15, + 0xeb, 0x0e, 0x43, 0x33, 0x9c, 0xc3, 0x57, 0xad, 0xff, 0x96, 0x07, 0x90, 0x39, 0xec, 0x46, 0x40, + 0x28, 0x0a, 0xe8, 0xab, 0xf8, 0xed, 0x23, 0x50, 0x66, 0x31, 0xb1, 0x42, 0xb8, 0xe7, 0x46, 0x25, + 0x52, 0x62, 0xb2, 0x4c, 0x16, 0x0f, 0xc5, 0x64, 0x69, 0x28, 0x93, 0xe5, 0x97, 0x32, 0x59, 0xf9, + 0x4f, 0x98, 0xd4, 0x41, 0x91, 0xf5, 0xe0, 0x14, 0x28, 0xc4, 0xe8, 0x2e, 0x27, 0xee, 0x98, 0xc3, + 0x9a, 0xf5, 0x9f, 0x8a, 0xe0, 0x98, 0x08, 0x0a, 0x12, 0x85, 0x01, 0xc1, 0x6c, 0xb3, 0xcb, 0x3c, + 0xf3, 0x08, 0x7a, 0xe5, 0x66, 0xf9, 0x88, 0x23, 0x67, 0xe0, 0xa7, 0xa0, 0xb8, 0x84, 0x28, 0xe2, + 0x54, 0x8f, 0x2f, 0xcc, 
0xa8, 0x9b, 0x65, 0x6b, 0xb1, 0x39, 0x7b, 0x96, 0xb1, 0xb9, 0xdf, 0x35, + 0x27, 0x5b, 0x88, 0xa2, 0xf7, 0x42, 0xdf, 0xa5, 0xd8, 0x8f, 0xe8, 0x8e, 0xc3, 0x91, 0xf0, 0x43, + 0x50, 0xbd, 0x16, 0xc7, 0x61, 0xbc, 0xb2, 0x13, 0x61, 0xee, 0x9a, 0xaa, 0x7d, 0x7c, 0xbf, 0x6b, + 0x4e, 0xe3, 0x64, 0x50, 0x41, 0xf4, 0x2c, 0xe1, 0x3b, 0xa0, 0xc4, 0x3b, 0xdc, 0x19, 0x55, 0x7b, + 0x7a, 0xbf, 0x6b, 0xbe, 0xc1, 0x21, 0x8a, 0xb9, 0xb0, 0xc8, 0xfa, 0xae, 0x34, 0x92, 0xef, 0x52, + 0x09, 0x95, 0x55, 0x09, 0xe9, 0xa0, 0xb2, 0x8d, 0x63, 0xc2, 0x96, 0xa9, 0xf0, 0xf1, 0xa4, 0x0b, + 0xaf, 0x02, 0xc0, 0x88, 0x71, 0x09, 0x75, 0xd7, 0x58, 0x94, 0x30, 0x32, 0x26, 0x2c, 0x91, 0x04, + 0x1d, 0x4c, 0x3a, 0x1e, 0xb5, 0xa1, 0x64, 0x41, 0x31, 0x74, 0x94, 0x36, 0x7c, 0xa0, 0x81, 0x4a, + 0x13, 0xa3, 0x16, 0x8e, 0x89, 0x5e, 0xad, 0x15, 0xe6, 0xc7, 0x17, 0xde, 0xb6, 0xd4, 0x8c, 0x77, + 0x3b, 0x0e, 0x7d, 0x4c, 0x37, 0x70, 0x87, 0x24, 0x0e, 0x12, 0xd6, 0xf6, 0xe6, 0x6e, 0xd7, 0x5c, + 0x1d, 0x45, 0x0f, 0x23, 0x65, 0xd9, 0x03, 0x9f, 0xb3, 0xdf, 0x35, 0xb5, 0xf7, 0x9d, 0x64, 0x8b, + 0xf5, 0xdf, 0x35, 0xf0, 0x26, 0xf3, 0xf0, 0x32, 0x5b, 0x9b, 0x28, 0x01, 0xe9, 0x23, 0xba, 0xb6, + 0xa1, 0x6b, 0x4c, 0xde, 0x8e, 0xe8, 0xa8, 0x29, 0x30, 0xff, 0xaf, 0x52, 0x60, 0xe1, 0xf0, 0x29, + 0x30, 0x89, 0xc2, 0xe2, 0xd0, 0x28, 0x2c, 0x1d, 0x14, 0x85, 0xf5, 0xef, 0x0a, 0x22, 0xe3, 0x24, + 0xef, 0x77, 0x88, 0x98, 0xb8, 0x9e, 0xc6, 0x44, 0x81, 0xef, 0x36, 0x95, 0x9a, 0x58, 0xeb, 0x46, + 0x0b, 0x07, 0xd4, 0x5d, 0x77, 0x71, 0xfc, 0x92, 0xc8, 0x50, 0xe4, 0x56, 0xc8, 0xca, 0x4d, 0xd5, + 0x4a, 0xf1, 0xb5, 0xd7, 0x4a, 0x5f, 0x74, 0x94, 0x5e, 0x21, 0x3a, 0xea, 0xcf, 0xf3, 0x60, 0x96, + 0xb9, 0xe3, 0x26, 0x5a, 0xc5, 0xde, 0x17, 0xc8, 0x3f, 0xa4, 0x4b, 0x4e, 0x2b, 0x2e, 0xa9, 0xda, + 0xf0, 0x7f, 0xca, 0x47, 0xa0, 0xfc, 0x07, 0x0d, 0x8c, 0x25, 0x39, 0x1c, 0x5a, 0x00, 0x08, 0x18, + 0x4f, 0xd3, 0x82, 0xe8, 0x49, 0x06, 0x8e, 0xd3, 0x51, 0x47, 0xb1, 0x80, 0xdf, 0x80, 0xb2, 0xe8, + 0xc9, 0x28, 0x38, 0xae, 0x44, 0x01, 0x8d, 0x31, 0xf2, 0xaf, 0xb6, 0x50, 0x44, 0x71, 0x6c, 0x5f, + 0x62, 0xbb, 0xd8, 0xed, 0x9a, 0x67, 0x5e, 0x44, 0x11, 0xaf, 0x1b, 0x05, 0x8e, 0x39, 0x57, 0x3c, + 0xd3, 0x91, 0x4f, 0xa8, 0x7f, 0xaf, 0x81, 0x29, 0xb6, 0x51, 0x46, 0x4d, 0xaa, 0x8a, 0x25, 0x30, + 0x16, 0xcb, 0x36, 0xdf, 0xee, 0xf8, 0x42, 0xdd, 0xca, 0xd2, 0x3a, 0x84, 0x4a, 0xbb, 0xf8, 0xa8, + 0x6b, 0x6a, 0x4e, 0x8a, 0x84, 0x8b, 0x19, 0x1a, 0xf3, 0xc3, 0x68, 0x64, 0x90, 0x5c, 0x86, 0xb8, + 0x5f, 0xf2, 0x00, 0xde, 0x60, 0x05, 0x36, 0x13, 0x5f, 0x4f, 0xa7, 0x9d, 0x81, 0x1d, 0x9d, 0xec, + 0x91, 0x32, 0x68, 0x6f, 0x5f, 0xd9, 0xed, 0x9a, 0x17, 0x5e, 0xc4, 0xca, 0x0b, 0xc0, 0xca, 0x2b, + 0xa8, 0xc2, 0xcd, 0xbf, 0xfe, 0xdf, 0x95, 0x87, 0x79, 0x30, 0xf9, 0x65, 0xe8, 0x75, 0x7c, 0x9c, + 0x12, 0xe7, 0x0f, 0x10, 0xa7, 0xf7, 0x88, 0xcb, 0xda, 0xda, 0x17, 0x76, 0xbb, 0xe6, 0xe2, 0x48, + 0xa4, 0x65, 0x81, 0x47, 0x97, 0xb0, 0x07, 0x79, 0x30, 0xb3, 0x12, 0x46, 0x9f, 0x2f, 0xf3, 0x43, + 0x99, 0x92, 0x17, 0xf1, 0x00, 0x6d, 0x33, 0x3d, 0xda, 0x18, 0xe2, 0x16, 0xa2, 0xb1, 0x7b, 0xcf, + 0x5e, 0xdc, 0xed, 0x9a, 0x8d, 0x91, 0x28, 0xeb, 0x81, 0x8e, 0x2e, 0x5d, 0xbf, 0xe6, 0xc1, 0xec, + 0x9d, 0x0e, 0x0a, 0xa8, 0xeb, 0x61, 0x41, 0x59, 0x4a, 0xd8, 0xce, 0x00, 0x61, 0x46, 0x8f, 0xb0, + 0x2c, 0x46, 0x52, 0xf7, 0xf1, 0x6e, 0xd7, 0xbc, 0x34, 0x12, 0x75, 0xc3, 0xe0, 0x47, 0x97, 0xc4, + 0x9f, 0xf3, 0x60, 0x72, 0x59, 0xd4, 0x4b, 0xc9, 0x1b, 0x90, 0x21, 0xe4, 0xa9, 0xb7, 0x0c, 0xd1, + 0xaa, 0x95, 0x45, 0x1c, 0x22, 0x54, 0xb3, 0xc0, 0xa3, 0x4b, 0xdb, 0xdf, 0x45, 0x30, 0xc1, 0xcf, + 0x63, 0x29, 0x6b, 0xef, 0x02, 0x59, 0xa9, 0x48, 
0xce, 0x60, 0x52, 0xda, 0xc6, 0xd1, 0x9a, 0xb5, + 0x2c, 0x6b, 0x18, 0x61, 0x01, 0x2f, 0x82, 0x32, 0xe1, 0x05, 0xa4, 0xfc, 0x0e, 0x19, 0xfd, 0x87, + 0xad, 0x6c, 0xa9, 0xda, 0xcc, 0x39, 0xd2, 0x9e, 0x1d, 0x7d, 0x3d, 0x56, 0x37, 0x25, 0x05, 0x74, + 0xbd, 0x1f, 0x39, 0x58, 0x55, 0x31, 0xb4, 0xc0, 0xc0, 0xf3, 0xa0, 0xc4, 0x3f, 0x78, 0xf2, 0xfa, + 0x22, 0xf3, 0xd8, 0xc1, 0x2f, 0x4f, 0x33, 0xe7, 0x08, 0x73, 0xb8, 0x00, 0x8a, 0x51, 0x1c, 0xfa, + 0xb2, 0xf8, 0x38, 0xd9, 0xff, 0x4c, 0xf5, 0x6b, 0xdd, 0xcc, 0x39, 0xdc, 0x16, 0x9e, 0x63, 0x27, + 0x05, 0xf6, 0x99, 0x27, 0xfc, 0xe4, 0xc5, 0x32, 0x7d, 0x1f, 0x4c, 0x81, 0x24, 0xa6, 0xf0, 0x1c, + 0x28, 0x6f, 0xf3, 0x6c, 0x2e, 0xcf, 0xcc, 0x73, 0x2a, 0x28, 0x9b, 0xe7, 0xd9, 0x7b, 0x09, 0x5b, + 0x78, 0x1d, 0x1c, 0xa3, 0x61, 0xb4, 0x99, 0xe4, 0x4d, 0x79, 0x6a, 0xab, 0xa9, 0xd8, 0x61, 0x79, + 0xb5, 0x99, 0x73, 0x32, 0x38, 0x78, 0x1b, 0x4c, 0x6d, 0x65, 0xa2, 0x1b, 0x13, 0x7e, 0x09, 0xd4, + 0xc7, 0xf3, 0xf0, 0xa4, 0xd3, 0xcc, 0x39, 0x03, 0x68, 0xb8, 0x04, 0x26, 0x49, 0x46, 0xf2, 0xf2, + 0x56, 0x25, 0xf3, 0x5e, 0xd9, 0xa0, 0x68, 0xe6, 0x9c, 0x3e, 0x8c, 0x0d, 0x7a, 0x11, 0xc9, 0xcf, + 0xf6, 0x52, 0x79, 0xe2, 0xa0, 0x76, 0x21, 0x15, 0x93, 0x10, 0xde, 0xa9, 0x83, 0xc4, 0xc4, 0xcd, + 0x15, 0x2d, 0x7d, 0x90, 0x6a, 0x49, 0xa8, 0x70, 0xb6, 0x97, 0x22, 0xb9, 0x8a, 0x14, 0x84, 0xd4, + 0xcf, 0x62, 0xa2, 0x1f, 0x21, 0xbe, 0x13, 0xc3, 0x8b, 0x9e, 0x04, 0x25, 0xc5, 0x73, 0x19, 0x54, + 0x5c, 0x71, 0xd7, 0x33, 0x4c, 0x76, 0x83, 0x57, 0x41, 0x4c, 0x0e, 0x12, 0x00, 0x17, 0x7b, 0x22, + 0x12, 0xda, 0x3b, 0x3e, 0x28, 0xa2, 0x14, 0x94, 0x68, 0xe8, 0x6c, 0xaa, 0xa1, 0xb2, 0xc4, 0x0c, + 0x54, 0x0a, 0xe9, 0x8b, 0x49, 0x01, 0x5d, 0x03, 0x13, 0x09, 0xe5, 0x7c, 0x4a, 0x2a, 0xe8, 0xd4, + 0x41, 0x79, 0x2f, 0xc1, 0x67, 0x51, 0xb0, 0x09, 0xc6, 0x7c, 0x4c, 0x11, 0x3b, 0x8f, 0xe8, 0x15, + 0x9e, 0xc4, 0x4e, 0x67, 0x75, 0xd3, 0x73, 0x9b, 0x75, 0x4b, 0x1a, 0x5e, 0x0b, 0x68, 0xbc, 0x23, + 0x4b, 0xcf, 0x14, 0x3d, 0x77, 0x05, 0x4c, 0x64, 0x0c, 0xe0, 0x14, 0x28, 0x6c, 0xe2, 0xe4, 0x76, + 0x8c, 0x35, 0xd9, 0x01, 0x7d, 0x1b, 0x79, 0x1d, 0xcc, 0xbd, 0x57, 0x75, 0x44, 0xe7, 0x72, 0xfe, + 0xa2, 0x66, 0x57, 0x41, 0x25, 0x16, 0x4f, 0xb1, 0x5b, 0x8f, 0x9f, 0x1a, 0xb9, 0x27, 0x4f, 0x8d, + 0xdc, 0xf3, 0xa7, 0x86, 0xf6, 0xed, 0x9e, 0xa1, 0xfd, 0xb8, 0x67, 0x68, 0x8f, 0xf6, 0x0c, 0xed, + 0xf1, 0x9e, 0xa1, 0xfd, 0xb9, 0x67, 0x68, 0x7f, 0xed, 0x19, 0xb9, 0xe7, 0x7b, 0x86, 0x76, 0xff, + 0x99, 0x91, 0x7b, 0xfc, 0xcc, 0xc8, 0x3d, 0x79, 0x66, 0xe4, 0xbe, 0xb2, 0x0e, 0x97, 0x4f, 0x57, + 0xcb, 0x9c, 0xdd, 0xc5, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x3b, 0x20, 0xe2, 0xc2, 0x3a, 0x17, + 0x00, 0x00, } func (this *LokiRequest) Equal(that interface{}) bool { @@ -1798,6 +1867,42 @@ func (this *QuantileSketchResponse) Equal(that interface{}) bool { } return true } +func (this *ShardsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*ShardsResponse) + if !ok { + that2, ok := that.(ShardsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Response == nil { + if this.Response != nil { + return false + } + } else if !this.Response.Equal(*that1.Response) { + return false + } + if len(this.Headers) != len(that1.Headers) { + return false + } + for i := range this.Headers { + if !this.Headers[i].Equal(that1.Headers[i]) { + return false + } + } + return true +} func (this *QueryResponse) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2023,6 
+2128,30 @@ func (this *QueryResponse_QuantileSketches) Equal(that interface{}) bool { } return true } +func (this *QueryResponse_ShardsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryResponse_ShardsResponse) + if !ok { + that2, ok := that.(QueryResponse_ShardsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.ShardsResponse.Equal(that1.ShardsResponse) { + return false + } + return true +} func (this *QueryRequest) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2205,6 +2334,30 @@ func (this *QueryRequest_Volume) Equal(that interface{}) bool { } return true } +func (this *QueryRequest_ShardsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryRequest_ShardsRequest) + if !ok { + that2, ok := that.(QueryRequest_ShardsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.ShardsRequest.Equal(that1.ShardsRequest) { + return false + } + return true +} func (this *LokiRequest) GoString() string { if this == nil { return "nil" @@ -2384,11 +2537,22 @@ func (this *QuantileSketchResponse) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *ShardsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&queryrange.ShardsResponse{") + s = append(s, "Response: "+fmt.Sprintf("%#v", this.Response)+",\n") + s = append(s, "Headers: "+fmt.Sprintf("%#v", this.Headers)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func (this *QueryResponse) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 13) + s := make([]string, 0, 14) s = append(s, "&queryrange.QueryResponse{") if this.Status != nil { s = append(s, "Status: "+fmt.Sprintf("%#v", this.Status)+",\n") @@ -2463,11 +2627,19 @@ func (this *QueryResponse_QuantileSketches) GoString() string { `QuantileSketches:` + fmt.Sprintf("%#v", this.QuantileSketches) + `}`}, ", ") return s } +func (this *QueryResponse_ShardsResponse) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryResponse_ShardsResponse{` + + `ShardsResponse:` + fmt.Sprintf("%#v", this.ShardsResponse) + `}`}, ", ") + return s +} func (this *QueryRequest) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 11) + s := make([]string, 0, 12) s = append(s, "&queryrange.QueryRequest{") if this.Request != nil { s = append(s, "Request: "+fmt.Sprintf("%#v", this.Request)+",\n") @@ -2536,6 +2708,14 @@ func (this *QueryRequest_Volume) GoString() string { `Volume:` + fmt.Sprintf("%#v", this.Volume) + `}`}, ", ") return s } +func (this *QueryRequest_ShardsRequest) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryRequest_ShardsRequest{` + + `ShardsRequest:` + fmt.Sprintf("%#v", this.ShardsRequest) + `}`}, ", ") + return s +} func valueToGoStringQueryrange(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -3327,6 +3507,55 @@ func (m *QuantileSketchResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) return len(dAtA) - i, nil } +func (m *ShardsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := 
m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ShardsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Headers) > 0 { + for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { + { + size := m.Headers[iNdEx].Size() + i -= size + if _, err := m.Headers[iNdEx].MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.Response != nil { + { + size := m.Response.Size() + i -= size + if _, err := m.Response.MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *QueryResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3531,6 +3760,26 @@ func (m *QueryResponse_QuantileSketches) MarshalToSizedBuffer(dAtA []byte) (int, } return len(dAtA) - i, nil } +func (m *QueryResponse_ShardsResponse) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.ShardsResponse != nil { + { + size, err := m.ShardsResponse.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } + return len(dAtA) - i, nil +} func (m *QueryRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3551,6 +3800,15 @@ func (m *QueryRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Request != nil { + { + size := m.Request.Size() + i -= size + if _, err := m.Request.MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + } + } if len(m.Metadata) > 0 { for k := range m.Metadata { v := m.Metadata[k] @@ -3570,15 +3828,6 @@ func (m *QueryRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x3a } } - if m.Request != nil { - { - size := m.Request.Size() - i -= size - if _, err := m.Request.MarshalTo(dAtA[i:]); err != nil { - return 0, err - } - } - } return len(dAtA) - i, nil } @@ -3702,6 +3951,26 @@ func (m *QueryRequest_Volume) MarshalToSizedBuffer(dAtA []byte) (int, error) { } return len(dAtA) - i, nil } +func (m *QueryRequest_ShardsRequest) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryRequest_ShardsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.ShardsRequest != nil { + { + size, err := m.ShardsRequest.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x42 + } + return len(dAtA) - i, nil +} func encodeVarintQueryrange(dAtA []byte, offset int, v uint64) int { offset -= sovQueryrange(v) base := offset @@ -4043,6 +4312,25 @@ func (m *QuantileSketchResponse) Size() (n int) { return n } +func (m *ShardsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Response != nil { + l = m.Response.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + if len(m.Headers) > 0 { + for _, e := range m.Headers { + l = e.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + } + return n +} + 
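For context on the size bookkeeping in the generated `Size()` methods above: each length-delimited field is counted as one tag byte (valid for the small field numbers used here), a varint length prefix, and the payload, with `sovQueryrange` returning the varint width at 7 payload bits per byte. A minimal, self-contained sketch of that arithmetic; the helper names are illustrative and not part of the generated file:

```go
package main

import (
	"fmt"
	"math/bits"
)

// sov mirrors the generated sovQueryrange helper: the number of bytes
// needed to varint-encode x (protobuf varints carry 7 bits per byte).
func sov(x uint64) int {
	return (bits.Len64(x|1) + 6) / 7
}

// sizeOfLengthDelimitedField is the bookkeeping the generated Size()
// methods perform for an embedded message or bytes field: one tag byte,
// a varint length prefix, then the payload itself.
func sizeOfLengthDelimitedField(payloadLen int) int {
	return 1 + sov(uint64(payloadLen)) + payloadLen
}

func main() {
	fmt.Println(sov(127))                        // 1
	fmt.Println(sov(128))                        // 2
	fmt.Println(sizeOfLengthDelimitedField(300)) // 1 + 2 + 300 = 303
}
```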
func (m *QueryResponse) Size() (n int) { if m == nil { return 0 @@ -4155,6 +4443,18 @@ func (m *QueryResponse_QuantileSketches) Size() (n int) { } return n } +func (m *QueryResponse_ShardsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ShardsResponse != nil { + l = m.ShardsResponse.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func (m *QueryRequest) Size() (n int) { if m == nil { return 0 @@ -4247,6 +4547,18 @@ func (m *QueryRequest_Volume) Size() (n int) { } return n } +func (m *QueryRequest_ShardsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ShardsRequest != nil { + l = m.ShardsRequest.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func sovQueryrange(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 @@ -4430,6 +4742,17 @@ func (this *QuantileSketchResponse) String() string { }, "") return s } +func (this *ShardsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&ShardsResponse{`, + `Response:` + fmt.Sprintf("%v", this.Response) + `,`, + `Headers:` + fmt.Sprintf("%v", this.Headers) + `,`, + `}`, + }, "") + return s +} func (this *QueryResponse) String() string { if this == nil { return "nil" @@ -4521,6 +4844,16 @@ func (this *QueryResponse_QuantileSketches) String() string { }, "") return s } +func (this *QueryResponse_ShardsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryResponse_ShardsResponse{`, + `ShardsResponse:` + strings.Replace(fmt.Sprintf("%v", this.ShardsResponse), "ShardsResponse", "ShardsResponse", 1) + `,`, + `}`, + }, "") + return s +} func (this *QueryRequest) String() string { if this == nil { return "nil" @@ -4602,6 +4935,16 @@ func (this *QueryRequest_Volume) String() string { }, "") return s } +func (this *QueryRequest_ShardsRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryRequest_ShardsRequest{`, + `ShardsRequest:` + strings.Replace(fmt.Sprintf("%v", this.ShardsRequest), "ShardsRequest", "logproto.ShardsRequest", 1) + `,`, + `}`, + }, "") + return s +} func valueToStringQueryrange(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -6942,6 +7285,129 @@ func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { } return nil } +func (m *ShardsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ShardsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ShardsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Response", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return 
ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Response == nil { + m.Response = &github_com_grafana_loki_pkg_logproto.ShardsResponse{} + } + if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQueryrange(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *QueryResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -7287,6 +7753,41 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { } m.Response = &QueryResponse_QuantileSketches{v} iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ShardsResponse", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &ShardsResponse{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Response = &QueryResponse_ShardsResponse{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) @@ -7677,6 +8178,41 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { } m.Metadata[mapkey] = mapvalue iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ShardsRequest", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &logproto.ShardsRequest{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Request = &QueryRequest_ShardsRequest{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) diff --git 
a/pkg/querier/queryrange/queryrange.proto b/pkg/querier/queryrange/queryrange.proto index f673464acfc0b..4d9fb84853757 100644 --- a/pkg/querier/queryrange/queryrange.proto +++ b/pkg/querier/queryrange/queryrange.proto @@ -5,6 +5,7 @@ package queryrange; import "github.com/gogo/googleapis/google/rpc/status.proto"; import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; +import "pkg/logproto/indexgateway.proto"; import "pkg/logproto/logproto.proto"; import "pkg/logproto/sketch.proto"; import "pkg/logqlmodel/stats/stats.proto"; @@ -166,6 +167,14 @@ message QuantileSketchResponse { ]; } +message ShardsResponse { + indexgatewaypb.ShardsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.ShardsResponse"]; + repeated definitions.PrometheusResponseHeader Headers = 2 [ + (gogoproto.jsontag) = "-", + (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + ]; +} + message QueryResponse { google.rpc.Status status = 1; oneof response { @@ -177,6 +186,7 @@ message QueryResponse { VolumeResponse volume = 7; TopKSketchesResponse topkSketches = 8; QuantileSketchResponse quantileSketches = 9; + ShardsResponse shardsResponse = 10; } } @@ -188,6 +198,7 @@ message QueryRequest { LokiInstantRequest instant = 4; LokiRequest streams = 5; logproto.VolumeRequest volume = 6; + indexgatewaypb.ShardsRequest shardsRequest = 8; } map metadata = 7 [(gogoproto.nullable) = false]; } diff --git a/pkg/querier/queryrange/querysharding.go b/pkg/querier/queryrange/querysharding.go index a6c32b1525862..8a11c546a7b64 100644 --- a/pkg/querier/queryrange/querysharding.go +++ b/pkg/querier/queryrange/querysharding.go @@ -189,13 +189,27 @@ func (ast *astMapperware) Do(ctx context.Context, r queryrangebase.Request) (que ast.maxShards, r, ast.statsHandler, + ast.next, ast.limits, ) if !ok { return ast.next.Do(ctx, r) } - mapper := logql.NewShardMapper(resolver, ast.metrics, ast.shardAggregation) + v := ast.limits.TSDBShardingStrategy(tenants[0]) + version, err := logql.ParseShardVersion(v) + if err != nil { + level.Warn(logger).Log( + "msg", "failed to parse shard version", + "fallback", version.String(), + "err", err.Error(), + "user", tenants[0], + "query", r.GetQuery(), + ) + } + strategy := version.Strategy(resolver, uint64(ast.limits.TSDBMaxBytesPerShard(tenants[0]))) + + mapper := logql.NewShardMapper(strategy, ast.metrics, ast.shardAggregation) noop, bytesPerShard, parsed, err := mapper.Parse(params.GetExpression()) if err != nil { @@ -232,9 +246,7 @@ func (ast *astMapperware) Do(ctx context.Context, r queryrangebase.Request) (que } // Merge index and volume stats result cache stats from shard resolver into the query stats. 
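The querysharding.go hunk here makes the sharding strategy a per-tenant choice: the limit value is parsed into a shard version, and a parse failure only logs a warning and keeps the default version rather than failing the query. A small sketch of that parse-with-fallback pattern, using hypothetical names in place of the real `logql.ParseShardVersion`/`Strategy` plumbing:

```go
package main

import (
	"fmt"
	"log"
	"strings"
)

// ShardVersion is a stand-in for the parsed sharding strategy,
// e.g. power-of-two vs. bounded (bloom-assisted) sharding.
type ShardVersion int

const (
	PowerOfTwoVersion ShardVersion = iota
	BoundedVersion
)

// parseShardVersion mimics the parse step: an unknown value returns an
// error together with a safe default the caller can keep using.
func parseShardVersion(s string) (ShardVersion, error) {
	switch strings.ToLower(s) {
	case "power_of_two", "":
		return PowerOfTwoVersion, nil
	case "bounded":
		return BoundedVersion, nil
	default:
		return PowerOfTwoVersion, fmt.Errorf("unknown shard strategy %q", s)
	}
}

func main() {
	// A bad per-tenant limit value degrades to the default instead of
	// erroring out the whole query.
	v, err := parseShardVersion("typo_strategy")
	if err != nil {
		log.Printf("failed to parse shard version, falling back to %v: %v", v, err)
	}
	fmt.Println("using strategy:", v)
}
```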
- res.Statistics.Caches.StatsResult.Merge(resolverStats.Caches().StatsResult) - res.Statistics.Caches.VolumeResult.Merge(resolverStats.Caches().VolumeResult) - + res.Statistics.Merge(resolverStats.Result(0, 0, 0)) value, err := marshal.NewResultValue(res.Data) if err != nil { return nil, err diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 5184ef62bb13c..28d71f8fa880a 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -397,6 +397,7 @@ const ( IndexStatsOp = "index_stats" VolumeOp = "volume" VolumeRangeOp = "volume_range" + IndexShardsOp = "index_shards" ) func getOperation(path string) string { @@ -415,6 +416,8 @@ func getOperation(path string) string { return VolumeOp case path == "/loki/api/v1/index/volume_range": return VolumeRangeOp + case path == "/loki/api/v1/index/shards": + return IndexShardsOp default: return "" } diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index 206822a50f6e8..f7ce311eee3f7 100644 --- a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -1380,6 +1380,9 @@ func (f fakeLimits) VolumeEnabled(_ string) bool { func (f fakeLimits) TSDBMaxBytesPerShard(_ string) int { return valid.DefaultTSDBMaxBytesPerShard } +func (f fakeLimits) TSDBShardingStrategy(string) string { + return logql.PowerOfTwoVersion.String() +} type ingesterQueryOpts struct { queryStoreOnly bool diff --git a/pkg/querier/queryrange/shard_resolver.go b/pkg/querier/queryrange/shard_resolver.go index 652637a724655..2808a4ae2eb21 100644 --- a/pkg/querier/queryrange/shard_resolver.go +++ b/pkg/querier/queryrange/shard_resolver.go @@ -3,14 +3,16 @@ package queryrange import ( "context" "fmt" - "math" + "net/http" strings "strings" "time" "github.com/dustin/go-humanize" + "github.com/efficientgo/core/errors" "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/grafana/dskit/concurrency" + "github.com/grafana/dskit/httpgrpc" "github.com/grafana/dskit/tenant" "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" @@ -18,13 +20,13 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/logql/syntax" + logqlstats "github.com/grafana/loki/pkg/logqlmodel/stats" "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/index/stats" - utilMath "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util/spanlogger" "github.com/grafana/loki/pkg/util/validation" - valid "github.com/grafana/loki/pkg/validation" ) func shardResolverForConf( @@ -35,14 +37,15 @@ func shardResolverForConf( maxParallelism int, maxShards int, r queryrangebase.Request, - handler queryrangebase.Handler, + statsHandler, next queryrangebase.Handler, limits Limits, ) (logql.ShardResolver, bool) { if conf.IndexType == config.TSDBType { return &dynamicShardResolver{ ctx: ctx, logger: logger, - handler: handler, + statsHandler: statsHandler, + next: next, limits: limits, from: model.Time(r.GetStart().UnixMilli()), through: model.Time(r.GetEnd().UnixMilli()), @@ -58,10 +61,13 @@ func shardResolverForConf( } type dynamicShardResolver struct { - ctx context.Context - handler queryrangebase.Handler - logger log.Logger - limits Limits + ctx context.Context + // TODO(owen-d): shouldn't have to fork 
handlers here -- one should just transparently handle the right logic + // depending on the underlying type? + statsHandler queryrangebase.Handler // index stats handler (hooked up to results cache, etc) + next queryrangebase.Handler // next handler in the chain (used for non-stats reqs) + logger log.Logger + limits Limits from, through model.Time maxParallelism int @@ -154,7 +160,7 @@ func (r *dynamicShardResolver) GetStats(e syntax.Expr) (stats.Stats, error) { grps = append(grps, syntax.MatcherRange{}) } - results, err := getStatsForMatchers(ctx, log, r.handler, r.from, r.through, grps, r.maxParallelism, r.defaultLookback) + results, err := getStatsForMatchers(ctx, log, r.statsHandler, r.from, r.through, grps, r.maxParallelism, r.defaultLookback) if err != nil { return stats.Stats{}, err } @@ -192,7 +198,7 @@ func (r *dynamicShardResolver) Shards(e syntax.Expr) (int, uint64, error) { } maxBytesPerShard := validation.SmallestPositiveIntPerTenant(tenantIDs, r.limits.TSDBMaxBytesPerShard) - factor := guessShardFactor(combined, maxBytesPerShard, r.maxShards) + factor := sharding.GuessShardFactor(combined.Bytes, uint64(maxBytesPerShard), r.maxShards) var bytesPerShard = combined.Bytes if factor > 0 { @@ -211,34 +217,84 @@ func (r *dynamicShardResolver) Shards(e syntax.Expr) (int, uint64, error) { return factor, bytesPerShard, nil } -// Since we shard by powers of two and we increase shard factor -// once each shard surpasses maxBytesPerShard, if the shard factor -// is at least two, the range of data per shard is (maxBytesPerShard/2, maxBytesPerShard] -// For instance, for a maxBytesPerShard of 500MB and a query touching 1000MB, we split into two shards of 500MB. -// If there are 1004MB, we split into four shards of 251MB. -func guessShardFactor(stats stats.Stats, maxBytesPerShard, maxShards int) int { - // If maxBytesPerShard is 0, we use the default value - // to avoid division by zero - if maxBytesPerShard < 1 { - maxBytesPerShard = valid.DefaultTSDBMaxBytesPerShard +func (r *dynamicShardResolver) ShardingRanges(expr syntax.Expr, targetBytesPerShard uint64) ([]logproto.Shard, error) { + sp, ctx := opentracing.StartSpanFromContext(r.ctx, "dynamicShardResolver.ShardingRanges") + defer sp.Finish() + log := spanlogger.FromContext(ctx) + defer log.Finish() + + adjustedFrom := r.from + + // NB(owen-d): there should only ever be 1 matcher group passed + // to this call as we call it separately for different legs + // of binary ops, but I'm putting in the loop for completion + grps, err := syntax.MatcherGroups(expr) + if err != nil { + return nil, err } - minShards := float64(stats.Bytes) / float64(maxBytesPerShard) + for _, grp := range grps { + diff := grp.Interval + grp.Offset - // round up to nearest power of 2 - power := math.Ceil(math.Log2(minShards)) + // For instant queries, when start == end, + // we have a default lookback which we add here + if grp.Interval == 0 { + diff = diff + r.defaultLookback + } - // Since x^0 == 1 and we only support factors of 2 - // reset this edge case manually - factor := int(math.Pow(2, power)) - if maxShards > 0 { - factor = utilMath.Min(factor, maxShards) + // use the oldest adjustedFrom + if r.from.Add(-diff).Before(adjustedFrom) { + adjustedFrom = r.from.Add(-diff) + } } - // shortcut: no need to run any sharding logic when factor=1 - // as it's the same as no sharding - if factor == 1 { - factor = 0 + exprStr := expr.String() + // try to get shards for the given expression + // if it fails, fallback to linearshards based on stats + resp, err := 
r.next.Do(ctx, &logproto.ShardsRequest{ + From: adjustedFrom, + Through: r.through, + Query: expr.String(), + TargetBytesPerShard: targetBytesPerShard, + }) + + if err != nil { + // check unimplemented to fallback + // TODO(owen-d): fix if this isn't right + if resp, ok := httpgrpc.HTTPResponseFromError(err); ok && (resp.Code == http.StatusNotFound) { + n, bytesPerShard, err := r.Shards(expr) + if err != nil { + return nil, errors.Wrap(err, "falling back to building linear shards from stats") + } + level.Debug(log).Log( + "msg", "falling back to building linear shards from stats", + "bytes_per_shard", bytesPerShard, + "shards", n, + "query", exprStr, + ) + return sharding.LinearShards(n, uint64(n)*bytesPerShard), nil + } + + return nil, errors.Wrapf(err, "failed to get shards for expression, got %T: %+v", err, err) + + } + + casted, ok := resp.(*ShardsResponse) + if !ok { + return nil, fmt.Errorf("expected *ShardsResponse while querying index, got %T", resp) } - return factor + + // accumulate stats + logqlstats.JoinResults(ctx, casted.Response.Statistics) + + level.Debug(log).Log( + "msg", "retrieved sharding ranges", + "target_bytes_per_shard", targetBytesPerShard, + "shards", len(casted.Response.Shards), + "query", exprStr, + "total_chunks", casted.Response.Statistics.Index.TotalChunks, + "post_filter_chunks:", casted.Response.Statistics.Index.PostFilterChunks, + ) + + return casted.Response.Shards, err } diff --git a/pkg/querier/queryrange/split_by_interval.go b/pkg/querier/queryrange/split_by_interval.go index ef05aa969ec1e..2f693b438dbd0 100644 --- a/pkg/querier/queryrange/split_by_interval.go +++ b/pkg/querier/queryrange/split_by_interval.go @@ -223,7 +223,7 @@ func (h *splitByInterval) Do(ctx context.Context, r queryrangebase.Request) (que intervals[i], intervals[j] = intervals[j], intervals[i] } } - case *LokiSeriesRequest, *LabelRequest, *logproto.IndexStatsRequest, *logproto.VolumeRequest: + case *LokiSeriesRequest, *LabelRequest, *logproto.IndexStatsRequest, *logproto.VolumeRequest, *logproto.ShardsRequest: // Set this to 0 since this is not used in Series/Labels/Index Request. 
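The `guessShardFactor` helper removed in this hunk (its logic now lives in `sharding.GuessShardFactor`) rounds the required shard count up to a power of two so each shard stays at or under the target byte budget, caps the result at `maxShards`, and collapses a factor of 1 to 0, meaning no sharding. A self-contained sketch reproducing that arithmetic under the same assumptions:

```go
package main

import (
	"fmt"
	"math"
)

// guessShardFactor rounds bytes/maxBytesPerShard up to the next power
// of two, caps the result at maxShards (when set), and returns 0 when
// a single shard suffices, i.e. no sharding is needed.
func guessShardFactor(bytes, maxBytesPerShard uint64, maxShards int) int {
	if maxBytesPerShard == 0 {
		return 0 // avoid division by zero; callers normally substitute a default budget
	}
	minShards := float64(bytes) / float64(maxBytesPerShard)
	factor := int(math.Pow(2, math.Ceil(math.Log2(minShards))))
	if maxShards > 0 && factor > maxShards {
		factor = maxShards
	}
	if factor <= 1 {
		return 0
	}
	return factor
}

func main() {
	const mb = uint64(1 << 20)
	fmt.Println(guessShardFactor(1000*mb, 500*mb, 0)) // 2: two shards of ~500MB
	fmt.Println(guessShardFactor(1004*mb, 500*mb, 0)) // 4: rounds up to the next power of two
	fmt.Println(guessShardFactor(300*mb, 500*mb, 0))  // 0: fits in a single shard
}
```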
limit = 0 default: diff --git a/pkg/querier/queryrange/splitters.go b/pkg/querier/queryrange/splitters.go index eddcc10edf491..e9f2de5b41034 100644 --- a/pkg/querier/queryrange/splitters.go +++ b/pkg/querier/queryrange/splitters.go @@ -75,6 +75,15 @@ func (s *defaultSplitter) split(execTime time.Time, tenantIDs []string, req quer Matchers: r.GetMatchers(), }) } + case *logproto.ShardsRequest: + factory = func(start, end time.Time) { + reqs = append(reqs, &logproto.ShardsRequest{ + From: model.TimeFromUnix(start.Unix()), + Through: model.TimeFromUnix(end.Unix()), + Query: r.Query, + TargetBytesPerShard: r.TargetBytesPerShard, + }) + } case *logproto.VolumeRequest: factory = func(start, end time.Time) { reqs = append(reqs, &logproto.VolumeRequest{ diff --git a/pkg/querier/queryrange/stats.go b/pkg/querier/queryrange/stats.go index 71f93959c3b69..4cc9de2f3d2c9 100644 --- a/pkg/querier/queryrange/stats.go +++ b/pkg/querier/queryrange/stats.go @@ -35,6 +35,7 @@ const ( queryTypeLabel = "label" queryTypeStats = "stats" queryTypeVolume = "volume" + queryTypeShards = "shards" ) var ( @@ -160,6 +161,9 @@ func StatsCollectorMiddleware() queryrangebase.Middleware { responseStats = &stats.Result{} // TODO: support stats in proto totalEntries = 1 queryType = queryTypeStats + case *ShardsResponse: + responseStats = &r.Response.Statistics + queryType = queryTypeShards default: level.Warn(logger).Log("msg", fmt.Sprintf("cannot compute stats, unexpected type: %T", resp)) } diff --git a/pkg/storage/bloom/v1/block.go b/pkg/storage/bloom/v1/block.go index c9eef5fa33027..b1e534df731bd 100644 --- a/pkg/storage/bloom/v1/block.go +++ b/pkg/storage/bloom/v1/block.go @@ -14,6 +14,7 @@ type BlockMetadata struct { } type Block struct { + metrics *Metrics // covers series pages index BlockIndex // covers bloom pages @@ -26,9 +27,10 @@ type Block struct { initialized bool } -func NewBlock(reader BlockReader) *Block { +func NewBlock(reader BlockReader, metrics *Metrics) *Block { return &Block{ - reader: reader, + reader: reader, + metrics: metrics, } } @@ -87,19 +89,6 @@ func combineChecksums(index, blooms uint32) uint32 { return index ^ blooms } -// convenience method -func (b *Block) Querier() *BlockQuerier { - return NewBlockQuerier(b) -} - -func (b *Block) Series() *LazySeriesIter { - return NewLazySeriesIter(b) -} - -func (b *Block) Blooms() *LazyBloomIter { - return NewLazyBloomIter(b) -} - func (b *Block) Metadata() (BlockMetadata, error) { if err := b.LoadHeaders(); err != nil { return BlockMetadata{}, err @@ -123,11 +112,16 @@ type BlockQuerier struct { cur *SeriesWithBloom } -func NewBlockQuerier(b *Block) *BlockQuerier { +// NewBlockQuerier returns a new BlockQuerier for the given block. +// WARNING: If noCapture is true, the underlying byte slice of the bloom page +// will be returned to the pool for efficiency. This can only safely be used +// when the underlying bloom bytes don't escape the decoder, i.e. +// when loading blooms for querying (bloom-gw) but not for writing (bloom-compactor). +func NewBlockQuerier(b *Block, noCapture bool) *BlockQuerier { return &BlockQuerier{ block: b, series: NewLazySeriesIter(b), - blooms: NewLazyBloomIter(b), + blooms: NewLazyBloomIter(b, noCapture), } } diff --git a/pkg/storage/bloom/v1/bloom.go b/pkg/storage/bloom/v1/bloom.go index 661c60970ab8e..d740c170fcc92 100644 --- a/pkg/storage/bloom/v1/bloom.go +++ b/pkg/storage/bloom/v1/bloom.go @@ -17,7 +17,7 @@ import ( // gateways to OOM. // Figure out a decent maximum page size that we can process. 
// TODO(chaudum): Make max page size configurable -var maxPageSize = 32 << 20 // 32MB +var maxPageSize = 64 << 20 // 64MB var ErrPageTooLarge = errors.Errorf("bloom page too large: size limit is %.1fMiB", float64(maxPageSize)/float64(1<<20)) type Bloom struct { @@ -86,7 +86,7 @@ func LazyDecodeBloomPage(r io.Reader, pool chunkenc.ReaderPool, page BloomPageHe } defer pool.PutReader(decompressor) - b := make([]byte, page.DecompressedLen) + b := BlockPool.Get(page.DecompressedLen)[:page.DecompressedLen] if _, err = io.ReadFull(decompressor, b); err != nil { return nil, errors.Wrap(err, "decompressing bloom page") @@ -97,11 +97,13 @@ func LazyDecodeBloomPage(r io.Reader, pool chunkenc.ReaderPool, page BloomPageHe return decoder, nil } +// shortcut to skip allocations when we know the page is not compressed func LazyDecodeBloomPageNoCompression(r io.Reader, page BloomPageHeader) (*BloomPageDecoder, error) { + // data + checksum if page.Len != page.DecompressedLen+4 { return nil, errors.New("the Len and DecompressedLen of the page do not match") } - data := make([]byte, page.Len) + data := BlockPool.Get(page.Len)[:page.Len] _, err := io.ReadFull(r, data) if err != nil { @@ -152,6 +154,21 @@ type BloomPageDecoder struct { err error } +// Relinquish returns the underlying byte slice to the pool +// for efficiency. It's intended to be used as a +// perf optimization. +// This can only safely be used when the underlying bloom +// bytes don't escape the decoder: +// on reads in the bloom-gw but not in the bloom-compactor +func (d *BloomPageDecoder) Relinquish() { + data := d.data + d.data = nil + + if cap(data) > 0 { + BlockPool.Put(data) + } +} + func (d *BloomPageDecoder) Reset() { d.err = nil d.cur = nil @@ -259,8 +276,10 @@ func (b *BloomBlock) DecodeHeaders(r io.ReadSeeker) (uint32, error) { return checksum, nil } -func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int) (*BloomPageDecoder, error) { +func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int, metrics *Metrics) (res *BloomPageDecoder, err error) { if pageIdx < 0 || pageIdx >= len(b.pageHeaders) { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonOOB).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonOOB).Add(float64(b.pageHeaders[pageIdx].DecompressedLen)) return nil, fmt.Errorf("invalid page (%d) for bloom page decoding", pageIdx) } @@ -268,16 +287,30 @@ func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int) (*BloomPageD // fmt.Printf("pageIdx=%d page=%+v size=%.2fMiB\n", pageIdx, page, float64(page.Len)/float64(1<<20)) if page.Len > maxPageSize { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonTooLarge).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonTooLarge).Add(float64(page.DecompressedLen)) return nil, ErrPageTooLarge } if _, err := r.Seek(int64(page.Offset), io.SeekStart); err != nil { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Add(float64(page.DecompressedLen)) return nil, errors.Wrap(err, "seeking to bloom page") } if b.schema.encoding == chunkenc.EncNone { - return LazyDecodeBloomPageNoCompression(r, page) + res, err = LazyDecodeBloomPageNoCompression(r, page) + } else { + res, err = LazyDecodeBloomPage(r, b.schema.DecompressorPool(), page) + } + + if err != nil { + metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeBloom, 
skipReasonErr).Add(float64(page.DecompressedLen)) + return nil, errors.Wrap(err, "decoding bloom page") } - return LazyDecodeBloomPage(r, b.schema.DecompressorPool(), page) + metrics.pagesRead.WithLabelValues(pageTypeBloom).Inc() + metrics.bytesRead.WithLabelValues(pageTypeBloom).Add(float64(page.DecompressedLen)) + return res, nil } diff --git a/pkg/storage/bloom/v1/bloom_querier.go b/pkg/storage/bloom/v1/bloom_querier.go index d0dbdc1b3b550..535e2a379250b 100644 --- a/pkg/storage/bloom/v1/bloom_querier.go +++ b/pkg/storage/bloom/v1/bloom_querier.go @@ -7,6 +7,8 @@ type BloomQuerier interface { } type LazyBloomIter struct { + usePool bool + b *Block // state @@ -16,9 +18,15 @@ type LazyBloomIter struct { curPage *BloomPageDecoder } -func NewLazyBloomIter(b *Block) *LazyBloomIter { +// NewLazyBloomIter returns a new lazy bloom iterator. +// If pool is true, the underlying byte slice of the bloom page +// will be returned to the pool for efficiency. +// This can only safely be used when the underlying bloom +// bytes don't escape the decoder. +func NewLazyBloomIter(b *Block, pool bool) *LazyBloomIter { return &LazyBloomIter{ - b: b, + usePool: pool, + b: b, } } @@ -39,12 +47,18 @@ func (it *LazyBloomIter) Seek(offset BloomOffset) { // load the desired page if it.curPageIndex != offset.Page || it.curPage == nil { + // drop the current page if it exists and + // we're using the pool + if it.curPage != nil && it.usePool { + it.curPage.Relinquish() + } + r, err := it.b.reader.Blooms() if err != nil { it.err = errors.Wrap(err, "getting blooms reader") return } - decoder, err := it.b.blooms.BloomPageDecoder(r, offset.Page) + decoder, err := it.b.blooms.BloomPageDecoder(r, offset.Page, it.b.metrics) if err != nil { it.err = errors.Wrap(err, "loading bloom page") return @@ -83,6 +97,7 @@ func (it *LazyBloomIter) next() bool { it.curPage, err = it.b.blooms.BloomPageDecoder( r, it.curPageIndex, + it.b.metrics, ) if err != nil { it.err = err @@ -96,8 +111,14 @@ func (it *LazyBloomIter) next() bool { if it.curPage.Err() != nil { return false } + // we've exhausted the current page, progress to next it.curPageIndex++ + // drop the current page if it exists and + // we're using the pool + if it.usePool { + it.curPage.Relinquish() + } it.curPage = nil continue } diff --git a/pkg/storage/bloom/v1/bounds.go b/pkg/storage/bloom/v1/bounds.go index fd538ed161c09..542ba2c972790 100644 --- a/pkg/storage/bloom/v1/bounds.go +++ b/pkg/storage/bloom/v1/bounds.go @@ -3,12 +3,15 @@ package v1 import ( "fmt" "hash" + "math" "strings" + "unsafe" "github.com/pkg/errors" "github.com/prometheus/common/model" "golang.org/x/exp/slices" + "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/util/encoding" ) @@ -20,6 +23,30 @@ const ( After ) +var ( + // FullBounds is the bounds that covers the entire fingerprint space + FullBounds = NewBounds(0, model.Fingerprint(math.MaxUint64)) +) + +type FingerprintBounds struct { + Min, Max model.Fingerprint +} + +// Proto compat +// compiler check ensuring equal repr of underlying types +var _ FingerprintBounds = FingerprintBounds(logproto.FPBounds{}) + +func BoundsFromProto(pb logproto.FPBounds) FingerprintBounds { + return FingerprintBounds(pb) +} + +// Unsafe cast to avoid allocation. 
This _requires_ that the underlying types are the same +// which is checked by the compiler above +func MultiBoundsFromProto(pb []logproto.FPBounds) MultiFingerprintBounds { + //nolint:unconvert + return MultiFingerprintBounds(*(*MultiFingerprintBounds)(unsafe.Pointer(&pb))) +} + // ParseBoundsFromAddr parses a fingerprint bounds from a string func ParseBoundsFromAddr(s string) (FingerprintBounds, error) { parts := strings.Split(s, "-") @@ -40,10 +67,6 @@ func ParseBoundsFromParts(a, b string) (FingerprintBounds, error) { return NewBounds(minFingerprint, maxFingerprint), nil } -type FingerprintBounds struct { - Min, Max model.Fingerprint -} - func NewBounds(min, max model.Fingerprint) FingerprintBounds { return FingerprintBounds{Min: min, Max: max} } @@ -91,8 +114,16 @@ func (b FingerprintBounds) Match(fp model.Fingerprint) bool { return b.Cmp(fp) == Overlap } -// GetFromThrough implements TSDBs FingerprintFilter interface +// GetFromThrough implements TSDBs FingerprintFilter interface, +// NB(owen-d): adjusts to return `[from,through)` instead of `[from,through]` which the +// fingerprint bounds struct tracks. func (b FingerprintBounds) GetFromThrough() (model.Fingerprint, model.Fingerprint) { + from, through := b.Bounds() + return from, max(through+1, through) +} + +// Bounds returns the inclusive bounds [from,through] +func (b FingerprintBounds) Bounds() (model.Fingerprint, model.Fingerprint) { return b.Min, b.Max } diff --git a/pkg/storage/bloom/v1/bounds_test.go b/pkg/storage/bloom/v1/bounds_test.go index 4dd01e60c1238..98fec7b0aafa0 100644 --- a/pkg/storage/bloom/v1/bounds_test.go +++ b/pkg/storage/bloom/v1/bounds_test.go @@ -5,8 +5,29 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" + + "github.com/grafana/loki/pkg/logproto" ) +func TestBoundsFromProto(t *testing.T) { + bounds := BoundsFromProto(logproto.FPBounds{ + Min: 10, + Max: 2000, + }) + assert.Equal(t, NewBounds(10, 2000), bounds) +} + +func TestMultiBoundsFromProto(t *testing.T) { + bounds := MultiBoundsFromProto([]logproto.FPBounds{ + {Min: 10, Max: 2000}, + {Min: 2001, Max: 4000}, + }) + assert.Equal(t, MultiFingerprintBounds{ + NewBounds(10, 2000), + NewBounds(2001, 4000), + }, bounds) +} + func Test_ParseFingerprint(t *testing.T) { t.Parallel() fp, err := model.ParseFingerprint("7d0") diff --git a/pkg/storage/bloom/v1/builder_test.go b/pkg/storage/bloom/v1/builder_test.go index 8324475662c58..540d0a768ca7e 100644 --- a/pkg/storage/bloom/v1/builder_test.go +++ b/pkg/storage/bloom/v1/builder_test.go @@ -116,8 +116,8 @@ func TestBlockBuilder_RoundTrip(t *testing.T) { processedData = data[:lastProcessedIdx] } - block := NewBlock(tc.reader) - querier := NewBlockQuerier(block) + block := NewBlock(tc.reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false) err = block.LoadHeaders() require.Nil(t, err) @@ -218,7 +218,7 @@ func TestMergeBuilder(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](data[min:max]) _, err = builder.BuildFrom(itr) require.Nil(t, err) - blocks = append(blocks, NewPeekingIter[*SeriesWithBloom](NewBlockQuerier(NewBlock(reader)))) + blocks = append(blocks, NewPeekingIter[*SeriesWithBloom](NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false))) } // We're not testing the ability to extend a bloom in this test @@ -251,8 +251,8 @@ func TestMergeBuilder(t *testing.T) { _, _, err = mergeBuilder.Build(builder) require.Nil(t, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := 
NewBlockQuerier(block, false) EqualIterators[*SeriesWithBloom]( t, @@ -295,8 +295,8 @@ func TestBlockReset(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](data) _, err = builder.BuildFrom(itr) require.Nil(t, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false) rounds := make([][]model.Fingerprint, 2) @@ -361,8 +361,8 @@ func TestMergeBuilder_Roundtrip(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](xs[minIdx:maxIdx]) _, err = builder.BuildFrom(itr) require.Nil(t, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, false) // rather than use the block querier directly, collect it's data // so we can use it in a few places later @@ -423,7 +423,7 @@ func TestMergeBuilder_Roundtrip(t *testing.T) { // ensure the new block contains one copy of all the data // by comparing it against an iterator over the source data - mergedBlockQuerier := NewBlockQuerier(NewBlock(reader)) + mergedBlockQuerier := NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false) sourceItr := NewSliceIter[*SeriesWithBloom](PointerSlice[SeriesWithBloom](xs)) EqualIterators[*SeriesWithBloom]( diff --git a/pkg/storage/bloom/v1/fuse_test.go b/pkg/storage/bloom/v1/fuse_test.go index b5a5c22984559..7ca7267b6ecc7 100644 --- a/pkg/storage/bloom/v1/fuse_test.go +++ b/pkg/storage/bloom/v1/fuse_test.go @@ -48,8 +48,8 @@ func TestFusedQuerier(t *testing.T) { _, err = builder.BuildFrom(itr) require.NoError(t, err) require.False(t, itr.Next()) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, true) n := 2 nReqs := numSeries / n @@ -142,8 +142,8 @@ func setupBlockForBenchmark(b *testing.B) (*BlockQuerier, [][]Request, []chan Ou itr := NewSliceIter[SeriesWithBloom](data) _, err = builder.BuildFrom(itr) require.Nil(b, err) - block := NewBlock(reader) - querier := NewBlockQuerier(block) + block := NewBlock(reader, NewMetrics(nil)) + querier := NewBlockQuerier(block, true) numRequestChains := 100 seriesPerRequest := 100 diff --git a/pkg/storage/bloom/v1/index.go b/pkg/storage/bloom/v1/index.go index 838ba891cb2f6..c69b4eb292be3 100644 --- a/pkg/storage/bloom/v1/index.go +++ b/pkg/storage/bloom/v1/index.go @@ -151,7 +151,16 @@ func (b *BlockIndex) DecodeHeaders(r io.ReadSeeker) (uint32, error) { } // decompress page and return an iterator over the bytes -func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHeaderWithOffset) (*SeriesPageDecoder, error) { +func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHeaderWithOffset, metrics *Metrics) (res *SeriesPageDecoder, err error) { + defer func() { + if err != nil { + metrics.pagesSkipped.WithLabelValues(pageTypeSeries, skipReasonErr).Inc() + metrics.bytesSkipped.WithLabelValues(pageTypeSeries).Add(float64(header.DecompressedLen)) + } else { + metrics.pagesRead.WithLabelValues(pageTypeSeries).Inc() + metrics.bytesRead.WithLabelValues(pageTypeSeries).Add(float64(header.DecompressedLen)) + } + }() if _, err := r.Seek(int64(header.Offset), io.SeekStart); err != nil { return nil, errors.Wrap(err, "seeking to series page") @@ -159,7 +168,7 @@ func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHead data := BlockPool.Get(header.Len)[:header.Len] defer BlockPool.Put(data) - _, err := io.ReadFull(r, data) + _, err = io.ReadFull(r, 
data) if err != nil { return nil, errors.Wrap(err, "reading series page") } @@ -180,7 +189,7 @@ func (b *BlockIndex) NewSeriesPageDecoder(r io.ReadSeeker, header SeriesPageHead return nil, errors.Wrap(err, "decompressing series page") } - res := &SeriesPageDecoder{ + res = &SeriesPageDecoder{ data: decompressed, header: header.SeriesHeader, } @@ -221,8 +230,8 @@ func aggregateHeaders(xs []SeriesHeader) SeriesHeader { return SeriesHeader{} } - fromFp, _ := xs[0].Bounds.GetFromThrough() - _, throughFP := xs[len(xs)-1].Bounds.GetFromThrough() + fromFp, _ := xs[0].Bounds.Bounds() + _, throughFP := xs[len(xs)-1].Bounds.Bounds() res := SeriesHeader{ Bounds: NewBounds(fromFp, throughFP), } diff --git a/pkg/storage/bloom/v1/index_querier.go b/pkg/storage/bloom/v1/index_querier.go index 142b6423185b6..8ba984d3df31c 100644 --- a/pkg/storage/bloom/v1/index_querier.go +++ b/pkg/storage/bloom/v1/index_querier.go @@ -74,6 +74,7 @@ func (it *LazySeriesIter) Seek(fp model.Fingerprint) error { it.curPage, err = it.b.index.NewSeriesPageDecoder( r, page, + it.b.metrics, ) if err != nil { return err @@ -107,6 +108,7 @@ func (it *LazySeriesIter) next() bool { it.curPage, err = it.b.index.NewSeriesPageDecoder( r, curHeader, + it.b.metrics, ) if err != nil { it.err = err diff --git a/pkg/storage/bloom/v1/metrics.go b/pkg/storage/bloom/v1/metrics.go index 138580a435054..c2204bf275505 100644 --- a/pkg/storage/bloom/v1/metrics.go +++ b/pkg/storage/bloom/v1/metrics.go @@ -3,6 +3,8 @@ package v1 import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/pkg/util/constants" ) type Metrics struct { @@ -15,6 +17,11 @@ type Metrics struct { blockSeriesIterated prometheus.Counter tokensTotal prometheus.Counter insertsTotal *prometheus.CounterVec + + pagesRead *prometheus.CounterVec + pagesSkipped *prometheus.CounterVec + bytesRead *prometheus.CounterVec + bytesSkipped *prometheus.CounterVec } const ( @@ -26,49 +33,86 @@ const ( collisionTypeFalse = "false" collisionTypeTrue = "true" collisionTypeCache = "cache" + + pageTypeBloom = "bloom" + pageTypeSeries = "series" + + skipReasonTooLarge = "too_large" + skipReasonErr = "err" + skipReasonOOB = "out_of_bounds" ) func NewMetrics(r prometheus.Registerer) *Metrics { return &Metrics{ sbfCreationTime: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "bloom_creation_time_total", - Help: "Time spent creating scalable bloom filters", + Namespace: constants.Loki, + Name: "bloom_creation_time_total", + Help: "Time spent creating scalable bloom filters", }), bloomSize: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_size", - Help: "Size of the bloom filter in bytes", - Buckets: prometheus.ExponentialBucketsRange(128, 16777216, 8), + Namespace: constants.Loki, + Name: "bloom_size", + Help: "Size of the bloom filter in bytes", + Buckets: prometheus.ExponentialBucketsRange(1<<10, 512<<20, 10), }), hammingWeightRatio: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_hamming_weight_ratio", - Help: "Ratio of the hamming weight of the bloom filter to the number of bits in the bloom filter", - Buckets: prometheus.ExponentialBucketsRange(0.001, 1, 12), + Namespace: constants.Loki, + Name: "bloom_hamming_weight_ratio", + Help: "Ratio of the hamming weight of the bloom filter to the number of bits in the bloom filter", + Buckets: prometheus.ExponentialBucketsRange(0.001, 1, 12), }), estimatedCount: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - 
Name: "bloom_estimated_count", - Help: "Estimated number of elements in the bloom filter", - Buckets: prometheus.ExponentialBucketsRange(1, 33554432, 10), + Namespace: constants.Loki, + Name: "bloom_estimated_count", + Help: "Estimated number of elements in the bloom filter", + Buckets: prometheus.ExponentialBucketsRange(1, 33554432, 10), }), chunksIndexed: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Name: "bloom_chunks_indexed_total", - Help: "Number of chunks indexed in bloom filters, partitioned by type. Type can be iterated or copied, where iterated indicates the chunk data was fetched and ngrams for it's contents generated whereas copied indicates the chunk already existed in another source block and was copied to the new block", + Namespace: constants.Loki, + Name: "bloom_chunks_indexed_total", + Help: "Number of chunks indexed in bloom filters, partitioned by type. Type can be iterated or copied, where iterated indicates the chunk data was fetched and ngrams for it's contents generated whereas copied indicates the chunk already existed in another source block and was copied to the new block", }, []string{"type"}), chunksPerSeries: promauto.With(r).NewHistogram(prometheus.HistogramOpts{ - Name: "bloom_chunks_per_series", - Help: "Number of chunks per series. Can be copied from an existing bloom or iterated", - Buckets: prometheus.ExponentialBucketsRange(1, 4096, 10), + Namespace: constants.Loki, + Name: "bloom_chunks_per_series", + Help: "Number of chunks per series. Can be copied from an existing bloom or iterated", + Buckets: prometheus.ExponentialBucketsRange(1, 4096, 10), }), blockSeriesIterated: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "bloom_block_series_iterated_total", - Help: "Number of series iterated in existing blocks while generating new blocks", + Namespace: constants.Loki, + Name: "bloom_block_series_iterated_total", + Help: "Number of series iterated in existing blocks while generating new blocks", }), tokensTotal: promauto.With(r).NewCounter(prometheus.CounterOpts{ - Name: "bloom_tokens_total", - Help: "Number of tokens processed", + Namespace: constants.Loki, + Name: "bloom_tokens_total", + Help: "Number of tokens processed", }), insertsTotal: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ - Name: "bloom_inserts_total", - Help: "Number of inserts into the bloom filter. collision type may be `false` (no collision), `cache` (found in token cache) or true (found in bloom filter). token_type may be either `raw` (the original ngram) or `chunk_prefixed` (the ngram with the chunk prefix)", + Namespace: constants.Loki, + Name: "bloom_inserts_total", + Help: "Number of inserts into the bloom filter. collision type may be `false` (no collision), `cache` (found in token cache) or true (found in bloom filter). 
token_type may be either `raw` (the original ngram) or `chunk_prefixed` (the ngram with the chunk prefix)", }, []string{"token_type", "collision"}), + + pagesRead: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_pages_read_total", + Help: "Number of bloom pages read", + }, []string{"type"}), + pagesSkipped: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_pages_skipped_total", + Help: "Number of bloom pages skipped during query iteration", + }, []string{"type", "reason"}), + bytesRead: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_bytes_read_total", + Help: "Number of bytes read from bloom pages", + }, []string{"type"}), + bytesSkipped: promauto.With(r).NewCounterVec(prometheus.CounterOpts{ + Namespace: constants.Loki, + Name: "bloom_bytes_skipped_total", + Help: "Number of bytes skipped during query iteration", + }, []string{"type", "reason"}), } } diff --git a/pkg/storage/bloom/v1/test_util.go b/pkg/storage/bloom/v1/test_util.go index 3cd021de5f5ee..ed1dd5cc978c1 100644 --- a/pkg/storage/bloom/v1/test_util.go +++ b/pkg/storage/bloom/v1/test_util.go @@ -41,7 +41,7 @@ func MakeBlock(t testing.TB, nth int, fromFp, throughFp model.Fingerprint, fromT itr := NewSliceIter[SeriesWithBloom](data) _, err = builder.BuildFrom(itr) require.Nil(t, err) - block := NewBlock(reader) + block := NewBlock(reader, NewMetrics(nil)) return block, data, keys } diff --git a/pkg/storage/bloom/v1/util.go b/pkg/storage/bloom/v1/util.go index e6ad69a248fed..6b83e298a3957 100644 --- a/pkg/storage/bloom/v1/util.go +++ b/pkg/storage/bloom/v1/util.go @@ -32,10 +32,10 @@ var ( }, } - // 4KB -> 64MB + // 4KB -> 128MB BlockPool = BytePool{ pool: pool.New( - 4<<10, 64<<20, 4, + 4<<10, 128<<20, 4, func(size int) interface{} { return make([]byte, size) }), diff --git a/pkg/storage/chunk/predicate.go b/pkg/storage/chunk/predicate.go index 62a91c7a46437..e200f28fb55e8 100644 --- a/pkg/storage/chunk/predicate.go +++ b/pkg/storage/chunk/predicate.go @@ -6,6 +6,7 @@ import ( "github.com/grafana/loki/pkg/querier/plan" ) +// TODO(owen-d): rename. This is not a predicate and is confusing. type Predicate struct { Matchers []*labels.Matcher plan *plan.QueryPlan diff --git a/pkg/storage/store.go b/pkg/storage/store.go index 706f630931cd2..b582f7e6c7156 100644 --- a/pkg/storage/store.go +++ b/pkg/storage/store.go @@ -364,9 +364,11 @@ func decodeReq(req logql.QueryParams) ([]*labels.Matcher, model.Time, model.Time return matchers, from, through, nil } +// TODO(owen-d): refactor this. 
Injecting shard labels via matchers is a big hack and we shouldn't continue +// doing it, _but_ it requires adding `fingerprintfilter` support to much of our storage interfaces func injectShardLabel(shards []string, matchers []*labels.Matcher) ([]*labels.Matcher, error) { if shards != nil { - parsed, err := logql.ParseShards(shards) + parsed, _, err := logql.ParseShards(shards) if err != nil { return nil, err } diff --git a/pkg/storage/stores/composite_store.go b/pkg/storage/stores/composite_store.go index d0fb516dcbff4..7df10aa076480 100644 --- a/pkg/storage/stores/composite_store.go +++ b/pkg/storage/stores/composite_store.go @@ -13,6 +13,8 @@ import ( "github.com/grafana/loki/pkg/storage/stores/index" "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" "github.com/grafana/loki/pkg/storage/stores/index/stats" + tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util" ) @@ -206,6 +208,83 @@ func (c CompositeStore) Volume(ctx context.Context, userID string, from, through return res, err } +func (c CompositeStore) GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + // TODO(owen-d): improve. Since shards aren't easily merge-able, + // we choose the store which returned the highest shard count. + // This is only used when a query crosses a schema boundary + var groups []*logproto.ShardsResponse + err := c.forStores(ctx, from, through, func(innerCtx context.Context, from, through model.Time, store Store) error { + shards, err := store.GetShards(innerCtx, userID, from, through, targetBytesPerShard, predicate) + if err != nil { + return err + } + groups = append(groups, shards) + return nil + }) + + if err != nil { + return nil, err + } + + switch { + case len(groups) == 1: + return groups[0], nil + case len(groups) == 0: + return nil, nil + default: + sort.Slice(groups, func(i, j int) bool { + return len(groups[i].Shards) > len(groups[j].Shards) + }) + return groups[0], nil + } +} + +func (c CompositeStore) HasForSeries(from, through model.Time) (sharding.ForSeries, bool) { + var impls []sharding.ForSeries + _ = c.forStores(context.Background(), from, through, func(_ context.Context, from, through model.Time, store Store) error { + impl, ok := store.HasForSeries(from, through) + if ok { + impls = append(impls, impl) + } + return nil + }) + + if len(impls) == 0 { + return nil, false + } + + wrapped := sharding.ForSeriesFunc( + func( + ctx context.Context, + userID string, + fpFilter tsdb_index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []tsdb_index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, + ) error { + for _, impl := range impls { + if err := impl.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...); err != nil { + return err + } + } + return nil + }, + ) + + return wrapped, true +} + func (c CompositeStore) GetChunkFetcher(tm model.Time) *fetcher.Fetcher { // find the schema with the lowest start _after_ tm j := sort.Search(len(c.stores), func(j int) bool { diff --git a/pkg/storage/stores/composite_store_entry.go b/pkg/storage/stores/composite_store_entry.go index 7edbdab404fe6..200b701449a5d 100644 --- a/pkg/storage/stores/composite_store_entry.go +++ b/pkg/storage/stores/composite_store_entry.go @@ -19,6 +19,7 @@ 
import ( "github.com/grafana/loki/pkg/storage/errors" "github.com/grafana/loki/pkg/storage/stores/index" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" util_log "github.com/grafana/loki/pkg/util/log" "github.com/grafana/loki/pkg/util/spanlogger" "github.com/grafana/loki/pkg/util/validation" @@ -143,6 +144,25 @@ func (c *storeEntry) Volume(ctx context.Context, userID string, from, through mo return c.indexReader.Volume(ctx, userID, from, through, limit, targetLabels, aggregateBy, matchers...) } +func (c *storeEntry) GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + _, err := c.validateQueryTimeRange(ctx, userID, &from, &through) + if err != nil { + return nil, err + } + + return c.indexReader.GetShards(ctx, userID, from, through, targetBytesPerShard, predicate) +} + +func (c *storeEntry) HasForSeries(from, through model.Time) (sharding.ForSeries, bool) { + return c.indexReader.HasForSeries(from, through) +} + func (c *storeEntry) validateQueryTimeRange(ctx context.Context, userID string, from *model.Time, through *model.Time) (bool, error) { //nolint:ineffassign,staticcheck //Leaving ctx even though we don't currently use it, we want to make it available for when we might need it and hopefully will ensure us using the correct context at that time diff --git a/pkg/storage/stores/composite_store_test.go b/pkg/storage/stores/composite_store_test.go index b2878487f4606..d3fc6ba4dacfc 100644 --- a/pkg/storage/stores/composite_store_test.go +++ b/pkg/storage/stores/composite_store_test.go @@ -18,6 +18,7 @@ import ( "github.com/grafana/loki/pkg/storage/chunk" "github.com/grafana/loki/pkg/storage/chunk/fetcher" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type mockStore int @@ -60,6 +61,14 @@ func (m mockStore) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, return nil, nil } +func (m mockStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (m mockStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (m mockStore) Stop() {} func TestCompositeStore(t *testing.T) { diff --git a/pkg/storage/stores/index/index.go b/pkg/storage/stores/index/index.go index 41746e346f20b..db3ccfd2578e6 100644 --- a/pkg/storage/stores/index/index.go +++ b/pkg/storage/stores/index/index.go @@ -11,6 +11,8 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/storage/chunk" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" loki_instrument "github.com/grafana/loki/pkg/util/instrument" ) @@ -28,6 +30,17 @@ type BaseReader interface { type StatsReader interface { Stats(ctx context.Context, userID string, from, through model.Time, matchers ...*labels.Matcher) (*stats.Stats, error) Volume(ctx context.Context, userID string, from, through model.Time, limit int32, targetLabels []string, aggregateBy string, matchers ...*labels.Matcher) (*logproto.VolumeResponse, error) + GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard 
uint64, + predicate chunk.Predicate, + ) (*logproto.ShardsResponse, error) + + // If the underlying index supports it, this will return the ForSeries interface + // which is used in bloom-filter accelerated sharding calculation optimization. + HasForSeries(from, through model.Time) (sharding.ForSeries, bool) } type Reader interface { @@ -137,6 +150,24 @@ func (m MonitoredReaderWriter) Volume(ctx context.Context, userID string, from, return vol, nil } +func (m MonitoredReaderWriter) GetShards( + ctx context.Context, + userID string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + var shards *logproto.ShardsResponse + if err := loki_instrument.TimeRequest(ctx, "shards", instrument.NewHistogramCollector(m.metrics.indexQueryLatency), instrument.ErrorCode, func(ctx context.Context) error { + var err error + shards, err = m.rw.GetShards(ctx, userID, from, through, targetBytesPerShard, predicate) + return err + }); err != nil { + return nil, err + } + return shards, nil +} + func (m MonitoredReaderWriter) SetChunkFilterer(chunkFilter chunk.RequestChunkFilterer) { m.rw.SetChunkFilterer(chunkFilter) } @@ -146,3 +177,29 @@ func (m MonitoredReaderWriter) IndexChunk(ctx context.Context, from, through mod return m.rw.IndexChunk(ctx, from, through, chk) }) } + +func (m MonitoredReaderWriter) HasForSeries(from, through model.Time) (sharding.ForSeries, bool) { + if impl, ok := m.rw.HasForSeries(from, through); ok { + wrapped := sharding.ForSeriesFunc( + func( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, + ) error { + return loki_instrument.TimeRequest(ctx, "for_series", instrument.NewHistogramCollector(m.metrics.indexQueryLatency), instrument.ErrorCode, func(ctx context.Context) error { + return impl.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) 
+ }) + }, + ) + return wrapped, true + } + return nil, false +} diff --git a/pkg/storage/stores/series/series_index_gateway_store.go b/pkg/storage/stores/series/series_index_gateway_store.go index 00059fe16c1a3..c3af8c0c8d3c6 100644 --- a/pkg/storage/stores/series/series_index_gateway_store.go +++ b/pkg/storage/stores/series/series_index_gateway_store.go @@ -13,15 +13,29 @@ import ( "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/storage/chunk" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) +// NB(owen-d): mostly modeled off of the proto-generated `logproto.IndexGatewayClient`, +// but decoupled from explicit GRPC dependencies to work well with streaming grpc methods +type GatewayClient interface { + GetChunkRef(ctx context.Context, in *logproto.GetChunkRefRequest) (*logproto.GetChunkRefResponse, error) + GetSeries(ctx context.Context, in *logproto.GetSeriesRequest) (*logproto.GetSeriesResponse, error) + LabelNamesForMetricName(ctx context.Context, in *logproto.LabelNamesForMetricNameRequest) (*logproto.LabelResponse, error) + LabelValuesForMetricName(ctx context.Context, in *logproto.LabelValuesForMetricNameRequest) (*logproto.LabelResponse, error) + GetStats(ctx context.Context, in *logproto.IndexStatsRequest) (*logproto.IndexStatsResponse, error) + GetVolume(ctx context.Context, in *logproto.VolumeRequest) (*logproto.VolumeResponse, error) + + GetShards(ctx context.Context, in *logproto.ShardsRequest) (*logproto.ShardsResponse, error) +} + // IndexGatewayClientStore implements pkg/storage/stores/index.ReaderWriter type IndexGatewayClientStore struct { - client logproto.IndexGatewayClient + client GatewayClient logger log.Logger } -func NewIndexGatewayClientStore(client logproto.IndexGatewayClient, logger log.Logger) *IndexGatewayClientStore { +func NewIndexGatewayClientStore(client GatewayClient, logger log.Logger) *IndexGatewayClientStore { return &IndexGatewayClientStore{ client: client, logger: logger, @@ -111,6 +125,25 @@ func (c *IndexGatewayClientStore) Volume(ctx context.Context, _ string, from, th }) } +func (c *IndexGatewayClientStore) GetShards( + ctx context.Context, + _ string, + from, through model.Time, + targetBytesPerShard uint64, + predicate chunk.Predicate, +) (*logproto.ShardsResponse, error) { + resp, err := c.client.GetShards(ctx, &logproto.ShardsRequest{ + From: from, + Through: through, + Query: predicate.Plan().AST.String(), + TargetBytesPerShard: targetBytesPerShard, + }) + if err != nil { + return nil, err + } + return resp, nil +} + func (c *IndexGatewayClientStore) SetChunkFilterer(_ chunk.RequestChunkFilterer) { level.Warn(c.logger).Log("msg", "SetChunkFilterer called on index gateway client store, but it does not support it") } @@ -118,3 +151,10 @@ func (c *IndexGatewayClientStore) SetChunkFilterer(_ chunk.RequestChunkFilterer) func (c *IndexGatewayClientStore) IndexChunk(_ context.Context, _, _ model.Time, _ chunk.Chunk) error { return fmt.Errorf("index writes not supported on index gateway client") } + +// IndexGatewayClientStore does not implement tsdb.ForSeries; +// that is implemented by the index-gws themselves and will be +// called during the `GetShards() invocation` +func (c *IndexGatewayClientStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} diff --git a/pkg/storage/stores/series/series_index_gateway_store_test.go b/pkg/storage/stores/series/series_index_gateway_store_test.go index 
125973eb49d42..8c65881d3c5b3 100644
--- a/pkg/storage/stores/series/series_index_gateway_store_test.go
+++ b/pkg/storage/stores/series/series_index_gateway_store_test.go
@@ -7,20 +7,15 @@ import (
 	"github.com/go-kit/log"
 	"github.com/prometheus/common/model"
 	"github.com/stretchr/testify/require"
-	"google.golang.org/grpc"
 
 	"github.com/grafana/loki/pkg/logproto"
 )
 
 type fakeClient struct {
-	logproto.IndexGatewayClient
+	GatewayClient
 }
 
-func (fakeClient) GetChunkRef(_ context.Context, _ *logproto.GetChunkRefRequest, _ ...grpc.CallOption) (*logproto.GetChunkRefResponse, error) {
-	return &logproto.GetChunkRefResponse{}, nil
-}
-
-func (fakeClient) GetSeries(_ context.Context, _ *logproto.GetSeriesRequest, _ ...grpc.CallOption) (*logproto.GetSeriesResponse, error) {
+func (fakeClient) GetSeries(_ context.Context, _ *logproto.GetSeriesRequest) (*logproto.GetSeriesResponse, error) {
 	return &logproto.GetSeriesResponse{}, nil
 }
 
diff --git a/pkg/storage/stores/series/series_index_store.go b/pkg/storage/stores/series/series_index_store.go
index 5d17963180556..50a036db3762c 100644
--- a/pkg/storage/stores/series/series_index_store.go
+++ b/pkg/storage/stores/series/series_index_store.go
@@ -3,6 +3,7 @@ package series
 import (
 	"context"
 	"fmt"
+	"math"
 	"sort"
 	"sync"
 
@@ -26,6 +27,7 @@ import (
 	"github.com/grafana/loki/pkg/storage/stores"
 	"github.com/grafana/loki/pkg/storage/stores/index/stats"
 	series_index "github.com/grafana/loki/pkg/storage/stores/series/index"
+	"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding"
 	"github.com/grafana/loki/pkg/util"
 	"github.com/grafana/loki/pkg/util/constants"
 	"github.com/grafana/loki/pkg/util/extract"
@@ -758,3 +760,30 @@ func (c *IndexReaderWriter) Stats(_ context.Context, _ string, _, _ model.Time,
 func (c *IndexReaderWriter) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) {
 	return nil, nil
 }
+
+// old index stores do not implement dynamic sharding -- skip
+func (c *IndexReaderWriter) GetShards(
+	_ context.Context,
+	_ string,
+	_, _ model.Time,
+	_ uint64,
+	_ chunk.Predicate,
+) (*logproto.ShardsResponse, error) {
+	// should not be called for legacy indices at all, so just return a single shard covering everything
+	// could be improved by reading schema shards
+	return &logproto.ShardsResponse{
+		Shards: []logproto.Shard{
+			{
+				Bounds: logproto.FPBounds{
+					Min: 0,
+					Max: math.MaxUint64,
+				},
+			},
+		},
+	}, nil
+}
+
+// old index stores do not implement tsdb.ForSeries -- skip
+func (c *IndexReaderWriter) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) {
+	return nil, false
+}
diff --git a/pkg/storage/stores/shipper/bloomshipper/cache.go b/pkg/storage/stores/shipper/bloomshipper/cache.go
index b4118a55f61fb..ebd2d6a53ff8b 100644
--- a/pkg/storage/stores/shipper/bloomshipper/cache.go
+++ b/pkg/storage/stores/shipper/bloomshipper/cache.go
@@ -95,8 +95,8 @@ type BlockDirectory struct {
 	size int64
 }
 
-func (b BlockDirectory) Block() *v1.Block {
-	return v1.NewBlock(v1.NewDirectoryBlockReader(b.Path))
+func (b BlockDirectory) Block(metrics *v1.Metrics) *v1.Block {
+	return v1.NewBlock(v1.NewDirectoryBlockReader(b.Path), metrics)
 }
 
 func (b BlockDirectory) Size() int64 {
 	return b.size
 }
 
@@ -120,9 +120,13 @@ func (b *BlockDirectory) resolveSize() error {
 
 // BlockQuerier returns a new block querier from the directory.
 // The passed function `close` is called when the returned querier is closed.
-func (b BlockDirectory) BlockQuerier(close func() error) *CloseableBlockQuerier { +func (b BlockDirectory) BlockQuerier( + usePool bool, + close func() error, + metrics *v1.Metrics, +) *CloseableBlockQuerier { return &CloseableBlockQuerier{ - BlockQuerier: v1.NewBlockQuerier(b.Block()), + BlockQuerier: v1.NewBlockQuerier(b.Block(metrics), usePool), BlockRef: b.BlockRef, close: close, } diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher.go b/pkg/storage/stores/shipper/bloomshipper/fetcher.go index e5779a7294acf..f2e40534a0957 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher.go @@ -26,6 +26,10 @@ var downloadQueueCapacity = 10000 type options struct { ignoreNotFound bool // ignore 404s from object storage; default=true fetchAsync bool // dispatch downloading of block and return immediately; default=false + // return bloom blocks to pool after iteration; default=false + // NB(owen-d): this can only be safely used when blooms are not captured outside + // of iteration or it can introduce use-after-free bugs + usePool bool } func (o *options) apply(opts ...FetchOption) { @@ -48,6 +52,12 @@ func WithFetchAsync(v bool) FetchOption { } } +func WithPool(v bool) FetchOption { + return func(opts *options) { + opts.usePool = v + } +} + type fetcher interface { FetchMetas(ctx context.Context, refs []MetaRef) ([]Meta, error) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) @@ -66,12 +76,21 @@ type Fetcher struct { q *downloadQueue[BlockRef, BlockDirectory] - cfg bloomStoreConfig - metrics *fetcherMetrics - logger log.Logger + cfg bloomStoreConfig + metrics *fetcherMetrics + bloomMetrics *v1.Metrics + logger log.Logger } -func NewFetcher(cfg bloomStoreConfig, client Client, metasCache cache.Cache, blocksCache Cache, reg prometheus.Registerer, logger log.Logger) (*Fetcher, error) { +func NewFetcher( + cfg bloomStoreConfig, + client Client, + metasCache cache.Cache, + blocksCache Cache, + reg prometheus.Registerer, + logger log.Logger, + bloomMetrics *v1.Metrics, +) (*Fetcher, error) { fetcher := &Fetcher{ cfg: cfg, client: client, @@ -79,6 +98,7 @@ func NewFetcher(cfg bloomStoreConfig, client Client, metasCache cache.Cache, blo blocksCache: blocksCache, localFSResolver: NewPrefixedResolver(cfg.workingDir, defaultKeyResolver{}), metrics: newFetcherMetrics(reg, constants.Loki, "bloom_store"), + bloomMetrics: bloomMetrics, logger: logger, } q, err := newDownloadQueue[BlockRef, BlockDirectory](downloadQueueCapacity, cfg.numWorkers, fetcher.processTask, logger) @@ -176,7 +196,7 @@ func (f *Fetcher) writeBackMetas(ctx context.Context, metas []Meta) error { // FetchBlocks implements fetcher func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) { // apply fetch options - cfg := &options{ignoreNotFound: true, fetchAsync: false} + cfg := &options{ignoreNotFound: true, fetchAsync: false, usePool: false} cfg.apply(opts...) 
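	// Illustrative sketch only, not part of this patch: a caller opts in to the
	// behaviours configured above via the functional options added in this file,
	// for example
	//
	//	queriers, err := f.FetchBlocks(ctx, refs, WithFetchAsync(true), WithPool(true))
	//
	// WithPool(true) is only safe when bloom pages are not retained outside of
	// iteration, per the note on the options struct.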
// first, resolve blocks from cache and enqueue missing blocks to download queue @@ -218,9 +238,13 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc } found++ f.metrics.blocksFound.Inc() - results[i] = dir.BlockQuerier(func() error { - return f.blocksCache.Release(ctx, key) - }) + results[i] = dir.BlockQuerier( + cfg.usePool, + func() error { + return f.blocksCache.Release(ctx, key) + }, + f.bloomMetrics, + ) } // fetchAsync defines whether the function may return early or whether it @@ -248,9 +272,13 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc case res := <-responses: found++ key := f.client.Block(refs[res.idx]).Addr() - results[res.idx] = res.item.BlockQuerier(func() error { - return f.blocksCache.Release(ctx, key) - }) + results[res.idx] = res.item.BlockQuerier( + cfg.usePool, + func() error { + return f.blocksCache.Release(ctx, key) + }, + f.bloomMetrics, + ) } } diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go index 78a681dac5014..e51d153098381 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go @@ -108,7 +108,7 @@ func TestMetasFetcher(t *testing.T) { c, err := NewBloomClient(cfg, oc, logger) require.NoError(t, err) - fetcher, err := NewFetcher(cfg, c, metasCache, nil, nil, logger) + fetcher, err := NewFetcher(cfg, c, metasCache, nil, nil, logger, v1.NewMetrics(nil)) require.NoError(t, err) // prepare metas cache @@ -286,7 +286,7 @@ func TestFetcher_LoadBlocksFromFS(t *testing.T) { c, err := NewBloomClient(cfg, oc, log.NewNopLogger()) require.NoError(t, err) - fetcher, err := NewFetcher(cfg, c, nil, nil, nil, log.NewNopLogger()) + fetcher, err := NewFetcher(cfg, c, nil, nil, nil, log.NewNopLogger(), v1.NewMetrics(nil)) require.NoError(t, err) found, missing, err := fetcher.loadBlocksFromFS(context.Background(), refs) @@ -314,7 +314,7 @@ func createBlockDir(t *testing.T, path string) { func TestFetcher_IsBlockDir(t *testing.T) { cfg := bloomStoreConfig{numWorkers: 1} - fetcher, _ := NewFetcher(cfg, nil, nil, nil, nil, log.NewNopLogger()) + fetcher, _ := NewFetcher(cfg, nil, nil, nil, nil, log.NewNopLogger(), v1.NewMetrics(nil)) t.Run("path does not exist", func(t *testing.T) { base := t.TempDir() diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index 9f099d683cf4e..c8a162dbd66b9 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -35,6 +35,11 @@ type Store interface { Stop() } +type StoreWithMetrics interface { + Store + BloomMetrics() *v1.Metrics +} + type bloomStoreConfig struct { workingDir string numWorkers int @@ -123,8 +128,8 @@ func (b *bloomStoreEntry) FetchMetas(ctx context.Context, params MetaSearchParam } // FetchBlocks implements Store. -func (b *bloomStoreEntry) FetchBlocks(ctx context.Context, refs []BlockRef, _ ...FetchOption) ([]*CloseableBlockQuerier, error) { - return b.fetcher.FetchBlocks(ctx, refs) +func (b *bloomStoreEntry) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) { + return b.fetcher.FetchBlocks(ctx, refs, opts...) } // Fetcher implements Store. 
@@ -144,12 +149,14 @@ func (b bloomStoreEntry) Stop() { } // Compiler check to ensure BloomStore implements the Store interface -var _ Store = &BloomStore{} +var _ StoreWithMetrics = &BloomStore{} type BloomStore struct { - stores []*bloomStoreEntry - storageConfig storage.Config - metrics *storeMetrics + stores []*bloomStoreEntry + storageConfig storage.Config + metrics *storeMetrics + bloomMetrics *v1.Metrics + logger log.Logger defaultKeyResolver // TODO(owen-d): impl schema aware resolvers } @@ -166,6 +173,7 @@ func NewBloomStore( store := &BloomStore{ storageConfig: storageConfig, metrics: newStoreMetrics(reg, constants.Loki, "bloom_store"), + bloomMetrics: v1.NewMetrics(reg), logger: logger, } @@ -204,7 +212,7 @@ func NewBloomStore( } regWithLabels := prometheus.WrapRegistererWith(prometheus.Labels{"store": periodicConfig.From.String()}, reg) - fetcher, err := NewFetcher(cfg, bloomClient, metasCache, blocksCache, regWithLabels, logger) + fetcher, err := NewFetcher(cfg, bloomClient, metasCache, blocksCache, regWithLabels, logger, store.bloomMetrics) if err != nil { return nil, errors.Wrapf(err, "creating fetcher for period %s", periodicConfig.From) } @@ -221,6 +229,10 @@ func NewBloomStore( return store, nil } +func (b *BloomStore) BloomMetrics() *v1.Metrics { + return b.bloomMetrics +} + // Impements KeyResolver func (b *BloomStore) Meta(ref MetaRef) (loc Location) { _ = b.storeDo(ref.StartTimestamp, func(s *bloomStoreEntry) error { diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go index 69553fc34b6c3..a2054e806b0cf 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go @@ -10,6 +10,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" + "github.com/gogo/status" "github.com/grafana/dskit/concurrency" "github.com/grafana/dskit/grpcclient" "github.com/grafana/dskit/instrument" @@ -23,11 +24,14 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc" + "google.golang.org/grpc/codes" "github.com/grafana/loki/pkg/distributor/clientpool" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/storage/stores/series/index" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util/constants" "github.com/grafana/loki/pkg/util/discovery" util_math "github.com/grafana/loki/pkg/util/math" @@ -237,78 +241,184 @@ func (s *GatewayClient) QueryIndex(_ context.Context, _ *logproto.QueryIndexRequ panic("not implemented") } -func (s *GatewayClient) GetChunkRef(ctx context.Context, in *logproto.GetChunkRefRequest, opts ...grpc.CallOption) (*logproto.GetChunkRefResponse, error) { +func (s *GatewayClient) GetChunkRef(ctx context.Context, in *logproto.GetChunkRefRequest) (*logproto.GetChunkRefResponse, error) { var ( resp *logproto.GetChunkRefResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetChunkRef(ctx, in, opts...) 
+ resp, err = client.GetChunkRef(ctx, in) return err }) return resp, err } -func (s *GatewayClient) GetSeries(ctx context.Context, in *logproto.GetSeriesRequest, opts ...grpc.CallOption) (*logproto.GetSeriesResponse, error) { +func (s *GatewayClient) GetSeries(ctx context.Context, in *logproto.GetSeriesRequest) (*logproto.GetSeriesResponse, error) { var ( resp *logproto.GetSeriesResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetSeries(ctx, in, opts...) + resp, err = client.GetSeries(ctx, in) return err }) return resp, err } -func (s *GatewayClient) LabelNamesForMetricName(ctx context.Context, in *logproto.LabelNamesForMetricNameRequest, opts ...grpc.CallOption) (*logproto.LabelResponse, error) { +func (s *GatewayClient) LabelNamesForMetricName(ctx context.Context, in *logproto.LabelNamesForMetricNameRequest) (*logproto.LabelResponse, error) { var ( resp *logproto.LabelResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.LabelNamesForMetricName(ctx, in, opts...) + resp, err = client.LabelNamesForMetricName(ctx, in) return err }) return resp, err } -func (s *GatewayClient) LabelValuesForMetricName(ctx context.Context, in *logproto.LabelValuesForMetricNameRequest, opts ...grpc.CallOption) (*logproto.LabelResponse, error) { +func (s *GatewayClient) LabelValuesForMetricName(ctx context.Context, in *logproto.LabelValuesForMetricNameRequest) (*logproto.LabelResponse, error) { var ( resp *logproto.LabelResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.LabelValuesForMetricName(ctx, in, opts...) + resp, err = client.LabelValuesForMetricName(ctx, in) return err }) return resp, err } -func (s *GatewayClient) GetStats(ctx context.Context, in *logproto.IndexStatsRequest, opts ...grpc.CallOption) (*logproto.IndexStatsResponse, error) { +func (s *GatewayClient) GetStats(ctx context.Context, in *logproto.IndexStatsRequest) (*logproto.IndexStatsResponse, error) { var ( resp *logproto.IndexStatsResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetStats(ctx, in, opts...) + resp, err = client.GetStats(ctx, in) return err }) return resp, err } -func (s *GatewayClient) GetVolume(ctx context.Context, in *logproto.VolumeRequest, opts ...grpc.CallOption) (*logproto.VolumeResponse, error) { +func (s *GatewayClient) GetVolume(ctx context.Context, in *logproto.VolumeRequest) (*logproto.VolumeResponse, error) { var ( resp *logproto.VolumeResponse err error ) err = s.poolDo(ctx, func(client logproto.IndexGatewayClient) error { - resp, err = client.GetVolume(ctx, in, opts...) + resp, err = client.GetVolume(ctx, in) return err }) return resp, err } +func (s *GatewayClient) GetShards( + ctx context.Context, + in *logproto.ShardsRequest, +) (res *logproto.ShardsResponse, err error) { + + // We try to get the shards from the index gateway, + // but if it's not implemented, we fall back to the stats. + // We limit the maximum number of errors to 2 to avoid + // cascading all requests to new node(s) when + // the idx-gw replicas start to update to a version + // which supports the new API. 
+ var ( + maxErrs = 2 + errCt int + ) + + if err := s.poolDoWithStrategy( + ctx, + func(client logproto.IndexGatewayClient) error { + perReplicaResult := &logproto.ShardsResponse{} + streamer, err := client.GetShards(ctx, in) + if err != nil { + return errors.Wrap(err, "get shards") + } + + // TODO(owen-d): stream currently unused (buffered) because query planning doesn't expect a streamed response, + // but can be improved easily in the future by using a stream here. + for { + resp, err := streamer.Recv() + if err == io.EOF { + break + } + if err != nil { + return errors.WithStack(err) + } + perReplicaResult.Shards = append(perReplicaResult.Shards, resp.Shards...) + perReplicaResult.Statistics.Merge(resp.Statistics) + } + + // Since `poolDo` retries on error, we only want to set the response if we got a successful response. + // This avoids cases where we add duplicates to the response on retries. + res = perReplicaResult + + return nil + }, + func(err error) bool { + errCt++ + return errCt <= maxErrs + }, + ); err != nil { + if isUnimplementedCallError(err) { + return s.getShardsFromStatsFallback(ctx, in) + } + return nil, err + } + return res, nil +} + +func (s *GatewayClient) getShardsFromStatsFallback( + ctx context.Context, + in *logproto.ShardsRequest, +) (*logproto.ShardsResponse, error) { + userID, err := tenant.TenantID(ctx) + if err != nil { + return nil, errors.Wrap(err, "index gateway client get tenant ID") + } + + p, err := indexgateway.ExtractShardRequestMatchersAndAST(in.Query) + if err != nil { + return nil, errors.Wrap(err, "failure while falling back to stats for shard calculation") + + } + + stats, err := s.GetStats( + ctx, + &logproto.IndexStatsRequest{ + From: in.From, + Through: in.Through, + Matchers: (&syntax.MatchersExpr{Mts: p.Matchers}).String(), + }, + ) + if err != nil { + return nil, err + } + + var strategy sharding.PowerOfTwoSharding + shards := strategy.ShardsFor(stats.Bytes, uint64(s.limits.TSDBMaxBytesPerShard(userID))) + return &logproto.ShardsResponse{ + Shards: shards, + }, nil +} + +// TODO(owen-d): this was copied from ingester_querier.go -- move it to a shared pkg +// isUnimplementedCallError tells if the GRPC error is a gRPC error with code Unimplemented. +func isUnimplementedCallError(err error) bool { + if err == nil { + return false + } + + s, ok := status.FromError(err) + if !ok { + return false + } + return (s.Code() == codes.Unimplemented) +} + func (s *GatewayClient) doQueries(ctx context.Context, queries []index.Query, callback index.QueryPagesCallback) error { queryKeyQueryMap := make(map[string]index.Query, len(queries)) gatewayQueries := make([]*logproto.IndexQuery, 0, len(queries)) @@ -365,6 +475,14 @@ func (s *GatewayClient) clientDoQueries(ctx context.Context, gatewayQueries []*l // poolDo executes the given function for each Index Gateway instance in the ring mapping to the correct tenant in the index. // In case of callback failure, we'll try another member of the ring for that tenant ID. 
func (s *GatewayClient) poolDo(ctx context.Context, callback func(client logproto.IndexGatewayClient) error) error { + return s.poolDoWithStrategy(ctx, callback, func(error) bool { return true }) +} + +func (s *GatewayClient) poolDoWithStrategy( + ctx context.Context, + callback func(client logproto.IndexGatewayClient) error, + shouldRetry func(error) bool, +) error { userID, err := tenant.TenantID(ctx) if err != nil { return errors.Wrap(err, "index gateway client get tenant ID") @@ -395,6 +513,10 @@ func (s *GatewayClient) poolDo(ctx context.Context, callback func(client logprot if err := callback(client); err != nil { lastErr = err level.Error(s.logger).Log("msg", fmt.Sprintf("client do failed for instance %s", addr), "err", err) + + if !shouldRetry(err) { + return err + } continue } diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go index 161323defd9a1..350a95e8f988b 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go @@ -3,9 +3,11 @@ package indexgateway import ( "context" "fmt" + "math" "sort" "sync" + "github.com/c2h5oh/datasize" "github.com/go-kit/log" "github.com/go-kit/log/level" "github.com/grafana/dskit/services" @@ -17,6 +19,7 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/pkg/logqlmodel/stats" "github.com/grafana/loki/pkg/querier/plan" v1 "github.com/grafana/loki/pkg/storage/bloom/v1" "github.com/grafana/loki/pkg/storage/chunk" @@ -25,6 +28,8 @@ import ( "github.com/grafana/loki/pkg/storage/stores/index" "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" seriesindex "github.com/grafana/loki/pkg/storage/stores/series/index" + tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util/spanlogger" ) @@ -59,6 +64,7 @@ type Gateway struct { indexQuerier IndexQuerier indexClients []IndexClientWithRange bloomQuerier BloomQuerier + metrics *Metrics cfg Config log log.Logger @@ -68,13 +74,14 @@ type Gateway struct { // // In case it is configured to be in ring mode, a Basic Service wrapping the ring client is started. // Otherwise, it starts an Idle Service that doesn't have lifecycle hooks. 
-func NewIndexGateway(cfg Config, log log.Logger, _ prometheus.Registerer, indexQuerier IndexQuerier, indexClients []IndexClientWithRange, bloomQuerier BloomQuerier) (*Gateway, error) { +func NewIndexGateway(cfg Config, log log.Logger, r prometheus.Registerer, indexQuerier IndexQuerier, indexClients []IndexClientWithRange, bloomQuerier BloomQuerier) (*Gateway, error) { g := &Gateway{ indexQuerier: indexQuerier, bloomQuerier: bloomQuerier, cfg: cfg, log: log, indexClients: indexClients, + metrics: NewMetrics(r), } // query newer periods first @@ -195,7 +202,7 @@ func buildResponses(query seriesindex.Query, batch seriesindex.ReadBatchResult, return nil } -func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequest) (*logproto.GetChunkRefResponse, error) { +func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequest) (result *logproto.GetChunkRefResponse, err error) { instanceID, err := tenant.TenantID(ctx) if err != nil { return nil, err @@ -211,7 +218,7 @@ func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequ return nil, err } - result := &logproto.GetChunkRefResponse{ + result = &logproto.GetChunkRefResponse{ Refs: make([]*logproto.ChunkRef, 0, len(chunks)), } for _, cs := range chunks { @@ -221,6 +228,12 @@ func (g *Gateway) GetChunkRef(ctx context.Context, req *logproto.GetChunkRefRequ } initialChunkCount := len(result.Refs) + defer func() { + if err == nil { + g.metrics.preFilterChunks.WithLabelValues(routeChunkRefs).Observe(float64(initialChunkCount)) + g.metrics.postFilterChunks.WithLabelValues(routeChunkRefs).Observe(float64(len(result.Refs))) + } + }() // Return unfiltered results if there is no bloom querier (Bloom Gateway disabled) if g.bloomQuerier == nil { @@ -340,6 +353,298 @@ func (g *Gateway) GetVolume(ctx context.Context, req *logproto.VolumeRequest) (* return g.indexQuerier.Volume(ctx, instanceID, req.From, req.Through, req.GetLimit(), req.TargetLabels, req.AggregateBy, matchers...) } +func (g *Gateway) GetShards(request *logproto.ShardsRequest, server logproto.IndexGateway_GetShardsServer) error { + ctx := server.Context() + log, _ := spanlogger.New(context.Background(), "IndexGateway.GetShards") + defer log.Finish() + + instanceID, err := tenant.TenantID(ctx) + if err != nil { + return err + } + + p, err := ExtractShardRequestMatchersAndAST(request.Query) + if err != nil { + return err + } + + // Shards were requested, but blooms are not enabled or cannot be used due to lack of filters. + // That's ok; we can still return shard ranges without filtering + // which will be more effective than guessing power-of-2 shard ranges. + forSeries, ok := g.indexQuerier.HasForSeries(request.From, request.Through) + if g.bloomQuerier == nil || len(syntax.ExtractLineFilters(p.Plan().AST)) == 0 || !ok { + shards, err := g.indexQuerier.GetShards( + ctx, + instanceID, + request.From, request.Through, + request.TargetBytesPerShard, + p, + ) + + if err != nil { + return err + } + + return server.Send(shards) + } + + return g.getShardsWithBlooms(ctx, request, server, instanceID, p, forSeries) +} + +// getShardsWithBlooms is a helper function to get shards with blooms enabled. 
+func (g *Gateway) getShardsWithBlooms( + ctx context.Context, + req *logproto.ShardsRequest, + server logproto.IndexGateway_GetShardsServer, + instanceID string, + p chunk.Predicate, + forSeries sharding.ForSeries, +) error { + // TODO(owen-d): instead of using GetChunks which buffers _all_ the chunks + // (expensive when looking at the full fingerprint space), we should + // use the `ForSeries` implementation to accumulate batches of chunks to dedupe, + // but I'm leaving this as a future improvement. This may be difficult considering + // fingerprints aren't necessarily iterated in order because multiple underlying TSDBs + // can be queried independently. This could also result in the same chunks being present in + // multiple batches. However, this is all OK because we can dedupe them post-blooms and in + // many cases the majority of chunks will only be present in a single post-compacted TSDB, + // making this more of an edge case than a common occurrence (make sure to check this assumption + // as getting it _very_ wrong could harm some cache locality benefits on the bloom-gws by + // sending multiple requests to the entire keyspace). + + sp, ctx := spanlogger.NewWithLogger( + ctx, + log.With(g.log, "tenant", instanceID), + "indexgateway.getShardsWithBlooms", + ) + defer sp.Finish() + + // 1) for all bounds, get chunk refs + grps, _, err := g.indexQuerier.GetChunks(ctx, instanceID, req.From, req.Through, p) + if err != nil { + return err + } + + var ct int + for _, g := range grps { + ct += len(g) + } + // TODO(owen-d): pool + refs := make([]*logproto.ChunkRef, 0, ct) + + for _, cs := range grps { + for j := range cs { + refs = append(refs, &cs[j].ChunkRef) + } + } + + // 2) filter via blooms + filtered, err := g.bloomQuerier.FilterChunkRefs(ctx, instanceID, req.From, req.Through, refs, p.Plan()) + if err != nil { + return err + } + g.metrics.preFilterChunks.WithLabelValues(routeShards).Observe(float64(ct)) + g.metrics.postFilterChunks.WithLabelValues(routeShards).Observe(float64(len(filtered))) + + statistics := stats.Result{ + Index: stats.Index{ + TotalChunks: int64(ct), + PostFilterChunks: int64(len(filtered)), + }, + } + + resp := &logproto.ShardsResponse{ + Statistics: statistics, + } + + // Edge case: if there are no chunks after filtering, we still need to return a single shard + if len(filtered) == 0 { + resp.Shards = []logproto.Shard{ + { + Bounds: logproto.FPBounds{Min: 0, Max: math.MaxUint64}, + Stats: &logproto.IndexStatsResponse{}, + }, + } + } else { + shards, err := accumulateChunksToShards(ctx, instanceID, forSeries, req, p, filtered) + if err != nil { + return err + } + resp.Shards = shards + } + + level.Debug(g.log).Log( + "msg", "shards response", + "total_chunks", statistics.Index.TotalChunks, + "post_filter_chunks", statistics.Index.PostFilterChunks, + "shards", len(resp.Shards), + "query", req.Query, + "target_bytes_per_shard", datasize.ByteSize(req.TargetBytesPerShard).HumanReadable(), + ) + + level.Debug(sp).Log( + "msg", "shards response", + "total_chunks", statistics.Index.TotalChunks, + "post_filter_chunks", statistics.Index.PostFilterChunks, + "shards", len(resp.Shards), + "query", req.Query, + "target_bytes_per_shard", datasize.ByteSize(req.TargetBytesPerShard).HumanReadable(), + ) + + // 3) build shards + return server.Send(resp) +} + +// ExtractShardRequestMatchersAndAST extracts the matchers and AST from a query string. 
+// It errors if there is more than one matcher group in the AST as this is supposed to be +// split out during query planning before reaching this point. +func ExtractShardRequestMatchersAndAST(query string) (chunk.Predicate, error) { + expr, err := syntax.ParseExpr(query) + if err != nil { + return chunk.Predicate{}, err + } + + ms, err := syntax.MatcherGroups(expr) + if err != nil { + return chunk.Predicate{}, err + } + + var matchers []*labels.Matcher + switch len(ms) { + case 0: + // nothing to do + case 1: + matchers = ms[0].Matchers + default: + return chunk.Predicate{}, fmt.Errorf( + "multiple matcher groups are not supported in GetShards. This is likely an internal bug as binary operations should be dispatched separately in planning", + ) + } + + return chunk.NewPredicate(matchers, &plan.QueryPlan{ + AST: expr, + }), nil +} + +// TODO(owen-d): consider extending index impl to support returning chunkrefs _with_ sizing info +// TODO(owen-d): perf, this is expensive :( +func accumulateChunksToShards( + ctx context.Context, + user string, + forSeries sharding.ForSeries, + req *logproto.ShardsRequest, + p chunk.Predicate, + filtered []*logproto.ChunkRef, +) ([]logproto.Shard, error) { + // map for looking up post-filtered chunks in O(n) while iterating the index again for sizing info + filteredM := make(map[model.Fingerprint][]refWithSizingInfo, 1024) + for _, ref := range filtered { + x := refWithSizingInfo{ref: ref} + filteredM[model.Fingerprint(ref.Fingerprint)] = append(filteredM[model.Fingerprint(ref.Fingerprint)], x) + } + + var mtx sync.Mutex + + if err := forSeries.ForSeries( + ctx, + user, + v1.NewBounds(filtered[0].FingerprintModel(), filtered[len(filtered)-1].FingerprintModel()), + req.From, req.Through, + func(l labels.Labels, fp model.Fingerprint, chks []tsdb_index.ChunkMeta) (stop bool) { + // check if this is a fingerprint we need + if _, ok := filteredM[fp]; !ok { + return false + } + mtx.Lock() + defer mtx.Unlock() + + filteredChks := filteredM[fp] + var j int + + outer: + for i := range filteredChks { + for j < len(chks) { + switch filteredChks[i].Cmp(chks[j]) { + case v1.Less: + // this chunk is not in the queried index, continue checking other chunks + continue outer + case v1.Greater: + // next chunk in index but didn't pass filter; continue + j++ + continue + case v1.Eq: + // a match; set the sizing info + filteredChks[i].KB = chks[j].KB + filteredChks[i].Entries = chks[j].Entries + j++ + continue outer + } + } + + // we've finished this index's chunks; no need to keep checking filtered chunks + break + } + + return false + }, + p.Matchers..., + ); err != nil { + return nil, err + } + + collectedSeries := sharding.SizedFPs(sharding.SizedFPsPool.Get(len(filteredM))) + defer sharding.SizedFPsPool.Put(collectedSeries) + + for fp, chks := range filteredM { + x := sharding.SizedFP{Fp: fp} + x.Stats.Chunks = uint64(len(chks)) + + for _, chk := range chks { + x.Stats.Entries += uint64(chk.Entries) + x.Stats.Bytes += uint64(chk.KB << 10) + } + collectedSeries = append(collectedSeries, x) + } + sort.Sort(collectedSeries) + + return collectedSeries.ShardsFor(req.TargetBytesPerShard), nil +} + +type refWithSizingInfo struct { + ref *logproto.ChunkRef + KB uint32 + Entries uint32 +} + +// careful: only checks from,through,checksum +func (r refWithSizingInfo) Cmp(chk tsdb_index.ChunkMeta) v1.Ord { + ref := *r.ref + chkFrom := model.Time(chk.MinTime) + if ref.From != chkFrom { + if ref.From < chkFrom { + return v1.Less + } + return v1.Greater + } + + chkThrough := 
model.Time(chk.MaxTime) + if ref.Through != chkThrough { + if ref.Through < chkThrough { + return v1.Less + } + return v1.Greater + } + + if ref.Checksum != chk.Checksum { + if ref.Checksum < chk.Checksum { + return v1.Less + } + return v1.Greater + } + + return v1.Eq +} + type failingIndexClient struct{} func (f failingIndexClient) QueryPages(_ context.Context, _ []seriesindex.Query, _ seriesindex.QueryPagesCallback) error { diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go index 48bb4d2c8c383..52518f3995b77 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go @@ -15,8 +15,12 @@ import ( "google.golang.org/grpc" "github.com/grafana/loki/pkg/logproto" + v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + "github.com/grafana/loki/pkg/storage/chunk" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/series/index" + tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" util_test "github.com/grafana/loki/pkg/util" util_log "github.com/grafana/loki/pkg/util/log" util_math "github.com/grafana/loki/pkg/util/math" @@ -284,3 +288,305 @@ func (i *indexQuerierMock) Volume(_ context.Context, userID string, from, throug return args.Get(0).(*logproto.VolumeResponse), args.Error(1) } + +// Tests for various cases of the `refWithSizingInfo.Cmp` function +func TestRefWithSizingInfo(t *testing.T) { + for _, tc := range []struct { + desc string + a refWithSizingInfo + b tsdb_index.ChunkMeta + exp v1.Ord + }{ + { + desc: "less by from", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + From: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + MinTime: 2, + }, + exp: v1.Less, + }, + { + desc: "eq by from", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + From: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + MinTime: 1, + }, + exp: v1.Eq, + }, + { + desc: "gt by from", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + From: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + MinTime: 1, + }, + exp: v1.Greater, + }, + { + desc: "less by through", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Through: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + MaxTime: 2, + }, + exp: v1.Less, + }, + { + desc: "eq by through", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Through: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + MaxTime: 2, + }, + exp: v1.Eq, + }, + { + desc: "gt by through", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Through: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + MaxTime: 1, + }, + exp: v1.Greater, + }, + { + desc: "less by checksum", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Checksum: 1, + }, + }, + b: tsdb_index.ChunkMeta{ + Checksum: 2, + }, + exp: v1.Less, + }, + { + desc: "eq by checksum", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Checksum: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + Checksum: 2, + }, + exp: v1.Eq, + }, + { + desc: "gt by checksum", + a: refWithSizingInfo{ + ref: &logproto.ChunkRef{ + Checksum: 2, + }, + }, + b: tsdb_index.ChunkMeta{ + Checksum: 1, + }, + exp: v1.Greater, + }, + } { + t.Run(tc.desc, func(t *testing.T) { + require.Equal(t, tc.exp, tc.a.Cmp(tc.b)) + }) + } +} + +// TODO(owen-d): more testing for specific cases +func TestAccumulateChunksToShards(t *testing.T) { + // only check eq by checksum for 
convenience -- we're not testing the comparison function here + mkRef := func(fp model.Fingerprint, checksum uint32) *logproto.ChunkRef { + return &logproto.ChunkRef{ + Fingerprint: uint64(fp), + Checksum: checksum, + } + } + + sized := func(ref *logproto.ChunkRef, kb, entries uint32) refWithSizingInfo { + return refWithSizingInfo{ + ref: ref, + KB: kb, + Entries: entries, + } + + } + + fsImpl := func(series [][]refWithSizingInfo) sharding.ForSeriesFunc { + return sharding.ForSeriesFunc( + func( + ctx context.Context, + _ string, + _ tsdb_index.FingerprintFilter, + _, _ model.Time, + fn func( + _ labels.Labels, + fp model.Fingerprint, + chks []tsdb_index.ChunkMeta, + ) (stop bool), matchers ...*labels.Matcher) error { + + for _, s := range series { + chks := []tsdb_index.ChunkMeta{} + for _, r := range s { + chks = append(chks, tsdb_index.ChunkMeta{ + Checksum: r.ref.Checksum, + KB: r.KB, + Entries: r.Entries, + }) + } + + if stop := fn(nil, s[0].ref.FingerprintModel(), chks); stop { + return nil + } + } + return nil + }, + ) + } + + filtered := []*logproto.ChunkRef{ + // shard 0 + mkRef(1, 0), + mkRef(1, 1), + mkRef(1, 2), + + // shard 1 + mkRef(2, 10), + mkRef(2, 20), + mkRef(2, 30), + + // shard 2 split across multiple series + mkRef(3, 10), + mkRef(4, 10), + mkRef(4, 20), + + // last shard contains leftovers + skip a few fps in between + mkRef(7, 10), + } + + series := [][]refWithSizingInfo{ + { + // first series creates one shard since a shard can't contain partial series. + // no chunks were filtered out + sized(mkRef(1, 0), 100, 1), + sized(mkRef(1, 1), 100, 1), + sized(mkRef(1, 2), 100, 1), + }, + { + // second shard also contains one series, but this series has chunks filtered out. + sized(mkRef(2, 0), 100, 1), // filtered out + sized(mkRef(2, 10), 100, 1), // included + sized(mkRef(2, 11), 100, 1), // filtered out + sized(mkRef(2, 20), 100, 1), // included + sized(mkRef(2, 21), 100, 1), // filtered out + sized(mkRef(2, 30), 100, 1), // included + sized(mkRef(2, 31), 100, 1), // filtered out + }, + + // third shard contains multiple series. 
+ // combined they have 110kb, which is above the target of 100kb + // but closer than leaving the second series out which would create + // a shard with 50kb + { + // first series, 50kb + sized(mkRef(3, 10), 50, 1), // 50kb + sized(mkRef(3, 11), 50, 1), // 50kb, not included + }, + { + // second series + sized(mkRef(4, 10), 30, 1), // 30kb + sized(mkRef(4, 11), 30, 1), // 30kb, not included + sized(mkRef(4, 20), 30, 1), // 30kb + }, + + // Fourth shard contains a single series with 25kb, + // but iterates over non-included fp(s) before it + { + // register a series in the index which is not included in the filtered list + sized(mkRef(6, 10), 100, 1), // not included + sized(mkRef(6, 11), 100, 1), // not included + }, + { + // last shard contains leftovers + sized(mkRef(7, 10), 25, 1), + sized(mkRef(7, 11), 100, 1), // not included + }, + } + + shards, err := accumulateChunksToShards( + context.Background(), + "", + fsImpl(series), + &logproto.ShardsRequest{ + TargetBytesPerShard: 100 << 10, + }, + chunk.NewPredicate(nil, nil), // we're not checking matcher injection here + filtered, + ) + + exp := []logproto.Shard{ + { + Bounds: logproto.FPBounds{Min: 0, Max: 1}, + Stats: &logproto.IndexStatsResponse{ + Streams: 1, + Chunks: 3, + Entries: 3, + Bytes: 300 << 10, + }, + }, + { + Bounds: logproto.FPBounds{Min: 2, Max: 2}, + Stats: &logproto.IndexStatsResponse{ + Streams: 1, + Chunks: 3, + Entries: 3, + Bytes: 300 << 10, + }, + }, + { + Bounds: logproto.FPBounds{Min: 3, Max: 6}, + Stats: &logproto.IndexStatsResponse{ + Streams: 2, + Chunks: 3, + Entries: 3, + Bytes: 110 << 10, + }, + }, + { + Bounds: logproto.FPBounds{Min: 7, Max: math.MaxUint64}, + Stats: &logproto.IndexStatsResponse{ + Streams: 1, + Chunks: 1, + Entries: 1, + Bytes: 25 << 10, + }, + }, + } + + require.NoError(t, err) + + for i := range shards { + require.Equal(t, exp[i], shards[i], "invalid shard at index %d", i) + } + require.Equal(t, len(exp), len(shards)) + +} diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go new file mode 100644 index 0000000000000..dcf517ea468b7 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go @@ -0,0 +1,37 @@ +package indexgateway + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + + "github.com/grafana/loki/pkg/util/constants" +) + +const ( + routeChunkRefs = "chunk_refs" + routeShards = "shards" +) + +type Metrics struct { + preFilterChunks *prometheus.HistogramVec + postFilterChunks *prometheus.HistogramVec +} + +func NewMetrics(r prometheus.Registerer) *Metrics { + return &Metrics{ + preFilterChunks: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "index_gateway", + Name: "prefilter_chunks", + Help: "Number of chunks before filtering", + Buckets: prometheus.ExponentialBuckets(1, 4, 10), + }, []string{"route"}), + postFilterChunks: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: constants.Loki, + Subsystem: "index_gateway", + Name: "postfilter_chunks", + Help: "Number of chunks after filtering", + Buckets: prometheus.ExponentialBuckets(1, 4, 10), + }, []string{"route"}), + } +} diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go index 50a1ae33b1390..c82efac95f025 100644 --- 
a/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go @@ -21,6 +21,7 @@ var ( type Limits interface { IndexGatewayShardSize(tenantID string) int + TSDBMaxBytesPerShard(string) int } type ShardingStrategy interface { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go index 15ee7f1a1d675..c88b0e124c9b5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go @@ -53,8 +53,9 @@ func (i indexProcessor) OpenCompactedIndexFile(ctx context.Context, path, tableN } builder := NewBuilder(indexFormat) - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { builder.AddSeries(lbls.Copy(), fp, chks) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) if err != nil { return nil, err @@ -212,8 +213,9 @@ func setupBuilder(ctx context.Context, indexType int, userID string, sourceIndex // add users index from multi-tenant indexes to the builder for _, idx := range multiTenantIndexes { - err := idx.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + err := idx.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { builder.AddSeries(withoutTenantLabel(lbls.Copy()), fp, chks) + return false }, withTenantLabelMatcher(userID, []*labels.Matcher{})...) 
if err != nil { return nil, err @@ -244,8 +246,9 @@ func setupBuilder(ctx context.Context, indexType int, userID string, sourceIndex } }() - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { builder.AddSeries(lbls.Copy(), fp, chks) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) if err != nil { return nil, err diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go index a2dfaa2271b6a..5032f6df085f5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go @@ -609,8 +609,9 @@ func TestCompactor_Compact(t *testing.T) { require.NoError(t, err) actualChunks = map[string]index.ChunkMetas{} - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { actualChunks[lbls.String()] = chks + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) require.NoError(t, err) @@ -823,8 +824,9 @@ func TestCompactedIndex(t *testing.T) { require.NoError(t, err) foundChunks := map[string]index.ChunkMetas{} - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(context.Background(), "", nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { foundChunks[lbls.String()] = append(index.ChunkMetas{}, chks...) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) require.NoError(t, err) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go index 7342fe851c577..ad285bc32b158 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go @@ -800,6 +800,14 @@ func (t *tenantHeads) Volume(ctx context.Context, userID string, from, through m return idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) } +func (t *tenantHeads) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + idx, ok := t.tenantIndex(userID, from, through) + if !ok { + return nil + } + return idx.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) 
+} + // helper only used in building TSDBs func (t *tenantHeads) forAll(fn func(user string, ls labels.Labels, fp uint64, chks index.ChunkMetas) error) error { for i, shard := range t.tenants { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index.go index bb294fb13f450..475446b15090f 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/loki/pkg/storage/chunk" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type Series struct { @@ -22,13 +23,22 @@ type ChunkRef struct { Checksum uint32 } -// Compares by (Start, End) +// Compares by (Fp, Start, End, checksum) // Assumes User is equivalent func (r ChunkRef) Less(x ChunkRef) bool { + if r.Fingerprint != x.Fingerprint { + return r.Fingerprint < x.Fingerprint + } + if r.Start != x.Start { return r.Start < x.Start } - return r.End <= x.End + + if r.End != x.End { + return r.End < x.End + } + + return r.Checksum < x.Checksum } type shouldIncludeChunk func(index.ChunkMeta) bool @@ -37,6 +47,7 @@ type Index interface { Bounded SetChunkFilterer(chunkFilter chunk.RequestChunkFilterer) Close() error + sharding.ForSeries // GetChunkRefs accepts an optional []ChunkRef argument. // If not nil, it will use that slice to build the result, // allowing us to avoid unnecessary allocations at the caller's discretion. @@ -84,3 +95,7 @@ func (NoopIndex) SetChunkFilterer(_ chunk.RequestChunkFilterer) {} func (NoopIndex) Volume(_ context.Context, _ string, _, _ model.Time, _ VolumeAccumulator, _ index.FingerprintFilter, _ shouldIncludeChunk, _ []string, _ string, _ ...*labels.Matcher) error { return nil } + +func (NoopIndex) ForSeries(_ context.Context, _ string, _ index.FingerprintFilter, _ model.Time, _ model.Time, _ func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), _ ...*labels.Matcher) error { + return nil +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go index b188ebbcb24ed..e32d39f167f9b 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/shard.go @@ -18,7 +18,10 @@ const ( var errDisallowedIdentityShard = errors.New("shard with factor of 1 is explicitly disallowed. It's equivalent to no sharding") type FingerprintFilter interface { + // TODO(owen-d): Match() is redundant and can be inferred from GetFromThrough() + // TODO(owen-d): GetFromThrough should just return FingerprintBounds as it's a better utility struct. Match(model.Fingerprint) bool + // GetFromThrough shows the [minimum, maximum) fingerprints. 
If there is no maximum, math.MaxUint64 may be used GetFromThrough() (model.Fingerprint, model.Fingerprint) } diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go index d609dc0ed27f7..1a2115b26cdbc 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go @@ -2,8 +2,12 @@ package tsdb import ( "context" + "sort" + "sync" "time" + "github.com/grafana/loki/pkg/logql" + v1 "github.com/grafana/loki/pkg/storage/bloom/v1" "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" "github.com/opentracing/opentracing-go" @@ -17,6 +21,7 @@ import ( "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/index/stats" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util" ) @@ -33,6 +38,9 @@ type IndexClientOptions struct { // duplicates when chunks are written to multiple // index buckets, which is of use in the (index-gateway|querier) // but not worth the memory costs in the ingesters. + // NB(owen-d): This is NOT the bloom-filter feature developed late 2023 onwards, + // but a smaller bloom filter used internally for probabalistic deduping of series counts + // in the index stats() method across index buckets (which can have the same series) UseBloomFilters bool } @@ -65,6 +73,20 @@ func NewIndexClient(idx Index, opts IndexClientOptions, l Limits) *IndexClient { } } +func shardFromMatchers(matchers []*labels.Matcher) (cleaned []*labels.Matcher, res logql.Shard, found bool, err error) { + for i, matcher := range matchers { + if matcher.Name == astmapper.ShardLabel && matcher.Type == labels.MatchEqual { + shard, _, err := logql.ParseShard(matcher.Value) + if err != nil { + return nil, shard, true, err + } + return append(matchers[:i], matchers[i+1:]...), shard, true, nil + } + } + + return matchers, logql.Shard{}, false, nil +} + // TODO(owen-d): This is a hack for compatibility with how the current query-mapping works. // Historically, Loki will read the index shard factor and the query planner will inject shard // labels accordingly. @@ -74,32 +96,21 @@ func NewIndexClient(idx Index, opts IndexClientOptions, l Limits) *IndexClient { func cleanMatchers(matchers ...*labels.Matcher) ([]*labels.Matcher, index.FingerprintFilter, error) { // first use withoutNameLabel to make a copy with the name label removed matchers = withoutNameLabel(matchers) - s, shardLabelIndex, err := astmapper.ShardFromMatchers(matchers) + + matchers, shard, found, err := shardFromMatchers(matchers) if err != nil { return nil, nil, err } - var fpFilter index.FingerprintFilter - if s != nil { - matchers = append(matchers[:shardLabelIndex], matchers[shardLabelIndex+1:]...) - shard := index.ShardAnnotation{ - Shard: uint32(s.Shard), - Of: uint32(s.Of), - } - fpFilter = shard - - if err := shard.Validate(); err != nil { - return nil, nil, err - } - } - if len(matchers) == 0 { // hack to query all data matchers = append(matchers, labels.MustNewMatcher(labels.MatchEqual, "", "")) } - return matchers, fpFilter, err - + if found { + return matchers, &shard, nil + } + return matchers, nil, nil } // TODO(owen-d): synchronize logproto.ChunkRef and tsdb.ChunkRef so we don't have to convert. 
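// A hedged sketch of the contract of the helpers above (names are from this
// diff; the shard string format is whatever the query planner injected upstream):
//
//	cleaned, fpFilter, err := cleanMatchers(matchers...)
//	// fpFilter is nil when no astmapper.ShardLabel matcher was present;
//	// otherwise it is a *logql.Shard, which satisfies index.FingerprintFilter
//	// and is handed to the underlying TSDB lookups unchanged.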
@@ -269,6 +280,45 @@ func (c *IndexClient) Volume(ctx context.Context, userID string, from, through m return acc.Volumes(), nil } +func (c *IndexClient) GetShards(ctx context.Context, userID string, from, through model.Time, targetBytesPerShard uint64, predicate chunk.Predicate) (*logproto.ShardsResponse, error) { + + // TODO(owen-d): perf, this is expensive :( + var mtx sync.Mutex + + m := make(map[model.Fingerprint]index.ChunkMetas, 1024) + if err := c.idx.ForSeries(ctx, userID, v1.FullBounds, from, through, func(_ labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { + mtx.Lock() + m[fp] = append(m[fp], chks...) + mtx.Unlock() + return false + }, predicate.Matchers...); err != nil { + return nil, err + } + + resp := &logproto.ShardsResponse{} + + series := sharding.SizedFPs(sharding.SizedFPsPool.Get(len(m))) + defer sharding.SizedFPsPool.Put(series) + + for fp, chks := range m { + x := sharding.SizedFP{Fp: fp} + deduped := chks.Finalize() + x.Stats.Chunks = uint64(len(deduped)) + resp.Statistics.Index.TotalChunks += int64(len(deduped)) + + for _, chk := range deduped { + x.Stats.Entries += uint64(chk.Entries) + x.Stats.Bytes += uint64(chk.KB << 10) + } + + series = append(series, x) + } + sort.Sort(series) + resp.Shards = series.ShardsFor(targetBytesPerShard) + + return resp, nil +} + // SetChunkFilterer sets a chunk filter to be used when retrieving chunks. // This is only used for GetSeries implementation. // Todo we might want to pass it as a parameter to GetSeries instead. @@ -293,3 +343,7 @@ func withoutNameLabel(matchers []*labels.Matcher) []*labels.Matcher { return dst } + +func (c *IndexClient) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return c.idx, true +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go index acace60c1e4b2..dbc94b18c027a 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go @@ -134,31 +134,46 @@ func (i *indexShipperQuerier) Volume(ctx context.Context, userID string, from, t return idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) } -type resultAccumulator struct { +func (i *indexShipperQuerier) ForSeries(ctx context.Context, userID string, fpFilter tsdbindex.FingerprintFilter, from, through model.Time, fn func(labels.Labels, model.Fingerprint, []tsdbindex.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + idx, err := i.indices(ctx, from, through, userID) + if err != nil { + return err + } + + return idx.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) +} + +type resultAccumulator[T any] struct { mtx sync.Mutex - items []interface{} - merge func(xs []interface{}) (interface{}, error) + items []T + merge func(xs []T) (T, error) } -func newResultAccumulator(merge func(xs []interface{}) (interface{}, error)) *resultAccumulator { - return &resultAccumulator{ +// TODO(owen-d): make generic to avoid casting at runtime. 
+func newResultAccumulator[T any](merge func(xs []T) (T, error)) *resultAccumulator[T] { + return &resultAccumulator[T]{ merge: merge, } } -func (acc *resultAccumulator) Add(item interface{}) { +func (acc *resultAccumulator[T]) Add(item T) { acc.mtx.Lock() defer acc.mtx.Unlock() acc.items = append(acc.items, item) } -func (acc *resultAccumulator) Merge() (interface{}, error) { +func (acc *resultAccumulator[T]) Merge() (res T, err error) { acc.mtx.Lock() defer acc.mtx.Unlock() - if len(acc.items) == 0 { - return nil, ErrEmptyAccumulator + ln := len(acc.items) + if ln == 0 { + return res, ErrEmptyAccumulator + } + + if ln == 1 { + return acc.items[0], nil } return acc.merge(acc.items) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go index 327566f1a0ecc..db7b0e04720ed 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go @@ -80,3 +80,11 @@ func (f LazyIndex) Volume(ctx context.Context, userID string, from, through mode } return i.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) } + +func (f LazyIndex) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + i, err := f() + if err != nil { + return err + } + return i.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go index 08bf6bf4ff01e..c0d2080bcbe35 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go @@ -4,6 +4,7 @@ import ( "context" "math" "runtime" + "sort" "sync" "github.com/prometheus/common/model" @@ -132,7 +133,7 @@ func (i *MultiIndex) forMatchingIndices(ctx context.Context, from, through model } func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, through model.Time, res []ChunkRef, fpFilter index.FingerprintFilter, matchers ...*labels.Matcher) ([]ChunkRef, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]ChunkRef) ([]ChunkRef, error) { if res == nil { res = ChunkRefsPool.Get() } @@ -143,9 +144,12 @@ func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, thro // TODO(owen-d): Do this more efficiently, // not all indices overlap each other + // TODO(owen-d): loser-tree or some other heap? 
+ for _, group := range xs { - g := group.([]ChunkRef) + g := group for _, ref := range g { + _, ok := seen[ref] if ok { continue @@ -154,9 +158,10 @@ func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, thro res = append(res, ref) } ChunkRefsPool.Put(g) - } + sort.Slice(res, func(i, j int) bool { return res[i].Less(res[j]) }) + return res, nil }) @@ -183,12 +188,12 @@ func (i *MultiIndex) GetChunkRefs(ctx context.Context, userID string, from, thro } return nil, err } - return merged.([]ChunkRef), nil + return merged, nil } func (i *MultiIndex) Series(ctx context.Context, userID string, from, through model.Time, res []Series, fpFilter index.FingerprintFilter, matchers ...*labels.Matcher) ([]Series, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]Series) ([]Series, error) { if res == nil { res = SeriesPool.Get() } @@ -197,7 +202,7 @@ func (i *MultiIndex) Series(ctx context.Context, userID string, from, through mo seen := make(map[model.Fingerprint]struct{}) for _, x := range xs { - seriesSet := x.([]Series) + seriesSet := x for _, s := range seriesSet { _, ok := seen[s.Fingerprint] if ok { @@ -235,17 +240,17 @@ func (i *MultiIndex) Series(ctx context.Context, userID string, from, through mo } return nil, err } - return merged.([]Series), nil + return merged, nil } func (i *MultiIndex) LabelNames(ctx context.Context, userID string, from, through model.Time, matchers ...*labels.Matcher) ([]string, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]string) ([]string, error) { var ( maxLn int // maximum number of lNames, assuming no duplicates lists [][]string ) for _, group := range xs { - x := group.([]string) + x := group maxLn += len(x) lists = append(lists, x) } @@ -293,17 +298,17 @@ func (i *MultiIndex) LabelNames(ctx context.Context, userID string, from, throug } return nil, err } - return merged.([]string), nil + return merged, nil } func (i *MultiIndex) LabelValues(ctx context.Context, userID string, from, through model.Time, name string, matchers ...*labels.Matcher) ([]string, error) { - acc := newResultAccumulator(func(xs []interface{}) (interface{}, error) { + acc := newResultAccumulator(func(xs [][]string) ([]string, error) { var ( maxLn int // maximum number of lValues, assuming no duplicates lists [][]string ) for _, group := range xs { - x := group.([]string) + x := group maxLn += len(x) lists = append(lists, x) } @@ -351,7 +356,7 @@ func (i *MultiIndex) LabelValues(ctx context.Context, userID string, from, throu } return nil, err } - return merged.([]string), nil + return merged, nil } func (i *MultiIndex) Stats(ctx context.Context, userID string, from, through model.Time, acc IndexStatsAccumulator, fpFilter index.FingerprintFilter, shouldIncludeChunk shouldIncludeChunk, matchers ...*labels.Matcher) error { @@ -365,3 +370,9 @@ func (i *MultiIndex) Volume(ctx context.Context, userID string, from, through mo return idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, matchers...) 
}) } + +func (i MultiIndex) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + return i.forMatchingIndices(ctx, from, through, func(ctx context.Context, idx Index) error { + return idx.ForSeries(ctx, userID, fpFilter, from, through, fn, matchers...) + }) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go index ec582b6e21489..52de31078673a 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go @@ -96,3 +96,7 @@ func (m *MultiTenantIndex) Stats(ctx context.Context, userID string, from, throu func (m *MultiTenantIndex) Volume(ctx context.Context, userID string, from, through model.Time, acc VolumeAccumulator, fpFilter index.FingerprintFilter, shouldIncludeChunk shouldIncludeChunk, targetLabels []string, aggregateBy string, matchers ...*labels.Matcher) error { return m.idx.Volume(ctx, userID, from, through, acc, fpFilter, shouldIncludeChunk, targetLabels, aggregateBy, withTenantLabelMatcher(userID, matchers)...) } + +func (m *MultiTenantIndex) ForSeries(ctx context.Context, userID string, fpFilter index.FingerprintFilter, from, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { + return m.idx.ForSeries(ctx, userID, fpFilter, from, through, fn, withTenantLabelMatcher(userID, matchers)...) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go new file mode 100644 index 0000000000000..362665a022776 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go @@ -0,0 +1,66 @@ +package sharding + +import ( + "context" + + "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" + + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" +) + +// General purpose iteration over series. Makes it easier to build custom functionality on top of indices +// of different types without them all implementing the same feature. +// The passed callback must _not_ capture its arguments. They're reused for each call for performance. +// The passed callback may be executed concurrently, +// so any shared state must be protected by the caller. +// NB: This is a low-level API and should be used with caution. +// NB: It's possible for the callback to be called multiple times for the same series but possibly different chunks, +// such as when the Index is backed by multiple files with the same series present. 
+// NB(owen-d): mainly in this package to avoid circular dependencies elsewhere +type ForSeries interface { + ForSeries( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, + ) error +} + +// function Adapter for ForSeries implementation +type ForSeriesFunc func( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, +) error + +func (f ForSeriesFunc) ForSeries( + ctx context.Context, + userID string, + fpFilter index.FingerprintFilter, + from model.Time, + through model.Time, + fn func( + labels.Labels, + model.Fingerprint, + []index.ChunkMeta, + ) (stop bool), + matchers ...*labels.Matcher, +) error { + return f(ctx, userID, fpFilter, from, through, fn, matchers...) +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go new file mode 100644 index 0000000000000..299cc21ea197b --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go @@ -0,0 +1,117 @@ +package sharding + +import ( + "math" + + "github.com/prometheus/common/model" + + "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/storage/stores/index/stats" +) + +const ( + DefaultTSDBMaxBytesPerShard = 600 << 20 // 600MB +) + +// PowerOfTwoSharding is a slimmed down legacy sharding implementation +// designed for use as a fallback when the newer impls aren't found +// (i.e. during a system upgrade to support the new impl) +type PowerOfTwoSharding struct { + MaxShards int +} + +func (p PowerOfTwoSharding) ShardsFor(bytes uint64, maxBytesPerShard uint64) []logproto.Shard { + factor := GuessShardFactor(bytes, maxBytesPerShard, p.MaxShards) + + if factor < 2 { + return []logproto.Shard{{ + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + Stats: &stats.Stats{ + Bytes: bytes, + }, + }} + } + + return LinearShards(factor, bytes) + +} + +// LinearShards is a sharding implementation that splits the data into +// equal sized shards covering the entire keyspace. 
It populates +// the `bytes` of each shard's stats with a proportional estimation +func LinearShards(n int, bytes uint64) []logproto.Shard { + if n < 2 { + return []logproto.Shard{ + { + Bounds: logproto.FPBounds{ + Min: 0, + Max: math.MaxUint64, + }, + Stats: &stats.Stats{ + Bytes: bytes, + }, + }, + } + } + + bytesPerShard := bytes / uint64(n) + fpPerShard := model.Fingerprint(math.MaxUint64) / model.Fingerprint(n) + + shards := make([]logproto.Shard, n) + for i := range shards { + shards[i] = logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: model.Fingerprint(i) * fpPerShard, + Max: model.Fingerprint(i+1) * fpPerShard, + }, + Stats: &stats.Stats{ + Bytes: bytesPerShard, + }, + } + + // The last shard should have the remainder of the bytes + // and the max bound should be math.MaxUint64 + // NB(owen-d): this can only happen when maxShards is used + // and the maxShards isn't a factor of 2 + shards[len(shards)-1].Stats.Bytes += bytes % uint64(n) + shards[len(shards)-1].Bounds.Max = math.MaxUint64 + } + + return shards + +} + +// Since we shard by powers of two and we increase shard factor +// once each shard surpasses maxBytesPerShard, if the shard factor +// is at least two, the range of data per shard is (maxBytesPerShard/2, maxBytesPerShard] +// For instance, for a maxBytesPerShard of 500MB and a query touching 1000MB, we split into two shards of 500MB. +// If there are 1004MB, we split into four shards of 251MB. +func GuessShardFactor(bytes, maxBytesPerShard uint64, maxShards int) int { + // If maxBytesPerShard is 0, we use the default value + // to avoid division by zero + if maxBytesPerShard < 1 { + maxBytesPerShard = DefaultTSDBMaxBytesPerShard + } + + minShards := float64(bytes) / float64(maxBytesPerShard) + + // round up to nearest power of 2 + power := math.Ceil(math.Log2(minShards)) + + // Since x^0 == 1 and we only support factors of 2 + // reset this edge case manually + factor := int(math.Pow(2, power)) + if maxShards > 0 { + factor = min(factor, maxShards) + } + + // shortcut: no need to run any sharding logic when factor=1 + // as it's the same as no sharding + if factor == 1 { + factor = 0 + } + return factor +} diff --git a/pkg/querier/queryrange/shard_resolver_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go similarity index 59% rename from pkg/querier/queryrange/shard_resolver_test.go rename to pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go index 8e963b065955f..5134b25c5cde0 100644 --- a/pkg/querier/queryrange/shard_resolver_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go @@ -1,4 +1,4 @@ -package queryrange +package sharding import ( "fmt" @@ -7,7 +7,6 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/validation" ) func TestGuessShardFactor(t *testing.T) { @@ -23,52 +22,52 @@ func TestGuessShardFactor(t *testing.T) { { exp: 4, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, { // round up shard factor exp: 16, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 15, + Bytes: DefaultTSDBMaxBytesPerShard * 15, }, }, { exp: 2, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard + 1, + Bytes: DefaultTSDBMaxBytesPerShard + 1, }, }, { exp: 0, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard, + Bytes: DefaultTSDBMaxBytesPerShard, }, }, { maxShards: 8, exp: 4, stats: stats.Stats{ - 
Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, { maxShards: 2, exp: 2, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, { maxShards: 1, exp: 0, stats: stats.Stats{ - Bytes: validation.DefaultTSDBMaxBytesPerShard * 4, + Bytes: DefaultTSDBMaxBytesPerShard * 4, }, }, } { t.Run(fmt.Sprintf("%+v", tc.stats), func(t *testing.T) { - require.Equal(t, tc.exp, guessShardFactor(tc.stats, validation.DefaultTSDBMaxBytesPerShard, tc.maxShards)) + require.Equal(t, tc.exp, GuessShardFactor(tc.stats.Bytes, uint64(DefaultTSDBMaxBytesPerShard), tc.maxShards)) }) } } diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go new file mode 100644 index 0000000000000..284468a9de315 --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go @@ -0,0 +1,102 @@ +package sharding + +import ( + "math" + + "github.com/prometheus/common/model" + + "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/queue" + "github.com/grafana/loki/pkg/storage/stores/index/stats" +) + +var ( + SizedFPsPool = queue.NewSlicePool[SizedFP](1<<8, 1<<16, 4) // 256->65536 +) + +type SizedFP struct { + Fp model.Fingerprint + Stats stats.Stats +} + +type SizedFPs []SizedFP + +func (xs SizedFPs) Len() int { + return len(xs) +} + +func (xs SizedFPs) Less(i, j int) bool { + return xs[i].Fp < xs[j].Fp +} + +func (xs SizedFPs) Swap(i, j int) { + xs[i], xs[j] = xs[j], xs[i] +} + +func (xs SizedFPs) newShard(minFP model.Fingerprint) logproto.Shard { + return logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: minFP, + }, + Stats: &stats.Stats{}, + } +} + +func (xs SizedFPs) ShardsFor(targetShardBytes uint64) (res []logproto.Shard) { + if len(xs) == 0 { + full := xs.newShard(0) + full.Bounds.Max = model.Fingerprint(math.MaxUint64) + return []logproto.Shard{full} + } + + var ( + cur = xs.newShard(0) + ) + + for _, x := range xs { + + // easy path, there's space -- continue + if cur.SpaceFor(&x.Stats, targetShardBytes) { + cur.Stats.Streams++ + cur.Stats.Chunks += x.Stats.Chunks + cur.Stats.Entries += x.Stats.Entries + cur.Stats.Bytes += x.Stats.Bytes + + cur.Bounds.Max = x.Fp + continue + } + + // we've hit a stream larger than the target; + // create a shard with 1 stream + if cur.Stats.Streams == 0 { + cur.Stats = &stats.Stats{ + Streams: 1, + Chunks: x.Stats.Chunks, + Bytes: x.Stats.Bytes, + Entries: x.Stats.Entries, + } + cur.Bounds.Max = x.Fp + res = append(res, cur) + cur = xs.newShard(x.Fp + 1) + continue + } + + // Otherwise we've hit a stream that's too large but the current shard isn't empty; create a new shard + cur.Bounds.Max = x.Fp - 1 + res = append(res, cur) + cur = xs.newShard(x.Fp) + cur.Stats = &stats.Stats{ + Streams: 1, + Chunks: x.Stats.Chunks, + Bytes: x.Stats.Bytes, + Entries: x.Stats.Entries, + } + } + + if cur.Stats.Streams > 0 { + res = append(res, cur) + } + + res[len(res)-1].Bounds.Max = model.Fingerprint(math.MaxUint64) + return res +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go new file mode 100644 index 0000000000000..49a3b12a8ff6e --- /dev/null +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go @@ -0,0 +1,153 @@ +package sharding + +import ( + "math" + "sort" + "testing" + + "github.com/prometheus/common/model" + 
"github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/storage/stores/index/stats" +) + +func TestSizedFPs_Sort(t *testing.T) { + xs := SizedFPs{ + {Fp: 3}, + {Fp: 1}, + {Fp: 6}, + {Fp: 10}, + {Fp: 2}, + {Fp: 0}, + {Fp: 4}, + {Fp: 5}, + {Fp: 7}, + {Fp: 9}, + {Fp: 8}, + } + + sort.Sort(xs) + exp := []model.Fingerprint{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10} + + for i, x := range xs { + require.Equal(t, exp[i], x.Fp) + } +} + +func TestSizedFPs_ShardsFor(t *testing.T) { + mkShard := func(min, max model.Fingerprint, streams, chks, entries, bytes uint64) logproto.Shard { + return logproto.Shard{ + Bounds: logproto.FPBounds{ + Min: min, + Max: max, + }, + Stats: &stats.Stats{ + Streams: streams, + Chunks: chks, + Entries: entries, + Bytes: bytes, + }, + } + } + + mkFP := func(fp model.Fingerprint, chks, entries, bytes uint64) SizedFP { + return SizedFP{ + Fp: fp, + Stats: stats.Stats{ + Chunks: chks, + Entries: entries, + Bytes: bytes, + }, + } + } + + for _, tc := range []struct { + desc string + xs SizedFPs + exp []logproto.Shard + targetShardBytes uint64 + }{ + { + desc: "empty", + targetShardBytes: 100, + xs: SizedFPs{}, + exp: []logproto.Shard{ + mkShard(0, math.MaxUint64, 0, 0, 0, 0), + }, + }, + { + desc: "single stream", + targetShardBytes: 100, + xs: SizedFPs{ + mkFP(1, 1, 1, 1), + }, + exp: []logproto.Shard{ + mkShard(0, math.MaxUint64, 1, 1, 1, 1), + }, + }, + { + desc: "single stream too large", + targetShardBytes: 100, + xs: SizedFPs{ + mkFP(1, 1, 1, 201), + }, + exp: []logproto.Shard{ + mkShard(0, math.MaxUint64, 1, 1, 1, 201), + }, + }, + { + desc: "4 streams 2 shards", + targetShardBytes: 100, + xs: SizedFPs{ + // each has 45 bytes; can only fit 2 in a shard + mkFP(1, 1, 1, 45), + mkFP(2, 1, 1, 45), + mkFP(3, 1, 1, 45), + mkFP(4, 1, 1, 45), + }, + exp: []logproto.Shard{ + mkShard(0, 2, 2, 2, 2, 90), + mkShard(3, math.MaxUint64, 2, 2, 2, 90), + }, + }, + { + desc: "5 streams 3 shards (one leftover)", + targetShardBytes: 100, + xs: SizedFPs{ + // each has 45 bytes; can only fit 2 in a shard + mkFP(1, 1, 1, 45), + mkFP(2, 1, 1, 45), + mkFP(3, 1, 1, 45), + mkFP(4, 1, 1, 45), + mkFP(5, 1, 1, 45), + }, + exp: []logproto.Shard{ + mkShard(0, 2, 2, 2, 2, 90), + mkShard(3, 4, 2, 2, 2, 90), + mkShard(5, math.MaxUint64, 1, 1, 1, 45), + }, + }, + { + desc: "allowed overflow", + targetShardBytes: 100, + xs: SizedFPs{ + // each has 40 bytes; can fit 3 in a shard + // since overflow == underflow + mkFP(1, 1, 1, 40), + mkFP(2, 1, 1, 40), + mkFP(3, 1, 1, 40), + mkFP(4, 1, 1, 40), + mkFP(5, 1, 1, 40), + }, + exp: []logproto.Shard{ + mkShard(0, 3, 3, 3, 3, 120), + mkShard(4, math.MaxUint64, 2, 2, 2, 80), + }, + }, + } { + t.Run(tc.desc, func(t *testing.T) { + require.Equal(t, tc.exp, tc.xs.ShardsFor(tc.targetShardBytes)) + }) + } +} diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go index 63809f6b1356e..b7bff50e52d2d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go @@ -55,8 +55,9 @@ func RebuildWithVersion(ctx context.Context, path string, desiredVer int) (shipp } builder := NewBuilder(desiredVer) - err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, nil, 0, math.MaxInt64, func(lbls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + err = indexFile.(*TSDBFile).Index.(*TSDBIndex).ForSeries(ctx, "", nil, 0, math.MaxInt64, func(lbls 
labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { builder.AddSeries(lbls.Copy(), fp, chks) + return false }, labels.MustNewMatcher(labels.MatchEqual, "", "")) if err != nil { return nil, err @@ -157,9 +158,10 @@ func (i *TSDBIndex) SetChunkFilterer(chunkFilter chunk.RequestChunkFilterer) { // fn must NOT capture it's arguments. They're reused across series iterations and returned to // a pool after completion. -// TODO(owen-d): have callback return a bool whether to continue or not in order to short-circuit -// when applicable -func (i *TSDBIndex) ForSeries(ctx context.Context, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta), matchers ...*labels.Matcher) error { +// Iteration will stop if the callback returns true. +// Accepts a userID argument in order to implement `Index` interface, but since this is a single tenant index, +// it is ignored (it's enforced elsewhere in index selection) +func (i *TSDBIndex) ForSeries(ctx context.Context, _ string, fpFilter index.FingerprintFilter, from model.Time, through model.Time, fn func(labels.Labels, model.Fingerprint, []index.ChunkMeta) (stop bool), matchers ...*labels.Matcher) error { // TODO(owen-d): use pool var ls labels.Labels @@ -187,7 +189,9 @@ func (i *TSDBIndex) ForSeries(ctx context.Context, fpFilter index.FingerprintFil continue } - fn(ls, model.Fingerprint(hash), chks) + if stop := fn(ls, model.Fingerprint(hash), chks); stop { + break + } } return p.Err() }) @@ -214,7 +218,7 @@ func (i *TSDBIndex) GetChunkRefs(ctx context.Context, userID string, from, throu } res = res[:0] - if err := i.ForSeries(ctx, fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + if err := i.ForSeries(ctx, "", fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { for _, chk := range chks { res = append(res, ChunkRef{ @@ -225,6 +229,7 @@ func (i *TSDBIndex) GetChunkRefs(ctx context.Context, userID string, from, throu Checksum: chk.Checksum, }) } + return false }, matchers...); err != nil { return nil, err } @@ -238,7 +243,7 @@ func (i *TSDBIndex) Series(ctx context.Context, _ string, from, through model.Ti } res = res[:0] - if err := i.ForSeries(ctx, fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) { + if err := i.ForSeries(ctx, "", fpFilter, from, through, func(ls labels.Labels, fp model.Fingerprint, chks []index.ChunkMeta) (stop bool) { if len(chks) == 0 { return } @@ -246,6 +251,7 @@ func (i *TSDBIndex) Series(ctx context.Context, _ string, from, through model.Ti Labels: ls.Copy(), Fingerprint: fp, }) + return false }, matchers...); err != nil { return nil, err } diff --git a/pkg/storage/util_test.go b/pkg/storage/util_test.go index 759b2e336aa29..12e8168473af9 100644 --- a/pkg/storage/util_test.go +++ b/pkg/storage/util_test.go @@ -25,6 +25,7 @@ import ( "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores" index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" loki_util "github.com/grafana/loki/pkg/util" "github.com/grafana/loki/pkg/util/constants" util_log "github.com/grafana/loki/pkg/util/log" @@ -270,6 +271,14 @@ func (m *mockChunkStore) Stats(_ context.Context, _ string, _, _ model.Time, _ . 
return nil, nil } +func (m *mockChunkStore) GetShards(_ context.Context, _ string, _, _ model.Time, _ uint64, _ chunk.Predicate) (*logproto.ShardsResponse, error) { + return nil, nil +} + +func (m *mockChunkStore) HasForSeries(_, _ model.Time) (sharding.ForSeries, bool) { + return nil, false +} + func (m *mockChunkStore) Volume(_ context.Context, _ string, _, _ model.Time, _ int32, _ []string, _ string, _ ...*labels.Matcher) (*logproto.VolumeResponse, error) { return nil, nil } diff --git a/pkg/util/marshal/legacy/marshal_test.go b/pkg/util/marshal/legacy/marshal_test.go index 88375ad842ab1..de308abb2b276 100644 --- a/pkg/util/marshal/legacy/marshal_test.go +++ b/pkg/util/marshal/legacy/marshal_test.go @@ -56,6 +56,10 @@ var queryTests = []struct { } ], "stats" : { + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "ingester" : { "store": { "chunksDownloadTime": 0, diff --git a/pkg/util/marshal/marshal.go b/pkg/util/marshal/marshal.go index bb961039cdd35..8b9f71ecc5782 100644 --- a/pkg/util/marshal/marshal.go +++ b/pkg/util/marshal/marshal.go @@ -155,6 +155,16 @@ func WriteIndexStatsResponseJSON(r *indexStats.Stats, w io.Writer) error { return s.Flush() } +// WriteIndexShardsResponseJSON marshals a indexgatewaypb.ShardsResponse to JSON and then +// writes it to the provided io.Writer. +func WriteIndexShardsResponseJSON(r *logproto.ShardsResponse, w io.Writer) error { + s := jsoniter.ConfigFastest.BorrowStream(w) + defer jsoniter.ConfigFastest.ReturnStream(s) + s.WriteVal(r) + s.WriteRaw("\n") + return s.Flush() +} + // WriteVolumeResponseJSON marshals a logproto.VolumeResponse to JSON and then // writes it to the provided io.Writer. func WriteVolumeResponseJSON(r *logproto.VolumeResponse, w io.Writer) error { diff --git a/pkg/util/marshal/marshal_test.go b/pkg/util/marshal/marshal_test.go index ca932064ca6c3..3a56617fb55b3 100644 --- a/pkg/util/marshal/marshal_test.go +++ b/pkg/util/marshal/marshal_test.go @@ -24,6 +24,10 @@ import ( ) const emptyStats = `{ + "index": { + "postFilterChunks": 0, + "totalChunks": 0 + }, "ingester" : { "store": { "chunksDownloadTime": 0, diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index e159fbf018f1b..e8f2ab2d994b3 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -23,9 +23,11 @@ import ( "github.com/grafana/loki/pkg/compactor/deletionmode" "github.com/grafana/loki/pkg/distributor/shardstreams" "github.com/grafana/loki/pkg/loghttp/push" + "github.com/grafana/loki/pkg/logql" "github.com/grafana/loki/pkg/logql/syntax" ruler_config "github.com/grafana/loki/pkg/ruler/config" "github.com/grafana/loki/pkg/ruler/util" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" "github.com/grafana/loki/pkg/util/flagext" util_log "github.com/grafana/loki/pkg/util/log" "github.com/grafana/loki/pkg/util/validation" @@ -49,8 +51,8 @@ const ( bytesInMB = 1048576 - defaultPerStreamRateLimit = 3 << 20 // 3MB - DefaultTSDBMaxBytesPerShard = 600 << 20 // 600MB + defaultPerStreamRateLimit = 3 << 20 // 3MB + DefaultTSDBMaxBytesPerShard = sharding.DefaultTSDBMaxBytesPerShard defaultPerStreamBurstLimit = 5 * defaultPerStreamRateLimit DefaultPerTenantQueryTimeout = "1m" @@ -95,6 +97,7 @@ type Limits struct { MaxQueryParallelism int `yaml:"max_query_parallelism" json:"max_query_parallelism"` TSDBMaxQueryParallelism int `yaml:"tsdb_max_query_parallelism" json:"tsdb_max_query_parallelism"` TSDBMaxBytesPerShard flagext.ByteSize `yaml:"tsdb_max_bytes_per_shard" json:"tsdb_max_bytes_per_shard"` + 
TSDBShardingStrategy string `yaml:"tsdb_sharding_strategy" json:"tsdb_sharding_strategy"` CardinalityLimit int `yaml:"cardinality_limit" json:"cardinality_limit"` MaxStreamsMatchersPerQuery int `yaml:"max_streams_matchers_per_query" json:"max_streams_matchers_per_query"` MaxConcurrentTailRequests int `yaml:"max_concurrent_tail_requests" json:"max_concurrent_tail_requests"` @@ -270,7 +273,16 @@ func (l *Limits) RegisterFlags(f *flag.FlagSet) { f.IntVar(&l.MaxQueryParallelism, "querier.max-query-parallelism", 32, "Maximum number of queries that will be scheduled in parallel by the frontend.") f.IntVar(&l.TSDBMaxQueryParallelism, "querier.tsdb-max-query-parallelism", 128, "Maximum number of queries will be scheduled in parallel by the frontend for TSDB schemas.") _ = l.TSDBMaxBytesPerShard.Set(strconv.Itoa(DefaultTSDBMaxBytesPerShard)) - f.Var(&l.TSDBMaxBytesPerShard, "querier.tsdb-max-bytes-per-shard", "Maximum number of bytes assigned to a single sharded query. Also expressible in human readable forms (1GB, etc).") + f.Var(&l.TSDBMaxBytesPerShard, "querier.tsdb-max-bytes-per-shard", "Target maximum number of bytes assigned to a single sharded query. Also expressible in human readable forms (1GB, etc). Note: This is a _target_ and not an absolute limit. The actual limit can be higher, but the query planner will try to build shards up to this limit.") + f.StringVar( + &l.TSDBShardingStrategy, + "limits.tsdb-sharding-strategy", + logql.PowerOfTwoVersion.String(), + fmt.Sprintf( + "sharding strategy to use in query planning. Suggested to use %s once all nodes can recognize it.", + logql.BoundedVersion.String(), + ), + ) f.IntVar(&l.CardinalityLimit, "store.cardinality-limit", 1e5, "Cardinality limit for index queries.") f.IntVar(&l.MaxStreamsMatchersPerQuery, "querier.max-streams-matcher-per-query", 1000, "Maximum number of stream matchers per query.") f.IntVar(&l.MaxConcurrentTailRequests, "querier.max-concurrent-tail-requests", 10, "Maximum number of concurrent tail requests.") @@ -432,6 +444,10 @@ func (l *Limits) Validate() error { return err } + if _, err := logql.ParseShardVersion(l.TSDBShardingStrategy); err != nil { + return errors.Wrap(err, "invalid tsdb sharding strategy") + } + if _, err := chunkenc.ParseEncoding(l.BloomBlockEncoding); err != nil { return err } @@ -595,6 +611,11 @@ func (o *Overrides) TSDBMaxBytesPerShard(userID string) int { return o.getOverridesForUser(userID).TSDBMaxBytesPerShard.Val() } +// TSDBShardingStrategy returns the sharding strategy to use in query planning. +func (o *Overrides) TSDBShardingStrategy(userID string) string { + return o.getOverridesForUser(userID).TSDBShardingStrategy +} + // MaxQueryParallelism returns the limit to the number of sub-queries the // frontend will process in parallel. 
func (o *Overrides) MaxQueryParallelism(_ context.Context, userID string) int { diff --git a/pkg/validation/limits_test.go b/pkg/validation/limits_test.go index 59626aeb8cdbe..9096d9b179444 100644 --- a/pkg/validation/limits_test.go +++ b/pkg/validation/limits_test.go @@ -15,6 +15,7 @@ import ( "github.com/grafana/loki/pkg/chunkenc" "github.com/grafana/loki/pkg/compactor/deletionmode" "github.com/grafana/loki/pkg/loghttp/push" + "github.com/grafana/loki/pkg/logql" ) func TestLimitsTagsYamlMatchJson(t *testing.T) { @@ -338,6 +339,7 @@ func TestLimitsValidation(t *testing.T) { } { desc := fmt.Sprintf("%s/%s", tc.limits.DeletionMode, tc.limits.BloomBlockEncoding) t.Run(desc, func(t *testing.T) { + tc.limits.TSDBShardingStrategy = logql.PowerOfTwoVersion.String() // hacky but needed for test if tc.expected == nil { require.NoError(t, tc.limits.Validate()) } else { diff --git a/tools/bloom/inspector/main.go b/tools/bloom/inspector/main.go index bb81d02b260b1..d7f1a7c89bf32 100644 --- a/tools/bloom/inspector/main.go +++ b/tools/bloom/inspector/main.go @@ -17,8 +17,8 @@ func main() { fmt.Printf("Block directory: %s\n", path) r := v1.NewDirectoryBlockReader(path) - b := v1.NewBlock(r) - q := v1.NewBlockQuerier(b) + b := v1.NewBlock(r, v1.NewMetrics(nil)) + q := v1.NewBlockQuerier(b, true) md, err := q.Metadata() if err != nil { diff --git a/tools/tsdb/bloom-tester/lib.go b/tools/tsdb/bloom-tester/lib.go index 2512a3e66bee4..c7608f0046bb2 100644 --- a/tools/tsdb/bloom-tester/lib.go +++ b/tools/tsdb/bloom-tester/lib.go @@ -281,8 +281,8 @@ func analyze(metrics *Metrics, sampler Sampler, indexShipper indexshipper.IndexS casted := idx.(*tsdb.TSDBFile).Index.(*tsdb.TSDBIndex) _ = casted.ForSeries( context.Background(), - nil, model.Earliest, model.Latest, - func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + "", nil, model.Earliest, model.Latest, + func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { seriesString := ls.String() seriesStringHash := FNV32a(seriesString) pos, _ := strconv.Atoi(seriesStringHash) @@ -399,6 +399,7 @@ func analyze(metrics *Metrics, sampler Sampler, indexShipper indexshipper.IndexS )*/ } // for each series + return false }, labels.MustNewMatcher(labels.MatchEqual, "", ""), ) diff --git a/tools/tsdb/bloom-tester/readlib.go b/tools/tsdb/bloom-tester/readlib.go index 6e5c492f120de..e2a21754e865c 100644 --- a/tools/tsdb/bloom-tester/readlib.go +++ b/tools/tsdb/bloom-tester/readlib.go @@ -141,8 +141,9 @@ func analyzeRead(metrics *Metrics, sampler Sampler, shipper indexshipper.IndexSh casted := idx.(*tsdb.TSDBFile).Index.(*tsdb.TSDBIndex) _ = casted.ForSeries( context.Background(), - nil, model.Earliest, model.Latest, - func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) { + "", nil, + model.Earliest, model.Latest, + func(ls labels.Labels, fp model.Fingerprint, chks []tsdbindex.ChunkMeta) (stop bool) { seriesString := ls.String() seriesStringHash := FNV32a(seriesString) pos, _ := strconv.Atoi(seriesStringHash) @@ -272,6 +273,7 @@ func analyzeRead(metrics *Metrics, sampler Sampler, shipper indexshipper.IndexSh ) */ } // For every series + return false }, labels.MustNewMatcher(labels.MatchEqual, "", ""), ) diff --git a/tools/tsdb/index-analyzer/analytics.go b/tools/tsdb/index-analyzer/analytics.go index 7558c17fc8030..d9baeedc69533 100644 --- a/tools/tsdb/index-analyzer/analytics.go +++ b/tools/tsdb/index-analyzer/analytics.go @@ -70,16 +70,17 @@ func analyze(indexShipper indexshipper.IndexShipper, 
tableName string, tenants [ err = casted.Index.(*tsdb.TSDBIndex).ForSeries( context.Background(), - nil, + "", nil, model.Earliest, model.Latest, - func(ls labels.Labels, fp model.Fingerprint, chks []tsdb_index.ChunkMeta) { + func(ls labels.Labels, fp model.Fingerprint, chks []tsdb_index.ChunkMeta) (stop bool) { if len(chks) > maxChunksPerSeries { maxChunksPerSeries = len(chks) if len(chks) > 1000 { seriesOver1kChunks++ } } + return false }, labels.MustNewMatcher(labels.MatchEqual, "", ""), ) From 64c7812015e770d439b7037620c41816beac431a Mon Sep 17 00:00:00 2001 From: Ashwanth Date: Tue, 26 Mar 2024 18:55:12 +0530 Subject: [PATCH 05/54] fix(codec): inject disable wrappers in ctx (#12352) --- pkg/querier/queryrange/codec.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index 76d726c240fa8..fb23c9a0ecd5d 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -425,6 +425,11 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) ctx = httpreq.InjectQueryTags(ctx, queryTags) } + // Add disable pipleine wrappers + if disableWrappers := httpReq.Header.Get(httpreq.LokiDisablePipelineWrappersHeader); disableWrappers != "" { + httpreq.InjectHeader(ctx, httpreq.LokiDisablePipelineWrappersHeader, disableWrappers) + } + // Add query metrics if queueTimeHeader := httpReq.Header.Get(string(httpreq.QueryQueueTimeHTTPHeader)); queueTimeHeader != "" { queueTime, err := time.ParseDuration(queueTimeHeader) From 5c6a031c08071b7b4c74eefc3b18d52295d914ea Mon Sep 17 00:00:00 2001 From: Salva Corts Date: Tue, 26 Mar 2024 14:59:26 +0100 Subject: [PATCH 06/54] fix(blooms): Not filters should always match (#12358) --- pkg/storage/bloom/v1/bloom_tester.go | 23 ++++++++------- pkg/storage/bloom/v1/bloom_tester_test.go | 34 +++++++++++++---------- 2 files changed, 31 insertions(+), 26 deletions(-) diff --git a/pkg/storage/bloom/v1/bloom_tester.go b/pkg/storage/bloom/v1/bloom_tester.go index ab9cbcc64a653..99b76c3a1a0d6 100644 --- a/pkg/storage/bloom/v1/bloom_tester.go +++ b/pkg/storage/bloom/v1/bloom_tester.go @@ -90,13 +90,16 @@ func FiltersToBloomTest(b NGramBuilder, filters ...syntax.LineFilterExpr) BloomT func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest { switch filter.Ty { - case labels.MatchEqual, labels.MatchNotEqual: - var test BloomTest = newStringTest(b, filter.Match) - if filter.Ty == labels.MatchNotEqual { - test = newNotTest(test) - } - return test - case labels.MatchRegexp, labels.MatchNotRegexp: + case labels.MatchNotEqual, labels.MatchNotRegexp: + // We cannot test _negated_ filters with a bloom filter since blooms are probabilistic + // filters that can only tell us if a string _might_ exist. + // For example, for `!= "foo"`, the bloom filter might tell us that the string "foo" might exist + // but because we are not sure, we cannot discard that chunk because it might actually not be there. + // Therefore, we return a test that always returns true. 
+ return MatchAll + case labels.MatchEqual: + return newStringTest(b, filter.Match) + case labels.MatchRegexp: reg, err := regexpsyntax.Parse(filter.Match, regexpsyntax.Perl) if err != nil { // TODO: log error @@ -111,11 +114,7 @@ func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest return MatchAll } - var test BloomTest = matcherFilterWrapper{filter: matcher} - if filter.Ty == labels.MatchNotRegexp { - test = newNotTest(test) - } - return test + return matcherFilterWrapper{filter: matcher} default: return MatchAll } diff --git a/pkg/storage/bloom/v1/bloom_tester_test.go b/pkg/storage/bloom/v1/bloom_tester_test.go index 46884140ad59e..991ac092dee39 100644 --- a/pkg/storage/bloom/v1/bloom_tester_test.go +++ b/pkg/storage/bloom/v1/bloom_tester_test.go @@ -47,16 +47,16 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: false, }, { - name: "notEq match", + name: "notEq doesnt exist", query: `{app="fake"} != "nope"`, bloom: fakeBloom{"foo", "bar"}, expectMatch: true, }, { - name: "notEq no match", + name: "notEq exists", query: `{app="fake"} != "foo"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { name: "or filter both match", @@ -89,22 +89,22 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: true, }, { - name: "Not or filter right no match", + name: "NotEq OR filter right exists", query: `{app="fake"} != "nope" or "bar"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { - name: "Not or filter left no match", + name: "Not OR filter left exists", query: `{app="fake"} != "foo" or "nope"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { - name: "Not or filter no match", + name: "NotEq OR filter both exists", query: `{app="fake"} != "foo" or "bar"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { name: "complex filter match", @@ -125,10 +125,10 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: true, }, { - name: "regex match none", + name: "regex match all notEq", query: `{app="fake"} !~ ".*"`, bloom: fakeBloom{"foo", "bar"}, - expectMatch: false, + expectMatch: true, // Still should match, }, { name: "regex match", @@ -138,10 +138,16 @@ func TestFiltersToBloomTests(t *testing.T) { }, { name: "regex no match", - query: `{app="fake"} !~ "nope|.*foo.*"`, + query: `{app="fake"} |~ ".*not.*"`, bloom: fakeBloom{"foo", "bar"}, expectMatch: false, }, + { + name: "regex notEq right exists", + query: `{app="fake"} !~ "nope|.*foo.*"`, + bloom: fakeBloom{"foo", "bar"}, + expectMatch: true, // Still should match because it's NotEQ + }, { name: "complex regex match", query: `{app="fake"} |~ "(nope|.*not.*|.*foo.*)" or "(no|ba)" !~ "noz.*" or "(nope|not)"`, @@ -149,10 +155,10 @@ func TestFiltersToBloomTests(t *testing.T) { expectMatch: true, }, { - name: "complex regex no match", + name: "complex regex with notEq exists", query: `{app="fake"} |~ "(nope|.*not.*|.*foo.*)" or "(no|ba)" !~ "noz.*"`, bloom: fakeBloom{"foo", "bar", "baz", "fuzz", "noz"}, - expectMatch: false, + expectMatch: true, // Still should match because it's NotEQ }, { name: "line filter after line format", From 9af191f3d44ebd0723eec5ced19f397f5134d299 Mon Sep 17 00:00:00 2001 From: Christian Haudum Date: Tue, 26 Mar 2024 15:09:40 +0100 Subject: [PATCH 07/54] feat(bloom-gw): Add `metrics.go` style 
log line to bloom gateway `FilterChunks` call (#12354) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds a "metrics.go" style log line to the bloom gateway that contains latencies for queueing, processing, post-processing, as well as number of chunks/series requested/filtered. This log line should give operators a better understanding where time is spent for individual requests. Example log line: ``` ts=2024-03-26T13:16:11.869619964Z caller=spanlogger.go:109 component=bloom-gateway tenant=XXX method=bloomgateway.FilterChunkRefs user=XXX traceID=6239d49e1e88f88645bd7f1f6f1b85c8 level=info status=success tasks=1 series_requested=35 series_filtered=31 chunks_requested=103 chunks_filtered=93 queue_time=4.108128936s metas_fetch_time=22.040408ms blocks_fetch_time=1.284575ms processing_time=30.215863002s post_processing_time=16.084µs ``` Signed-off-by: Christian Haudum --- pkg/bloomgateway/bloomgateway.go | 46 +++++++++------- pkg/bloomgateway/processor.go | 47 +++++++++++------ pkg/bloomgateway/stats.go | 90 ++++++++++++++++++++++++++++++++ pkg/bloomgateway/worker.go | 1 + 4 files changed, 149 insertions(+), 35 deletions(-) create mode 100644 pkg/bloomgateway/stats.go diff --git a/pkg/bloomgateway/bloomgateway.go b/pkg/bloomgateway/bloomgateway.go index 3b556b7dd7f99..0bd8bf2895f05 100644 --- a/pkg/bloomgateway/bloomgateway.go +++ b/pkg/bloomgateway/bloomgateway.go @@ -56,7 +56,6 @@ import ( "go.uber.org/atomic" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/queue" v1 "github.com/grafana/loki/pkg/storage/bloom/v1" "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" @@ -206,10 +205,16 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk log.With(g.logger, "tenant", tenantID), "bloomgateway.FilterChunkRefs", ) - defer sp.Finish() + + stats, ctx := ContextWithEmptyStats(ctx) + defer func() { + level.Info(sp).Log(stats.KVArgs()...) + sp.Finish() + }() // start time == end time --> empty response if req.From.Equal(req.Through) { + stats.Status = labelSuccess return &logproto.FilterChunkRefResponse{ ChunkRefs: []*logproto.GroupedChunkRefs{}, }, nil @@ -217,23 +222,28 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // start time > end time --> error response if req.Through.Before(req.From) { + stats.Status = labelFailure return nil, errors.New("from time must not be after through time") } filters := v1.ExtractTestableLineFilters(req.Plan.AST) + stats.NumFilters = len(filters) g.metrics.receivedFilters.Observe(float64(len(filters))) // Shortcut if request does not contain filters if len(filters) == 0 { + stats.Status = labelSuccess return &logproto.FilterChunkRefResponse{ ChunkRefs: req.Refs, }, nil } seriesByDay := partitionRequest(req) + stats.NumTasks = len(seriesByDay) // no tasks --> empty response if len(seriesByDay) == 0 { + stats.Status = labelSuccess return &logproto.FilterChunkRefResponse{ ChunkRefs: []*logproto.GroupedChunkRefs{}, }, nil @@ -255,15 +265,6 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // TODO(owen-d): include capacity in constructor? 
task.responses = responsesPool.Get(len(seriesForDay.series)) - - level.Debug(sp).Log( - "msg", "created task for day", - "task", task.ID, - "day", seriesForDay.day, - "interval", seriesForDay.interval.String(), - "nSeries", len(seriesForDay.series), - "filters", JoinFunc(filters, ";", func(e syntax.LineFilterExpr) string { return e.String() }), - ) tasks = append(tasks, task) } @@ -285,13 +286,14 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk // When enqueuing, we also add the task to the pending tasks _ = g.pendingTasks.Inc() }); err != nil { + stats.Status = labelFailure return nil, errors.Wrap(err, "failed to enqueue task") } // TODO(owen-d): use `concurrency` lib, bound parallelism go g.consumeTask(ctx, task, tasksCh) } - sp.Log("enqueue_duration", time.Since(queueStart).String()) + sp.Log("msg", "enqueued tasks", "duration", time.Since(queueStart).String()) remaining := len(tasks) @@ -305,10 +307,12 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk for remaining > 0 { select { case <-ctx.Done(): + stats.Status = "cancel" return nil, errors.Wrap(ctx.Err(), "request failed") case task := <-tasksCh: level.Info(sp).Log("msg", "task done", "task", task.ID, "err", task.Err()) if task.Err() != nil { + stats.Status = labelFailure return nil, errors.Wrap(task.Err(), "request failed") } responses = append(responses, task.responses) @@ -318,7 +322,10 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk sp.Log("msg", "received all responses") + start := time.Now() filtered := filterChunkRefs(req, responses) + duration := time.Since(start) + stats.AddPostProcessingTime(duration) // free up the responses for _, resp := range responses { @@ -335,13 +342,14 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk g.metrics.requestedChunks.Observe(float64(preFilterChunks)) g.metrics.filteredChunks.Observe(float64(preFilterChunks - postFilterChunks)) - level.Info(sp).Log( - "msg", "return filtered chunk refs", - "requested_series", preFilterSeries, - "filtered_series", preFilterSeries-postFilterSeries, - "requested_chunks", preFilterChunks, - "filtered_chunks", preFilterChunks-postFilterChunks, - ) + stats.Status = "success" + stats.SeriesRequested = preFilterSeries + stats.SeriesFiltered = preFilterSeries - postFilterSeries + stats.ChunksRequested = preFilterChunks + stats.ChunksFiltered = preFilterChunks - postFilterChunks + + sp.Log("msg", "return filtered chunk refs") + return &logproto.FilterChunkRefResponse{ChunkRefs: filtered}, nil } diff --git a/pkg/bloomgateway/processor.go b/pkg/bloomgateway/processor.go index dc58d0a0664ca..74cb19d06911b 100644 --- a/pkg/bloomgateway/processor.go +++ b/pkg/bloomgateway/processor.go @@ -45,7 +45,7 @@ func (p *processor) runWithBounds(ctx context.Context, tasks []Task, bounds v1.M "msg", "process tasks with bounds", "tenant", tenant, "tasks", len(tasks), - "bounds", JoinFunc(bounds, ",", func(e v1.FingerprintBounds) string { return e.String() }), + "bounds", len(bounds), ) for ts, tasks := range group(tasks, func(t Task) config.DayTime { return t.table }) { @@ -73,23 +73,30 @@ func (p *processor) processTasks(ctx context.Context, tenant string, day config. 
Interval: interval, Keyspace: v1.NewBounds(minFpRange.Min, maxFpRange.Max), } + + start := time.Now() metas, err := p.store.FetchMetas(ctx, metaSearch) + duration := time.Since(start) + level.Debug(p.logger).Log("msg", "fetched metas", "count", len(metas), "duration", duration, "err", err) + + for _, t := range tasks { + FromContext(t.ctx).AddMetasFetchTime(duration) + } + if err != nil { return err } blocksRefs := bloomshipper.BlocksForMetas(metas, interval, keyspaces) - level.Info(p.logger).Log("msg", "blocks for metas", "num_metas", len(metas), "num_blocks", len(blocksRefs)) - return p.processBlocks(ctx, partitionTasks(tasks, blocksRefs)) -} -func (p *processor) processBlocks(ctx context.Context, data []blockWithTasks) error { + data := partitionTasks(tasks, blocksRefs) + refs := make([]bloomshipper.BlockRef, 0, len(data)) for _, block := range data { refs = append(refs, block.ref) } - start := time.Now() + start = time.Now() bqs, err := p.store.FetchBlocks( ctx, refs, @@ -101,12 +108,21 @@ func (p *processor) processBlocks(ctx context.Context, data []blockWithTasks) er // the underlying bloom []byte outside of iteration bloomshipper.WithPool(true), ) - level.Debug(p.logger).Log("msg", "fetch blocks", "count", len(bqs), "duration", time.Since(start), "err", err) + duration = time.Since(start) + level.Debug(p.logger).Log("msg", "fetched blocks", "count", len(refs), "duration", duration, "err", err) + + for _, t := range tasks { + FromContext(t.ctx).AddBlocksFetchTime(duration) + } if err != nil { return err } + return p.processBlocks(ctx, bqs, data) +} + +func (p *processor) processBlocks(ctx context.Context, bqs []*bloomshipper.CloseableBlockQuerier, data []blockWithTasks) error { defer func() { for i := range bqs { if bqs[i] == nil { @@ -124,13 +140,6 @@ func (p *processor) processBlocks(ctx context.Context, data []blockWithTasks) er } block := data[i] - level.Debug(p.logger).Log( - "msg", "process block with tasks", - "job", i+1, - "of_jobs", len(bqs), - "block", block.ref, - "num_tasks", len(block.tasks), - ) if !block.ref.Bounds.Equal(bq.Bounds) { return errors.Errorf("block and querier bounds differ: %s vs %s", block.ref.Bounds, bq.Bounds) @@ -178,10 +187,16 @@ func (p *processor) processBlock(_ context.Context, blockQuerier *v1.BlockQuerie start := time.Now() err = fq.Run() + duration := time.Since(start) + if err != nil { - p.metrics.blockQueryLatency.WithLabelValues(p.id, labelFailure).Observe(time.Since(start).Seconds()) + p.metrics.blockQueryLatency.WithLabelValues(p.id, labelFailure).Observe(duration.Seconds()) } else { - p.metrics.blockQueryLatency.WithLabelValues(p.id, labelSuccess).Observe(time.Since(start).Seconds()) + p.metrics.blockQueryLatency.WithLabelValues(p.id, labelSuccess).Observe(duration.Seconds()) + } + + for _, task := range tasks { + FromContext(task.ctx).AddProcessingTime(duration) } return err diff --git a/pkg/bloomgateway/stats.go b/pkg/bloomgateway/stats.go new file mode 100644 index 0000000000000..308657a944214 --- /dev/null +++ b/pkg/bloomgateway/stats.go @@ -0,0 +1,90 @@ +package bloomgateway + +import ( + "context" + "time" + + "go.uber.org/atomic" +) + +type Stats struct { + Status string + NumTasks, NumFilters int + ChunksRequested, ChunksFiltered, SeriesRequested, SeriesFiltered int + QueueTime, MetasFetchTime, BlocksFetchTime, ProcessingTime, PostProcessingTime atomic.Duration +} + +type statsKey int + +var ctxKey = statsKey(0) + +// ContextWithEmptyStats returns a context with empty stats. 
+func ContextWithEmptyStats(ctx context.Context) (*Stats, context.Context) { + stats := &Stats{Status: "unknown"} + ctx = context.WithValue(ctx, ctxKey, stats) + return stats, ctx +} + +// FromContext gets the Stats out of the Context. Returns nil if stats have not +// been initialised in the context. +func FromContext(ctx context.Context) *Stats { + o := ctx.Value(ctxKey) + if o == nil { + return nil + } + return o.(*Stats) +} + +func (s *Stats) KVArgs() []any { + if s == nil { + return []any{} + } + return []any{ + "status", s.Status, + "tasks", s.NumTasks, + "series_requested", s.SeriesRequested, + "series_filtered", s.SeriesFiltered, + "chunks_requested", s.ChunksRequested, + "chunks_filtered", s.ChunksFiltered, + "queue_time", s.QueueTime.Load(), + "metas_fetch_time", s.MetasFetchTime.Load(), + "blocks_fetch_time", s.BlocksFetchTime.Load(), + "processing_time", s.ProcessingTime.Load(), + "post_processing_time", s.PostProcessingTime.Load(), + } +} + +func (s *Stats) AddQueueTime(t time.Duration) { + if s == nil { + return + } + s.QueueTime.Add(t) +} + +func (s *Stats) AddMetasFetchTime(t time.Duration) { + if s == nil { + return + } + s.MetasFetchTime.Add(t) +} + +func (s *Stats) AddBlocksFetchTime(t time.Duration) { + if s == nil { + return + } + s.BlocksFetchTime.Add(t) +} + +func (s *Stats) AddProcessingTime(t time.Duration) { + if s == nil { + return + } + s.ProcessingTime.Add(t) +} + +func (s *Stats) AddPostProcessingTime(t time.Duration) { + if s == nil { + return + } + s.PostProcessingTime.Add(t) +} diff --git a/pkg/bloomgateway/worker.go b/pkg/bloomgateway/worker.go index 52de8155d7783..eadbd2fa33c91 100644 --- a/pkg/bloomgateway/worker.go +++ b/pkg/bloomgateway/worker.go @@ -103,6 +103,7 @@ func (w *worker) running(_ context.Context) error { level.Debug(w.logger).Log("msg", "dequeued task", "task", task.ID) _ = w.pending.Dec() w.metrics.queueDuration.WithLabelValues(w.id).Observe(time.Since(task.enqueueTime).Seconds()) + FromContext(task.ctx).AddQueueTime(time.Since(task.enqueueTime)) tasks = append(tasks, task) first, last := getFirstLast(task.series) From c0c7a194be31e6096cc9c992f4bfdb0216d5813c Mon Sep 17 00:00:00 2001 From: J Stickler Date: Tue, 26 Mar 2024 11:30:00 -0400 Subject: [PATCH 08/54] docs: Update release notes for recent 2.9 releases (#12349) --- docs/sources/release-notes/v2-9.md | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/docs/sources/release-notes/v2-9.md b/docs/sources/release-notes/v2-9.md index 68d3da85bc4dd..cae79d6344d4b 100644 --- a/docs/sources/release-notes/v2-9.md +++ b/docs/sources/release-notes/v2-9.md @@ -32,14 +32,38 @@ Grafana Labs is excited to announce the release of Loki 2.9.0 Here's a summary o - The `-ingester.unordered-writes` CLI flag is deprecated and will always default to `true` in the next major release. - For the full list of deprecations, see CHANGELOG.md - ## Bug fixes +### 2.9.5 (2024-02-28) + +* Bump base images and Go dependencies to address CVEs ([#12092](https://github.com/grafana/loki/issues/12092)) ([eee3598](https://github.com/grafana/loki/commit/eee35983f38fe04543b169ffa8ece76c23c4217b)). + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + +### 2.9.4 (2024-01-24) + +- Fixed a couple of data races that can cause panics due to concurrent read-write access of tenant configs. +- Fixed a bug in the log results cache. 
+- Fixed the cache to atomically check background cache size limit correctly. +- Fixed the discrepancy between the semantics of logs and metrics queries. +- Fixed promtail default scrape config causing CPU and memory load. +- Update golang.org/x/crypto to v0.18.0. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + +### 2.9.3 (2023-12-11) + +* Upgrade otelhttp from 0.40.0 -> 0.44.0 and base alpine image from 3.18.3 -> 3.18.5 to fix a few CVES (CVE-2023-45142, CVE-2022-21698, CVE-2023-5363). +* Fix querying ingester for label values with a matcher (previously didn't respect the matcher). +* Ensure all lifecycler cfgs ref a valid IPv6 addr and port combination. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + ### 2.9.2 (2023-10-16) * Upgrade go to v1.21.3, golang.org/x/net to v0.17.0 and grpc-go to v1.56.3 to patch CVE-2023-39325 / CVE-2023-44487 -For a full list of all changes and fixes, look at the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). ### 2.9.1 (2023-09-14) From d3266a185c1bf2f7b5aceb9ef8d123f5c15eb5e7 Mon Sep 17 00:00:00 2001 From: Pangidoan Butar <38452094+doanbutar@users.noreply.github.com> Date: Wed, 27 Mar 2024 01:53:05 +0800 Subject: [PATCH 09/54] docs: Update structured_metadata.md (#12355) Co-authored-by: J Stickler --- docs/sources/send-data/promtail/stages/structured_metadata.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/sources/send-data/promtail/stages/structured_metadata.md b/docs/sources/send-data/promtail/stages/structured_metadata.md index 70e671bb8ae17..7337bef023b9a 100644 --- a/docs/sources/send-data/promtail/stages/structured_metadata.md +++ b/docs/sources/send-data/promtail/stages/structured_metadata.md @@ -14,6 +14,8 @@ modifies the [structured metadata]({{< relref "../../../get-started/labels/struc {{% admonition type="warning" %}} Structured metadata will be rejected by Loki unless you enable the `allow_structured_metadata` per tenant configuration (in the `limits_config`). + +Structured metadata was added to chunk format V4 which is used if the schema version is greater or equal to **13**. (See Schema Config for more details about schema versions. 
) {{% /admonition %}} ## Schema @@ -47,7 +49,7 @@ For the given pipeline: Given the following log line: ```json -{"log":"log message\n","stream":"stderr","traceID":"0242ac120002",time":"2019-04-30T02:12:41.8443515Z"} +{"log":"log message\n","stream":"stderr","traceID":"0242ac120002","time":"2019-04-30T02:12:41.8443515Z"} ``` The first stage would extract `stream` with a value of `stderr` and `traceID` with a value of `0242ac120002` into From 30ac88bc26667043de283fb6c719c7f871968651 Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Tue, 26 Mar 2024 11:00:47 -0700 Subject: [PATCH 10/54] chore(blooms): increase blockpool by factor-of-2 (#12363) --- pkg/storage/bloom/v1/util.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/storage/bloom/v1/util.go b/pkg/storage/bloom/v1/util.go index 6b83e298a3957..06cf1f6add227 100644 --- a/pkg/storage/bloom/v1/util.go +++ b/pkg/storage/bloom/v1/util.go @@ -35,7 +35,7 @@ var ( // 4KB -> 128MB BlockPool = BytePool{ pool: pool.New( - 4<<10, 128<<20, 4, + 4<<10, 128<<20, 2, func(size int) interface{} { return make([]byte, size) }), From 6b3bbb3851a881bdc103bdf0386cbae41e865a25 Mon Sep 17 00:00:00 2001 From: J Stickler Date: Tue, 26 Mar 2024 14:22:48 -0400 Subject: [PATCH 11/54] docs: Update release notes for 2.9.6 (#12365) --- docs/sources/release-notes/v2-9.md | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/docs/sources/release-notes/v2-9.md b/docs/sources/release-notes/v2-9.md index cae79d6344d4b..4140643316297 100644 --- a/docs/sources/release-notes/v2-9.md +++ b/docs/sources/release-notes/v2-9.md @@ -34,9 +34,17 @@ Grafana Labs is excited to announce the release of Loki 2.9.0 Here's a summary o ## Bug fixes +### 2.9.6 (2024-03-21) + +* Fixed Promtail failures connecting to local Loki installation ([#12184](https://github.com/grafana/loki/issues/12184)) ([8585e35](https://github.com/grafana/loki/commit/8585e3537375c0deb11462d7256f5da23228f5e1)). +* Fixed an issue when using IPv6 where IPv6 addresses were not properly joined with ports. Use `net.JoinHostPort` to support IPv6 addresses. ([#10650](https://github.com/grafana/loki/issues/10650)) ([#11870](https://github.com/grafana/loki/issues/11870)) ([7def3b4](https://github.com/grafana/loki/commit/7def3b4e774252e13ba154ca13f72816a84da7dd)). +* Updated google.golang.org/protobuf to v1.33.0 ([#12269](https://github.com/grafana/loki/issues/12269)) ([#12287](https://github.com/grafana/loki/issues/12287)) ([3186520](https://github.com/grafana/loki/commit/318652035059fdaa40405f263fc9e37b4d38b157)). + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). + ### 2.9.5 (2024-02-28) -* Bump base images and Go dependencies to address CVEs ([#12092](https://github.com/grafana/loki/issues/12092)) ([eee3598](https://github.com/grafana/loki/commit/eee35983f38fe04543b169ffa8ece76c23c4217b)). +* Bumped base images and Go dependencies to address CVEs ([#12092](https://github.com/grafana/loki/issues/12092)) ([eee3598](https://github.com/grafana/loki/commit/eee35983f38fe04543b169ffa8ece76c23c4217b)). For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). @@ -47,25 +55,27 @@ For a full list of all changes and fixes, refer to the [CHANGELOG](https://githu - Fixed the cache to atomically check background cache size limit correctly. - Fixed the discrepancy between the semantics of logs and metrics queries. 
- Fixed promtail default scrape config causing CPU and memory load. -- Update golang.org/x/crypto to v0.18.0. +- Updated golang.org/x/crypto to v0.18.0. For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). ### 2.9.3 (2023-12-11) -* Upgrade otelhttp from 0.40.0 -> 0.44.0 and base alpine image from 3.18.3 -> 3.18.5 to fix a few CVES (CVE-2023-45142, CVE-2022-21698, CVE-2023-5363). -* Fix querying ingester for label values with a matcher (previously didn't respect the matcher). -* Ensure all lifecycler cfgs ref a valid IPv6 addr and port combination. +* Upgraded otelhttp from 0.40.0 -> 0.44.0 and base alpine image from 3.18.3 -> 3.18.5 to fix a few CVES (CVE-2023-45142, CVE-2022-21698, CVE-2023-5363). +* Fixed querying ingester for label values with a matcher (previously didn't respect the matcher). +* Ensured all lifecycler cfgs ref a valid IPv6 addr and port combination. For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). ### 2.9.2 (2023-10-16) -* Upgrade go to v1.21.3, golang.org/x/net to v0.17.0 and grpc-go to v1.56.3 to patch CVE-2023-39325 / CVE-2023-44487 +* Upgraded go to v1.21.3, golang.org/x/net to v0.17.0 and grpc-go to v1.56.3 to patch CVE-2023-39325 / CVE-2023-44487 For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). ### 2.9.1 (2023-09-14) -* Update Docker base images to mitigate security vulnerability CVE-2022-48174 -* Fix bugs in indexshipper (`tsdb`, `boltdb-shipper`) that could result in not showing all ingested logs in query results. +* Updated Docker base images to mitigate security vulnerability CVE-2022-48174 +* Fixed bugs in indexshipper (`tsdb`, `boltdb-shipper`) that could result in not showing all ingested logs in query results. + +For a full list of all changes and fixes, refer to the [CHANGELOG](https://github.com/grafana/loki/blob/release-2.9.x/CHANGELOG.md). 
\ No newline at end of file From 9810e8ec63d490cd62f3d9bfceea768d83bd48e5 Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Tue, 26 Mar 2024 11:57:27 -0700 Subject: [PATCH 12/54] chore(blooms): computed fields for bloomgw stats logging (#12367) --- pkg/bloomgateway/stats.go | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pkg/bloomgateway/stats.go b/pkg/bloomgateway/stats.go index 308657a944214..bc7373d4d653a 100644 --- a/pkg/bloomgateway/stats.go +++ b/pkg/bloomgateway/stats.go @@ -35,10 +35,23 @@ func FromContext(ctx context.Context) *Stats { return o.(*Stats) } +// aggregates the total duration +func (s *Stats) Duration() (dur time.Duration) { + dur += s.QueueTime.Load() + dur += s.MetasFetchTime.Load() + dur += s.BlocksFetchTime.Load() + dur += s.ProcessingTime.Load() + dur += s.PostProcessingTime.Load() + return +} + func (s *Stats) KVArgs() []any { if s == nil { return []any{} } + chunksRemaining := s.ChunksRequested - s.ChunksFiltered + filterRatio := float64(s.ChunksFiltered) / float64(max(s.ChunksRequested, 1)) + return []any{ "status", s.Status, "tasks", s.NumTasks, @@ -46,11 +59,14 @@ func (s *Stats) KVArgs() []any { "series_filtered", s.SeriesFiltered, "chunks_requested", s.ChunksRequested, "chunks_filtered", s.ChunksFiltered, + "chunks_remaining", chunksRemaining, + "filter_ratio", filterRatio, "queue_time", s.QueueTime.Load(), "metas_fetch_time", s.MetasFetchTime.Load(), "blocks_fetch_time", s.BlocksFetchTime.Load(), "processing_time", s.ProcessingTime.Load(), "post_processing_time", s.PostProcessingTime.Load(), + "duration", s.Duration(), } } From 19c046f14b9f2ffd985aacb6ab197ab90314a038 Mon Sep 17 00:00:00 2001 From: Christian Haudum Date: Tue, 26 Mar 2024 23:07:02 +0100 Subject: [PATCH 13/54] chore(blooms): Make max bloom page size for querying configurable (#12337) Signed-off-by: Christian Haudum --- docs/sources/configure/_index.md | 5 +++++ pkg/bloomcompactor/spec_test.go | 4 ++-- pkg/bloomgateway/util_test.go | 2 +- pkg/storage/bloom/v1/block.go | 4 ++-- pkg/storage/bloom/v1/bloom.go | 11 +++++------ pkg/storage/bloom/v1/bloom_querier.go | 7 +++++-- pkg/storage/bloom/v1/builder_test.go | 12 ++++++------ pkg/storage/bloom/v1/fuse_test.go | 4 ++-- pkg/storage/stores/shipper/bloomshipper/cache.go | 4 +++- .../stores/shipper/bloomshipper/config/config.go | 3 +++ pkg/storage/stores/shipper/bloomshipper/fetcher.go | 2 ++ pkg/storage/stores/shipper/bloomshipper/store.go | 10 ++++++---- tools/bloom/inspector/main.go | 2 +- 13 files changed, 43 insertions(+), 27 deletions(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index d5411f8b1c9d0..c1ad2e29eff94 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2362,6 +2362,11 @@ bloom_shipper: # CLI flag: -bloom.shipper.working-directory [working_directory: | default = "bloom-shipper"] + # Maximum size of bloom pages that should be queried. Larger pages than this + # limit are skipped when querying blooms to limit memory usage. + # CLI flag: -bloom.max-query-page-size + [max_query_page_size: | default = 64MiB] + blocks_downloading_queue: # The count of parallel workers that download Bloom Blocks. 
# CLI flag: -bloom.shipper.blocks-downloading-queue.workers-count diff --git a/pkg/bloomcompactor/spec_test.go b/pkg/bloomcompactor/spec_test.go index fe1b2a09b5a07..e9a403ac6929f 100644 --- a/pkg/bloomcompactor/spec_test.go +++ b/pkg/bloomcompactor/spec_test.go @@ -74,7 +74,7 @@ func dummyBloomGen(t *testing.T, opts v1.BlockOptions, store v1.Iterator[*v1.Ser for i, b := range blocks { bqs = append(bqs, &bloomshipper.CloseableBlockQuerier{ BlockRef: refs[i], - BlockQuerier: v1.NewBlockQuerier(b, false), + BlockQuerier: v1.NewBlockQuerier(b, false, v1.DefaultMaxPageSize), }) } @@ -152,7 +152,7 @@ func TestSimpleBloomGenerator(t *testing.T) { expectedRefs := v1.PointerSlice(data) outputRefs := make([]*v1.SeriesWithBloom, 0, len(data)) for _, block := range outputBlocks { - bq := v1.NewBlockQuerier(block, false) + bq := v1.NewBlockQuerier(block, false, v1.DefaultMaxPageSize) for bq.Next() { outputRefs = append(outputRefs, bq.At()) } diff --git a/pkg/bloomgateway/util_test.go b/pkg/bloomgateway/util_test.go index 48d0f99f9004d..9bd158219e134 100644 --- a/pkg/bloomgateway/util_test.go +++ b/pkg/bloomgateway/util_test.go @@ -334,7 +334,7 @@ func createBlocks(t *testing.T, tenant string, n int, from, through model.Time, // } // } querier := &bloomshipper.CloseableBlockQuerier{ - BlockQuerier: v1.NewBlockQuerier(block, false), + BlockQuerier: v1.NewBlockQuerier(block, false, v1.DefaultMaxPageSize), BlockRef: blockRef, } queriers = append(queriers, querier) diff --git a/pkg/storage/bloom/v1/block.go b/pkg/storage/bloom/v1/block.go index b1e534df731bd..91ba171b272c0 100644 --- a/pkg/storage/bloom/v1/block.go +++ b/pkg/storage/bloom/v1/block.go @@ -117,11 +117,11 @@ type BlockQuerier struct { // will be returned to the pool for efficiency. This can only safely be used // when the underlying bloom bytes don't escape the decoder, i.e. // when loading blooms for querying (bloom-gw) but not for writing (bloom-compactor). -func NewBlockQuerier(b *Block, noCapture bool) *BlockQuerier { +func NewBlockQuerier(b *Block, noCapture bool, maxPageSize int) *BlockQuerier { return &BlockQuerier{ block: b, series: NewLazySeriesIter(b), - blooms: NewLazyBloomIter(b, noCapture), + blooms: NewLazyBloomIter(b, noCapture, maxPageSize), } } diff --git a/pkg/storage/bloom/v1/bloom.go b/pkg/storage/bloom/v1/bloom.go index d740c170fcc92..058ac68818d5f 100644 --- a/pkg/storage/bloom/v1/bloom.go +++ b/pkg/storage/bloom/v1/bloom.go @@ -15,10 +15,9 @@ import ( // NB(chaudum): Some block pages are way bigger than others (400MiB and // bigger), and loading multiple pages into memory in parallel can cause the // gateways to OOM. -// Figure out a decent maximum page size that we can process. -// TODO(chaudum): Make max page size configurable -var maxPageSize = 64 << 20 // 64MB -var ErrPageTooLarge = errors.Errorf("bloom page too large: size limit is %.1fMiB", float64(maxPageSize)/float64(1<<20)) +// Figure out a decent default maximum page size that we can process. 
+var DefaultMaxPageSize = 64 << 20 // 64MB +var ErrPageTooLarge = errors.Errorf("bloom page too large") type Bloom struct { filter.ScalableBloomFilter @@ -276,7 +275,7 @@ func (b *BloomBlock) DecodeHeaders(r io.ReadSeeker) (uint32, error) { return checksum, nil } -func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int, metrics *Metrics) (res *BloomPageDecoder, err error) { +func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int, maxPageSize int, metrics *Metrics) (res *BloomPageDecoder, err error) { if pageIdx < 0 || pageIdx >= len(b.pageHeaders) { metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonOOB).Inc() metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonOOB).Add(float64(b.pageHeaders[pageIdx].DecompressedLen)) @@ -292,7 +291,7 @@ func (b *BloomBlock) BloomPageDecoder(r io.ReadSeeker, pageIdx int, metrics *Met return nil, ErrPageTooLarge } - if _, err := r.Seek(int64(page.Offset), io.SeekStart); err != nil { + if _, err = r.Seek(int64(page.Offset), io.SeekStart); err != nil { metrics.pagesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Inc() metrics.bytesSkipped.WithLabelValues(pageTypeBloom, skipReasonErr).Add(float64(page.DecompressedLen)) return nil, errors.Wrap(err, "seeking to bloom page") diff --git a/pkg/storage/bloom/v1/bloom_querier.go b/pkg/storage/bloom/v1/bloom_querier.go index 535e2a379250b..01c0216c36f0a 100644 --- a/pkg/storage/bloom/v1/bloom_querier.go +++ b/pkg/storage/bloom/v1/bloom_querier.go @@ -10,6 +10,7 @@ type LazyBloomIter struct { usePool bool b *Block + m int // max page size in bytes // state initialized bool @@ -23,10 +24,11 @@ type LazyBloomIter struct { // will be returned to the pool for efficiency. // This can only safely be used when the underlying bloom // bytes don't escape the decoder. 
-func NewLazyBloomIter(b *Block, pool bool) *LazyBloomIter { +func NewLazyBloomIter(b *Block, pool bool, maxSize int) *LazyBloomIter { return &LazyBloomIter{ usePool: pool, b: b, + m: maxSize, } } @@ -58,7 +60,7 @@ func (it *LazyBloomIter) Seek(offset BloomOffset) { it.err = errors.Wrap(err, "getting blooms reader") return } - decoder, err := it.b.blooms.BloomPageDecoder(r, offset.Page, it.b.metrics) + decoder, err := it.b.blooms.BloomPageDecoder(r, offset.Page, it.m, it.b.metrics) if err != nil { it.err = errors.Wrap(err, "loading bloom page") return @@ -97,6 +99,7 @@ func (it *LazyBloomIter) next() bool { it.curPage, err = it.b.blooms.BloomPageDecoder( r, it.curPageIndex, + it.m, it.b.metrics, ) if err != nil { diff --git a/pkg/storage/bloom/v1/builder_test.go b/pkg/storage/bloom/v1/builder_test.go index 540d0a768ca7e..481c8ec9f915e 100644 --- a/pkg/storage/bloom/v1/builder_test.go +++ b/pkg/storage/bloom/v1/builder_test.go @@ -117,7 +117,7 @@ func TestBlockBuilder_RoundTrip(t *testing.T) { } block := NewBlock(tc.reader, NewMetrics(nil)) - querier := NewBlockQuerier(block, false) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) err = block.LoadHeaders() require.Nil(t, err) @@ -218,7 +218,7 @@ func TestMergeBuilder(t *testing.T) { itr := NewSliceIter[SeriesWithBloom](data[min:max]) _, err = builder.BuildFrom(itr) require.Nil(t, err) - blocks = append(blocks, NewPeekingIter[*SeriesWithBloom](NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false))) + blocks = append(blocks, NewPeekingIter[*SeriesWithBloom](NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false, DefaultMaxPageSize))) } // We're not testing the ability to extend a bloom in this test @@ -252,7 +252,7 @@ func TestMergeBuilder(t *testing.T) { require.Nil(t, err) block := NewBlock(reader, NewMetrics(nil)) - querier := NewBlockQuerier(block, false) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) EqualIterators[*SeriesWithBloom]( t, @@ -296,7 +296,7 @@ func TestBlockReset(t *testing.T) { _, err = builder.BuildFrom(itr) require.Nil(t, err) block := NewBlock(reader, NewMetrics(nil)) - querier := NewBlockQuerier(block, false) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) rounds := make([][]model.Fingerprint, 2) @@ -362,7 +362,7 @@ func TestMergeBuilder_Roundtrip(t *testing.T) { _, err = builder.BuildFrom(itr) require.Nil(t, err) block := NewBlock(reader, NewMetrics(nil)) - querier := NewBlockQuerier(block, false) + querier := NewBlockQuerier(block, false, DefaultMaxPageSize) // rather than use the block querier directly, collect it's data // so we can use it in a few places later @@ -423,7 +423,7 @@ func TestMergeBuilder_Roundtrip(t *testing.T) { // ensure the new block contains one copy of all the data // by comparing it against an iterator over the source data - mergedBlockQuerier := NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false) + mergedBlockQuerier := NewBlockQuerier(NewBlock(reader, NewMetrics(nil)), false, DefaultMaxPageSize) sourceItr := NewSliceIter[*SeriesWithBloom](PointerSlice[SeriesWithBloom](xs)) EqualIterators[*SeriesWithBloom]( diff --git a/pkg/storage/bloom/v1/fuse_test.go b/pkg/storage/bloom/v1/fuse_test.go index 7ca7267b6ecc7..5c9f2f06f0478 100644 --- a/pkg/storage/bloom/v1/fuse_test.go +++ b/pkg/storage/bloom/v1/fuse_test.go @@ -49,7 +49,7 @@ func TestFusedQuerier(t *testing.T) { require.NoError(t, err) require.False(t, itr.Next()) block := NewBlock(reader, NewMetrics(nil)) - querier := NewBlockQuerier(block, true) + querier := NewBlockQuerier(block, 
true, DefaultMaxPageSize) n := 2 nReqs := numSeries / n @@ -143,7 +143,7 @@ func setupBlockForBenchmark(b *testing.B) (*BlockQuerier, [][]Request, []chan Ou _, err = builder.BuildFrom(itr) require.Nil(b, err) block := NewBlock(reader, NewMetrics(nil)) - querier := NewBlockQuerier(block, true) + querier := NewBlockQuerier(block, true, DefaultMaxPageSize) numRequestChains := 100 seriesPerRequest := 100 diff --git a/pkg/storage/stores/shipper/bloomshipper/cache.go b/pkg/storage/stores/shipper/bloomshipper/cache.go index ebd2d6a53ff8b..3097822fccf75 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache.go @@ -120,13 +120,15 @@ func (b *BlockDirectory) resolveSize() error { // BlockQuerier returns a new block querier from the directory. // The passed function `close` is called when the the returned querier is closed. + func (b BlockDirectory) BlockQuerier( usePool bool, close func() error, + maxPageSize int, metrics *v1.Metrics, ) *CloseableBlockQuerier { return &CloseableBlockQuerier{ - BlockQuerier: v1.NewBlockQuerier(b.Block(metrics), usePool), + BlockQuerier: v1.NewBlockQuerier(b.Block(metrics), usePool, maxPageSize), BlockRef: b.BlockRef, close: close, } diff --git a/pkg/storage/stores/shipper/bloomshipper/config/config.go b/pkg/storage/stores/shipper/bloomshipper/config/config.go index a37a3028e66e8..eda55e8fbbeb3 100644 --- a/pkg/storage/stores/shipper/bloomshipper/config/config.go +++ b/pkg/storage/stores/shipper/bloomshipper/config/config.go @@ -14,6 +14,7 @@ import ( type Config struct { WorkingDirectory string `yaml:"working_directory"` + MaxQueryPageSize flagext.Bytes `yaml:"max_query_page_size"` BlocksDownloadingQueue DownloadingQueueConfig `yaml:"blocks_downloading_queue"` BlocksCache BlocksCacheConfig `yaml:"blocks_cache"` MetasCache cache.Config `yaml:"metas_cache"` @@ -31,6 +32,8 @@ func (cfg *DownloadingQueueConfig) RegisterFlagsWithPrefix(prefix string, f *fla func (c *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { f.StringVar(&c.WorkingDirectory, prefix+"shipper.working-directory", "bloom-shipper", "Working directory to store downloaded Bloom Blocks.") + _ = c.MaxQueryPageSize.Set("64MiB") // default should match the one set in pkg/storage/bloom/v1/bloom.go + f.Var(&c.MaxQueryPageSize, prefix+"max-query-page-size", "Maximum size of bloom pages that should be queried. Larger pages than this limit are skipped when querying blooms to limit memory usage.") c.BlocksDownloadingQueue.RegisterFlagsWithPrefix(prefix+"shipper.blocks-downloading-queue.", f) c.BlocksCache.RegisterFlagsWithPrefixAndDefaults(prefix+"blocks-cache.", "Cache for bloom blocks. ", f, 24*time.Hour) c.MetasCache.RegisterFlagsWithPrefix(prefix+"metas-cache.", "Cache for bloom metas. 
", f) diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher.go b/pkg/storage/stores/shipper/bloomshipper/fetcher.go index f2e40534a0957..74fb9a177a667 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher.go @@ -243,6 +243,7 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc func() error { return f.blocksCache.Release(ctx, key) }, + f.cfg.maxBloomPageSize, f.bloomMetrics, ) } @@ -277,6 +278,7 @@ func (f *Fetcher) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...Fetc func() error { return f.blocksCache.Release(ctx, key) }, + f.cfg.maxBloomPageSize, f.bloomMetrics, ) } diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index c8a162dbd66b9..8daa94bddf00d 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -41,8 +41,9 @@ type StoreWithMetrics interface { } type bloomStoreConfig struct { - workingDir string - numWorkers int + workingDir string + numWorkers int + maxBloomPageSize int } // Compiler check to ensure bloomStoreEntry implements the Store interface @@ -192,8 +193,9 @@ func NewBloomStore( // TODO(chaudum): Remove wrapper cfg := bloomStoreConfig{ - workingDir: storageConfig.BloomShipperConfig.WorkingDirectory, - numWorkers: storageConfig.BloomShipperConfig.BlocksDownloadingQueue.WorkersCount, + workingDir: storageConfig.BloomShipperConfig.WorkingDirectory, + numWorkers: storageConfig.BloomShipperConfig.BlocksDownloadingQueue.WorkersCount, + maxBloomPageSize: int(storageConfig.BloomShipperConfig.MaxQueryPageSize), } if err := util.EnsureDirectory(cfg.workingDir); err != nil { diff --git a/tools/bloom/inspector/main.go b/tools/bloom/inspector/main.go index d7f1a7c89bf32..36d1523714292 100644 --- a/tools/bloom/inspector/main.go +++ b/tools/bloom/inspector/main.go @@ -18,7 +18,7 @@ func main() { r := v1.NewDirectoryBlockReader(path) b := v1.NewBlock(r, v1.NewMetrics(nil)) - q := v1.NewBlockQuerier(b, true) + q := v1.NewBlockQuerier(b, true, v1.DefaultMaxPageSize) md, err := q.Metadata() if err != nil { From 120b76d63e7fd26ea67dfb9713816d594a78e4c4 Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Wed, 27 Mar 2024 00:16:07 -0700 Subject: [PATCH 14/54] chore(blooms): misc fixes (#12369) unbounds bloom-gw-client concurrency removes unused log_gateway_requests param adds note wrt bloom-gw results cache keygen issue add msg field to stats log sets bloomshipper download workers count to 16 default --- docs/sources/configure/_index.md | 7 +------ pkg/bloomgateway/cache.go | 1 + pkg/bloomgateway/client.go | 10 +--------- pkg/bloomgateway/stats.go | 1 + .../stores/shipper/bloomshipper/config/config.go | 2 +- 5 files changed, 5 insertions(+), 16 deletions(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index c1ad2e29eff94..158bca6c00585 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -1915,11 +1915,6 @@ client: # bloom-gateway-client.grpc [grpc_client_config: ] - # Flag to control whether requests sent to the gateway should be logged or - # not. - # CLI flag: -bloom-gateway-client.log-gateway-requests - [log_gateway_requests: | default = false] - results_cache: # The cache block configures the cache backend. # The CLI flags prefix for this block configuration is: @@ -2370,7 +2365,7 @@ bloom_shipper: blocks_downloading_queue: # The count of parallel workers that download Bloom Blocks. 
# CLI flag: -bloom.shipper.blocks-downloading-queue.workers-count - [workers_count: | default = 100] + [workers_count: | default = 16] # Maximum number of task in queue per tenant per bloom-gateway. Enqueuing # the tasks above this limit will fail an error. diff --git a/pkg/bloomgateway/cache.go b/pkg/bloomgateway/cache.go index 6c573cb47d6de..aec04333368d0 100644 --- a/pkg/bloomgateway/cache.go +++ b/pkg/bloomgateway/cache.go @@ -46,6 +46,7 @@ func newCacheKeyGen(limits CacheLimits) keyGen { return keyGen{limits} } +// TODO(owen-d): need to implement our own key-generation which accounts for fingerprint ranges requested. func (k keyGen) GenerateCacheKey(ctx context.Context, tenant string, r resultscache.Request) string { return resultscache.ConstSplitter(k.BloomGatewayCacheKeyInterval(tenant)).GenerateCacheKey(ctx, tenant, r) } diff --git a/pkg/bloomgateway/client.go b/pkg/bloomgateway/client.go index 05eae0360952c..f08397693f86f 100644 --- a/pkg/bloomgateway/client.go +++ b/pkg/bloomgateway/client.go @@ -56,9 +56,6 @@ var ( } }, } - - // NB(chaudum): Should probably be configurable, but I don't want yet another user setting. - maxQueryParallelism = 10 ) type ringGetBuffers struct { @@ -107,10 +104,6 @@ type ClientConfig struct { // GRPCClientConfig configures the gRPC connection between the Bloom Gateway client and the server. GRPCClientConfig grpcclient.Config `yaml:"grpc_client_config"` - // LogGatewayRequests configures if requests sent to the gateway should be logged or not. - // The log messages are of type debug and contain the address of the gateway and the relevant tenant. - LogGatewayRequests bool `yaml:"log_gateway_requests"` - // Ring is the Bloom Gateway ring used to find the appropriate Bloom Gateway instance // this client should talk to. 
Ring ring.ReadRing `yaml:"-"` @@ -130,7 +123,6 @@ func (i *ClientConfig) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { i.GRPCClientConfig.RegisterFlagsWithPrefix(prefix+"grpc", f) i.Cache.RegisterFlagsWithPrefix(prefix+"cache.", f) f.BoolVar(&i.CacheResults, prefix+"cache_results", false, "Flag to control whether to cache bloom gateway client requests/responses.") - f.BoolVar(&i.LogGatewayRequests, prefix+"log-gateway-requests", false, "Flag to control whether requests sent to the gateway should be logged or not.") } func (i *ClientConfig) Validate() error { @@ -258,7 +250,7 @@ func (c *GatewayClient) FilterChunks(ctx context.Context, tenant string, from, t results := make([][]*logproto.GroupedChunkRefs, len(servers)) count := 0 - err = concurrency.ForEachJob(ctx, len(servers), maxQueryParallelism, func(ctx context.Context, i int) error { + err = concurrency.ForEachJob(ctx, len(servers), len(servers), func(ctx context.Context, i int) error { rs := servers[i] // randomize order of addresses so we don't hotspot the first server in the list diff --git a/pkg/bloomgateway/stats.go b/pkg/bloomgateway/stats.go index bc7373d4d653a..a855547b9124c 100644 --- a/pkg/bloomgateway/stats.go +++ b/pkg/bloomgateway/stats.go @@ -53,6 +53,7 @@ func (s *Stats) KVArgs() []any { filterRatio := float64(s.ChunksFiltered) / float64(max(s.ChunksRequested, 1)) return []any{ + "msg", "stats-report", "status", s.Status, "tasks", s.NumTasks, "series_requested", s.SeriesRequested, diff --git a/pkg/storage/stores/shipper/bloomshipper/config/config.go b/pkg/storage/stores/shipper/bloomshipper/config/config.go index eda55e8fbbeb3..791c97bfe1e4d 100644 --- a/pkg/storage/stores/shipper/bloomshipper/config/config.go +++ b/pkg/storage/stores/shipper/bloomshipper/config/config.go @@ -26,7 +26,7 @@ type DownloadingQueueConfig struct { } func (cfg *DownloadingQueueConfig) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { - f.IntVar(&cfg.WorkersCount, prefix+"workers-count", 100, "The count of parallel workers that download Bloom Blocks.") + f.IntVar(&cfg.WorkersCount, prefix+"workers-count", 16, "The count of parallel workers that download Bloom Blocks.") f.IntVar(&cfg.MaxTasksEnqueuedPerTenant, prefix+"max_tasks_enqueued_per_tenant", 10_000, "Maximum number of task in queue per tenant per bloom-gateway. Enqueuing the tasks above this limit will fail an error.") } From ca667aa66e0e3d1b7388813fc52ac79e112f8306 Mon Sep 17 00:00:00 2001 From: Christian Haudum Date: Wed, 27 Mar 2024 09:53:33 +0100 Subject: [PATCH 15/54] chore(bloom-gw): Cleanup tracing implementation (#12368) The huge amount of `bloomgateway.ProcessTask` spans caused problem with Tempo's span limit, resulting in dropped/dangling spans. Since the processing time is now recorded also in a metrics.go-like stats log line, we can remove the spans and only add a log event with the summary to the main span of the request handler. This PR also unifies the event logging in the index gateway, and adds the real processing time (not aggregated processing time) to the request stats. 
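
In rough terms, the request handler now follows the pattern sketched below. This is a simplified excerpt for illustration, not the complete handler: `logger` is the tenant-scoped request logger, the stats helpers (`ContextWithEmptyStats`, `KVArgs`, `AddProcessingTime`) are the ones visible in the diff below, and request validation, task fan-out and response merging are elided.

```go
sp, ctx := opentracing.StartSpanFromContext(ctx, "bloomgateway.FilterChunkRefs")
stats, ctx := ContextWithEmptyStats(ctx)
defer func() {
	// emit one summary per request: a metrics.go-style stats log line plus a
	// single span event, instead of a dedicated span per processed task
	level.Info(logger).Log(stats.KVArgs()...)
	sp.LogKV(stats.KVArgs()...)
	sp.Finish()
}()
// ... enqueue tasks and wait for responses; workers attribute their timings
// via FromContext(task.ctx).AddProcessingTime(duration) and related helpers ...
```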
--- Signed-off-by: Christian Haudum --- pkg/bloomgateway/bloomgateway.go | 24 +++++------ pkg/bloomgateway/processor.go | 35 ++++++++-------- pkg/bloomgateway/processor_test.go | 4 ++ pkg/bloomgateway/stats.go | 40 ++++++++++++++++--- .../indexshipper/indexgateway/gateway.go | 25 ++++-------- 5 files changed, 76 insertions(+), 52 deletions(-) diff --git a/pkg/bloomgateway/bloomgateway.go b/pkg/bloomgateway/bloomgateway.go index 0bd8bf2895f05..482d6d8ef8660 100644 --- a/pkg/bloomgateway/bloomgateway.go +++ b/pkg/bloomgateway/bloomgateway.go @@ -51,6 +51,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/services" "github.com/grafana/dskit/tenant" + "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "go.uber.org/atomic" @@ -61,7 +62,6 @@ import ( "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" "github.com/grafana/loki/pkg/util" "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/spanlogger" ) var errGatewayUnhealthy = errors.New("bloom-gateway is unhealthy in the ring") @@ -200,15 +200,13 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk return nil, err } - sp, ctx := spanlogger.NewWithLogger( - ctx, - log.With(g.logger, "tenant", tenantID), - "bloomgateway.FilterChunkRefs", - ) + logger := log.With(g.logger, "tenant", tenantID) + sp, ctx := opentracing.StartSpanFromContext(ctx, "bloomgateway.FilterChunkRefs") stats, ctx := ContextWithEmptyStats(ctx) defer func() { - level.Info(sp).Log(stats.KVArgs()...) + level.Info(logger).Log(stats.KVArgs()...) + sp.LogKV(stats.KVArgs()...) sp.Finish() }() @@ -249,7 +247,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk }, nil } - sp.Log( + sp.LogKV( "filters", len(filters), "days", len(seriesByDay), "series_requested", len(req.Refs), @@ -279,7 +277,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk for _, task := range tasks { task := task task.enqueueTime = time.Now() - level.Info(sp).Log("msg", "enqueue task", "task", task.ID, "table", task.table, "series", len(task.series)) + level.Info(logger).Log("msg", "enqueue task", "task", task.ID, "table", task.table, "series", len(task.series)) // TODO(owen-d): gracefully handle full queues if err := g.queue.Enqueue(tenantID, nil, task, func() { @@ -293,7 +291,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk go g.consumeTask(ctx, task, tasksCh) } - sp.Log("msg", "enqueued tasks", "duration", time.Since(queueStart).String()) + sp.LogKV("msg", "enqueued tasks", "duration", time.Since(queueStart).String()) remaining := len(tasks) @@ -310,7 +308,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk stats.Status = "cancel" return nil, errors.Wrap(ctx.Err(), "request failed") case task := <-tasksCh: - level.Info(sp).Log("msg", "task done", "task", task.ID, "err", task.Err()) + level.Info(logger).Log("msg", "task done", "task", task.ID, "err", task.Err()) if task.Err() != nil { stats.Status = labelFailure return nil, errors.Wrap(task.Err(), "request failed") @@ -320,7 +318,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk } } - sp.Log("msg", "received all responses") + sp.LogKV("msg", "received all responses") start := time.Now() filtered := filterChunkRefs(req, responses) @@ -348,7 +346,7 @@ func (g *Gateway) FilterChunkRefs(ctx context.Context, req *logproto.FilterChunk 
stats.ChunksRequested = preFilterChunks stats.ChunksFiltered = preFilterChunks - postFilterChunks - sp.Log("msg", "return filtered chunk refs") + sp.LogKV("msg", "return filtered chunk refs") return &logproto.FilterChunkRefResponse{ChunkRefs: filtered}, nil } diff --git a/pkg/bloomgateway/processor.go b/pkg/bloomgateway/processor.go index 74cb19d06911b..5cf805b11a74d 100644 --- a/pkg/bloomgateway/processor.go +++ b/pkg/bloomgateway/processor.go @@ -119,10 +119,19 @@ func (p *processor) processTasks(ctx context.Context, tenant string, day config. return err } - return p.processBlocks(ctx, bqs, data) + start = time.Now() + res := p.processBlocks(ctx, bqs, data) + duration = time.Since(start) + + for _, t := range tasks { + FromContext(t.ctx).AddProcessingTime(duration) + } + + return res } func (p *processor) processBlocks(ctx context.Context, bqs []*bloomshipper.CloseableBlockQuerier, data []blockWithTasks) error { + defer func() { for i := range bqs { if bqs[i] == nil { @@ -162,22 +171,12 @@ func (p *processor) processBlock(_ context.Context, blockQuerier *v1.BlockQuerie tokenizer := v1.NewNGramTokenizer(schema.NGramLen(), 0) iters := make([]v1.PeekingIterator[v1.Request], 0, len(tasks)) - // collect spans & run single defer to avoid blowing call stack - // if there are many tasks - spans := make([]opentracing.Span, 0, len(tasks)) - defer func() { - for _, sp := range spans { - sp.Finish() - } - }() - for _, task := range tasks { - // add spans for each task context for this block - sp, _ := opentracing.StartSpanFromContext(task.ctx, "bloomgateway.ProcessBlock") - spans = append(spans, sp) - md, _ := blockQuerier.Metadata() - blk := bloomshipper.BlockRefFrom(task.Tenant, task.table.String(), md) - sp.LogKV("block", blk.String()) + if sp := opentracing.SpanFromContext(task.ctx); sp != nil { + md, _ := blockQuerier.Metadata() + blk := bloomshipper.BlockRefFrom(task.Tenant, task.table.String(), md) + sp.LogKV("process block", blk.String()) + } it := v1.NewPeekingIter(task.RequestIter(tokenizer)) iters = append(iters, it) @@ -196,7 +195,9 @@ func (p *processor) processBlock(_ context.Context, blockQuerier *v1.BlockQuerie } for _, task := range tasks { - FromContext(task.ctx).AddProcessingTime(duration) + stats := FromContext(task.ctx) + stats.AddTotalProcessingTime(duration) + stats.IncProcessedBlocks() } return err diff --git a/pkg/bloomgateway/processor_test.go b/pkg/bloomgateway/processor_test.go index d9e6a799045e3..d70451a127867 100644 --- a/pkg/bloomgateway/processor_test.go +++ b/pkg/bloomgateway/processor_test.go @@ -7,6 +7,7 @@ import ( "time" "github.com/go-kit/log" + "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" @@ -88,6 +89,9 @@ func (s *dummyStore) FetchBlocks(_ context.Context, refs []bloomshipper.BlockRef func TestProcessor(t *testing.T) { ctx := context.Background() + sp, ctx := opentracing.StartSpanFromContext(ctx, "TestProcessor") + t.Cleanup(sp.Finish) + tenant := "fake" now := mktime("2024-01-27 12:00") metrics := newWorkerMetrics(prometheus.NewPedanticRegistry(), constants.Loki, "bloom_gatway") diff --git a/pkg/bloomgateway/stats.go b/pkg/bloomgateway/stats.go index a855547b9124c..09f78841e544a 100644 --- a/pkg/bloomgateway/stats.go +++ b/pkg/bloomgateway/stats.go @@ -8,10 +8,15 @@ import ( ) type Stats struct { - Status string - NumTasks, NumFilters int - ChunksRequested, ChunksFiltered, SeriesRequested, SeriesFiltered int - QueueTime, MetasFetchTime, BlocksFetchTime, 
ProcessingTime, PostProcessingTime atomic.Duration + Status string + NumTasks, NumFilters int + ChunksRequested, ChunksFiltered int + SeriesRequested, SeriesFiltered int + QueueTime *atomic.Duration + MetasFetchTime, BlocksFetchTime *atomic.Duration + ProcessingTime, TotalProcessingTime *atomic.Duration + PostProcessingTime *atomic.Duration + ProcessedBlocks *atomic.Int32 } type statsKey int @@ -20,7 +25,16 @@ var ctxKey = statsKey(0) // ContextWithEmptyStats returns a context with empty stats. func ContextWithEmptyStats(ctx context.Context) (*Stats, context.Context) { - stats := &Stats{Status: "unknown"} + stats := &Stats{ + Status: "unknown", + ProcessedBlocks: atomic.NewInt32(0), + QueueTime: atomic.NewDuration(0), + MetasFetchTime: atomic.NewDuration(0), + BlocksFetchTime: atomic.NewDuration(0), + ProcessingTime: atomic.NewDuration(0), + TotalProcessingTime: atomic.NewDuration(0), + PostProcessingTime: atomic.NewDuration(0), + } ctx = context.WithValue(ctx, ctxKey, stats) return stats, ctx } @@ -56,6 +70,8 @@ func (s *Stats) KVArgs() []any { "msg", "stats-report", "status", s.Status, "tasks", s.NumTasks, + "filters", s.NumFilters, + "blocks_processed", s.ProcessedBlocks.Load(), "series_requested", s.SeriesRequested, "series_filtered", s.SeriesFiltered, "chunks_requested", s.ChunksRequested, @@ -99,9 +115,23 @@ func (s *Stats) AddProcessingTime(t time.Duration) { s.ProcessingTime.Add(t) } +func (s *Stats) AddTotalProcessingTime(t time.Duration) { + if s == nil { + return + } + s.TotalProcessingTime.Add(t) +} + func (s *Stats) AddPostProcessingTime(t time.Duration) { if s == nil { return } s.PostProcessingTime.Add(t) } + +func (s *Stats) IncProcessedBlocks() { + if s == nil { + return + } + s.ProcessedBlocks.Inc() +} diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go index 350a95e8f988b..60bf8f1f7c113 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go @@ -12,6 +12,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/services" "github.com/grafana/dskit/tenant" + "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" @@ -355,8 +356,8 @@ func (g *Gateway) GetVolume(ctx context.Context, req *logproto.VolumeRequest) (* func (g *Gateway) GetShards(request *logproto.ShardsRequest, server logproto.IndexGateway_GetShardsServer) error { ctx := server.Context() - log, _ := spanlogger.New(context.Background(), "IndexGateway.GetShards") - defer log.Finish() + sp, ctx := opentracing.StartSpanFromContext(ctx, "indexgateway.GetShards") + defer sp.Finish() instanceID, err := tenant.TenantID(ctx) if err != nil { @@ -412,11 +413,8 @@ func (g *Gateway) getShardsWithBlooms( // as getting it _very_ wrong could harm some cache locality benefits on the bloom-gws by // sending multiple requests to the entire keyspace). 
- sp, ctx := spanlogger.NewWithLogger( - ctx, - log.With(g.log, "tenant", instanceID), - "indexgateway.getShardsWithBlooms", - ) + logger := log.With(g.log, "tenant", instanceID) + sp, ctx := opentracing.StartSpanFromContext(ctx, "indexgateway.getShardsWithBlooms") defer sp.Finish() // 1) for all bounds, get chunk refs @@ -473,17 +471,10 @@ func (g *Gateway) getShardsWithBlooms( resp.Shards = shards } - level.Debug(g.log).Log( - "msg", "shards response", - "total_chunks", statistics.Index.TotalChunks, - "post_filter_chunks", statistics.Index.PostFilterChunks, - "shards", len(resp.Shards), - "query", req.Query, - "target_bytes_per_shard", datasize.ByteSize(req.TargetBytesPerShard).HumanReadable(), - ) + sp.LogKV("msg", "send shards response", "shards", len(resp.Shards)) - level.Debug(sp).Log( - "msg", "shards response", + level.Debug(logger).Log( + "msg", "send shards response", "total_chunks", statistics.Index.TotalChunks, "post_filter_chunks", statistics.Index.PostFilterChunks, "shards", len(resp.Shards), From bdad1d331d79b7b6cb156f91f26f31f6aee358cc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 27 Mar 2024 10:52:30 +0000 Subject: [PATCH 16/54] chore(deps): update module google.golang.org/protobuf to v1.33.0 [security] (main) (#12137) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Michel Hollands <42814411+MichelHollands@users.noreply.github.com> --- pkg/push/go.mod | 2 +- pkg/push/go.sum | 4 ++-- tools/lambda-promtail/go.mod | 2 +- tools/lambda-promtail/go.sum | 3 ++- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pkg/push/go.mod b/pkg/push/go.mod index 9cddcb5f2e342..067ca8f02c80c 100644 --- a/pkg/push/go.mod +++ b/pkg/push/go.mod @@ -18,7 +18,7 @@ require ( golang.org/x/sys v0.13.0 // indirect golang.org/x/text v0.13.0 // indirect google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect - google.golang.org/protobuf v1.30.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/pkg/push/go.sum b/pkg/push/go.sum index e7e0e8118dbff..3ed0ecc1d4b45 100644 --- a/pkg/push/go.sum +++ b/pkg/push/go.sum @@ -67,8 +67,8 @@ google.golang.org/grpc v1.56.3 h1:8I4C0Yq1EjstUzUJzpcRVbuYA2mODtEmpWiQoN/b2nc= google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= diff --git a/tools/lambda-promtail/go.mod b/tools/lambda-promtail/go.mod index e6a8e27578c21..2a40fa4e71bf5 100644 --- a/tools/lambda-promtail/go.mod +++ b/tools/lambda-promtail/go.mod @@ -111,7 +111,7 @@ require ( 
google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f // indirect google.golang.org/grpc v1.52.3 // indirect - google.golang.org/protobuf v1.28.1 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/tools/lambda-promtail/go.sum b/tools/lambda-promtail/go.sum index bedf3022d5312..dbc6c52383867 100644 --- a/tools/lambda-promtail/go.sum +++ b/tools/lambda-promtail/go.sum @@ -1066,8 +1066,9 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From aca2a38ad6bf1cf7625d41d130b63ee276eb0a70 Mon Sep 17 00:00:00 2001 From: Paul Rogers <129207811+paul1r@users.noreply.github.com> Date: Wed, 27 Mar 2024 07:50:25 -0400 Subject: [PATCH 17/54] test: gcs object client test data race (#12324) --- pkg/storage/chunk/client/gcp/gcs_object_client.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pkg/storage/chunk/client/gcp/gcs_object_client.go b/pkg/storage/chunk/client/gcp/gcs_object_client.go index d4b35e48d9df5..2e340c038e723 100644 --- a/pkg/storage/chunk/client/gcp/gcs_object_client.go +++ b/pkg/storage/chunk/client/gcp/gcs_object_client.go @@ -319,8 +319,7 @@ func gcsTransport(ctx context.Context, scope string, insecure bool, http2 bool, transportOptions := []option.ClientOption{option.WithScopes(scope)} if insecure { customTransport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} - // When using `insecure` (testing only), we add a fake API key as well to skip credential chain lookups. - transportOptions = append(transportOptions, option.WithAPIKey("insecure")) + transportOptions = append(transportOptions, option.WithoutAuthentication()) } if serviceAccount.String() != "" { transportOptions = append(transportOptions, option.WithCredentialsJSON([]byte(serviceAccount.String()))) From 016daa51d5fa9037710a7ff8bd43eb17d59b3289 Mon Sep 17 00:00:00 2001 From: Karsten Jeschkies Date: Wed, 27 Mar 2024 14:28:32 +0100 Subject: [PATCH 18/54] fix: Track all OTLP metadata bytes in usage tracker. 
(#12376) Co-authored-by: Vladyslav Diachenko <82767850+vlad-diachenko@users.noreply.github.com> --- pkg/loghttp/push/otlp.go | 8 +++++++ pkg/loghttp/push/otlp_test.go | 39 ++++++++++------------------------- pkg/loghttp/push/push_test.go | 9 ++++++++ 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/pkg/loghttp/push/otlp.go b/pkg/loghttp/push/otlp.go index 8136d6995dc62..a001b52b210f6 100644 --- a/pkg/loghttp/push/otlp.go +++ b/pkg/loghttp/push/otlp.go @@ -151,6 +151,10 @@ func otlpToLokiPushRequest(ctx context.Context, ld plog.Logs, userID string, ten retentionPeriodForUser := tenantsRetention.RetentionPeriodFor(userID, lbs) stats.StructuredMetadataBytes[retentionPeriodForUser] += int64(resourceAttributesAsStructuredMetadataSize) + if tracker != nil { + tracker.ReceivedBytesAdd(ctx, userID, retentionPeriodForUser, lbs, float64(resourceAttributesAsStructuredMetadataSize)) + } + stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser] = append(stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser], resourceAttributesAsStructuredMetadata...) for j := 0; j < sls.Len(); j++ { @@ -202,6 +206,10 @@ func otlpToLokiPushRequest(ctx context.Context, ld plog.Logs, userID string, ten scopeAttributesAsStructuredMetadataSize := labelsSize(scopeAttributesAsStructuredMetadata) stats.StructuredMetadataBytes[retentionPeriodForUser] += int64(scopeAttributesAsStructuredMetadataSize) + if tracker != nil { + tracker.ReceivedBytesAdd(ctx, userID, retentionPeriodForUser, lbs, float64(scopeAttributesAsStructuredMetadataSize)) + } + stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser] = append(stats.ResourceAndSourceMetadataLabels[retentionPeriodForUser], scopeAttributesAsStructuredMetadata...) for k := 0; k < logs.Len(); k++ { log := logs.At(k) diff --git a/pkg/loghttp/push/otlp_test.go b/pkg/loghttp/push/otlp_test.go index 5202505fd1bf1..c711c85905cf2 100644 --- a/pkg/loghttp/push/otlp_test.go +++ b/pkg/loghttp/push/otlp_test.go @@ -26,7 +26,6 @@ func TestOTLPToLokiPushRequest(t *testing.T) { expectedPushRequest logproto.PushRequest expectedStats Stats otlpConfig OTLPConfig - tracker UsageTracker }{ { name: "no logs", @@ -129,7 +128,6 @@ func TestOTLPToLokiPushRequest(t *testing.T) { { name: "service.name not defined in resource attributes", otlpConfig: DefaultOTLPConfig(defaultGlobalOTLPConfig), - tracker: NewMockTracker(), generateLogs: func() plog.Logs { ld := plog.NewLogs() ld.ResourceLogs().AppendEmpty().Resource().Attributes().PutStr("service.namespace", "foo") @@ -164,32 +162,7 @@ func TestOTLPToLokiPushRequest(t *testing.T) { }, StreamLabelsSize: 47, MostRecentEntryTimestamp: now, - /* - logLinesBytesCustomTrackers: []customTrackerPair{ - { - Labels: []labels.Label{ - {Name: "service_namespace", Value: "foo"}, - {Name: "tracker", Value: "foo"}, - }, - Bytes: map[time.Duration]int64{ - time.Hour: 9, - }, - }, - }, - structuredMetadataBytesCustomTrackers: []customTrackerPair{ - { - Labels: []labels.Label{ - {Name: "service_namespace", Value: "foo"}, - {Name: "tracker", Value: "foo"}, - }, - Bytes: map[time.Duration]int64{ - time.Hour: 0, - }, - }, - }, - */ }, - //expectedTrackedUsaged: }, { name: "resource attributes and scope attributes stored as structured metadata", @@ -518,9 +491,19 @@ func TestOTLPToLokiPushRequest(t *testing.T) { } { t.Run(tc.name, func(t *testing.T) { stats := newPushStats() - pushReq := otlpToLokiPushRequest(context.Background(), tc.generateLogs(), "foo", fakeRetention{}, tc.otlpConfig, tc.tracker, stats) + tracker := NewMockTracker() + pushReq := 
otlpToLokiPushRequest(context.Background(), tc.generateLogs(), "foo", fakeRetention{}, tc.otlpConfig, tracker, stats) require.Equal(t, tc.expectedPushRequest, *pushReq) require.Equal(t, tc.expectedStats, *stats) + + totalBytes := 0.0 + for _, b := range stats.LogLinesBytes { + totalBytes += float64(b) + } + for _, b := range stats.StructuredMetadataBytes { + totalBytes += float64(b) + } + require.Equal(t, totalBytes, tracker.Total(), "Total tracked bytes must equal total bytes of the stats.") }) } } diff --git a/pkg/loghttp/push/push_test.go b/pkg/loghttp/push/push_test.go index 9f470fc0eb9e4..8c4768615ce6c 100644 --- a/pkg/loghttp/push/push_test.go +++ b/pkg/loghttp/push/push_test.go @@ -226,6 +226,7 @@ func TestParseRequest(t *testing.T) { assert.NotNil(t, data, "Should give data for %d", index) require.Equal(t, test.expectedStructuredMetadataBytes, structuredMetadataBytesReceived) require.Equal(t, test.expectedBytes, bytesReceived) + require.Equalf(t, tracker.Total(), float64(bytesReceived), "tracked usage bytes must equal bytes received metric") require.Equal(t, test.expectedLines, linesReceived) require.Equal(t, float64(test.expectedStructuredMetadataBytes), testutil.ToFloat64(structuredMetadataBytesIngested.WithLabelValues("fake", ""))) require.Equal(t, float64(test.expectedBytes), testutil.ToFloat64(bytesIngested.WithLabelValues("fake", ""))) @@ -257,6 +258,14 @@ func NewMockTracker() *MockCustomTracker { } } +func (t *MockCustomTracker) Total() float64 { + total := float64(0) + for _, v := range t.receivedBytes { + total += v + } + return total +} + // DiscardedBytesAdd implements CustomTracker. func (t *MockCustomTracker) DiscardedBytesAdd(_ context.Context, _, _ string, labels labels.Labels, value float64) { t.discardedBytes[labels.String()] += value From f99e6ac4b3cda953e09785a09d076a8091a2ab14 Mon Sep 17 00:00:00 2001 From: Dylan Guedes Date: Wed, 27 Mar 2024 10:51:21 -0300 Subject: [PATCH 19/54] docs: Add missing changelog entry for mTLS memcached addition (#12377) --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 61dd741057c9e..ca0216ae345e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -61,6 +61,7 @@ * [11817](https://github.com/grafana/loki/pull/11817) **ashwanthgoli** Ruler: Add support for filtering results of `/prometheus/api/v1/rules` endpoint by rule_name, rule_group, file and type. * [11897](https://github.com/grafana/loki/pull/11897) **ashwanthgoli** Metadata: Introduces a separate split interval of `split_recent_metadata_queries_by_interval` for `recent_metadata_query_window` to help with caching recent metadata query results. * [11970](https://github.com/grafana/loki/pull/11897) **masslessparticle** Ksonnet: Introduces memory limits to the compactor configuration to avoid unbounded memory usage. +* [12318](https://github.com/grafana/loki/pull/12318) **DylanGuedes** Memcached: Add mTLS support. ##### Fixes * [11074](https://github.com/grafana/loki/pull/11074) **hainenber** Fix panic in lambda-promtail due to mishandling of empty DROP_LABELS env var. 
From 2b8db8b691d1fd3c1896598eb504175b1642e95f Mon Sep 17 00:00:00 2001 From: Travis Patterson Date: Wed, 27 Mar 2024 08:48:04 -0600 Subject: [PATCH 20/54] fix: attempt non-string label filtering when errors are present (#12378) --- pkg/logql/log/label_filter.go | 45 +++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go index a89f324008e16..e7e10b404d612 100644 --- a/pkg/logql/log/label_filter.go +++ b/pkg/logql/log/label_filter.go @@ -173,10 +173,6 @@ func NewBytesLabelFilter(t LabelFilterType, name string, b uint64) *BytesLabelFi } func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) { - if lbs.HasErr() { - // if there's an error only the string matchers can filter it out. - return line, true - } v, ok := lbs.Get(d.Name) if !ok { // we have not found this label. @@ -184,8 +180,11 @@ func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([] } value, err := humanize.ParseBytes(v) if err != nil { - lbs.SetErr(errLabelFilter) - lbs.SetErrorDetails(err.Error()) + // Don't overwrite what might be a more useful error + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + lbs.SetErrorDetails(err.Error()) + } return line, true } switch d.Type { @@ -202,7 +201,9 @@ func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([] case LabelFilterLesserThanOrEqual: return line, value <= d.Value default: - lbs.SetErr(errLabelFilter) + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + } return line, true } } @@ -240,10 +241,6 @@ func NewDurationLabelFilter(t LabelFilterType, name string, d time.Duration) *Du } func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) { - if lbs.HasErr() { - // if there's an error only the string matchers can filter out. - return line, true - } v, ok := lbs.Get(d.Name) if !ok { // we have not found this label. @@ -251,8 +248,11 @@ func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) } value, err := time.ParseDuration(v) if err != nil { - lbs.SetErr(errLabelFilter) - lbs.SetErrorDetails(err.Error()) + // Don't overwrite what might be a more useful error + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + lbs.SetErrorDetails(err.Error()) + } return line, true } switch d.Type { @@ -269,7 +269,9 @@ func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) case LabelFilterLesserThanOrEqual: return line, value <= d.Value default: - lbs.SetErr(errLabelFilter) + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + } return line, true } } @@ -302,10 +304,6 @@ func NewNumericLabelFilter(t LabelFilterType, name string, v float64) *NumericLa } func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) { - if lbs.HasErr() { - // if there's an error only the string matchers can filter out. - return line, true - } v, ok := lbs.Get(n.Name) if !ok { // we have not found this label. 
@@ -313,8 +311,11 @@ func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ( } value, err := strconv.ParseFloat(v, 64) if err != nil { - lbs.SetErr(errLabelFilter) - lbs.SetErrorDetails(err.Error()) + // Don't overwrite what might be a more useful error + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + lbs.SetErrorDetails(err.Error()) + } return line, true } switch n.Type { @@ -331,7 +332,9 @@ func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ( case LabelFilterLesserThanOrEqual: return line, value <= n.Value default: - lbs.SetErr(errLabelFilter) + if !lbs.HasErr() { + lbs.SetErr(errLabelFilter) + } return line, true } From c9e5c7f991abb7a6b24f9155480bea9e74f7c633 Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Wed, 27 Mar 2024 10:27:44 -0700 Subject: [PATCH 21/54] chore(blooms): removes bloom-gw & bloom-compactor from all non-microservice targets (#12381) --- pkg/loki/loki.go | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/pkg/loki/loki.go b/pkg/loki/loki.go index 5eab58e357c53..4e2b7df3ae35f 100644 --- a/pkg/loki/loki.go +++ b/pkg/loki/loki.go @@ -663,12 +663,9 @@ func (t *Loki) setupModuleManager() error { Read: {QueryFrontend, Querier}, Write: {Ingester, Distributor}, - Backend: {QueryScheduler, Ruler, Compactor, IndexGateway, BloomGateway, BloomCompactor}, + Backend: {QueryScheduler, Ruler, Compactor, IndexGateway}, - // TODO(salvacorts): We added the BloomCompactor component to the `all` target to ease testing. - // We should remove it before releasing the feature since we don’t think any user running - // the single binary will benefit from the blooms given their scale in terms of ingested data - All: {QueryScheduler, QueryFrontend, Querier, Ingester, Distributor, Ruler, Compactor, BloomCompactor}, + All: {QueryScheduler, QueryFrontend, Querier, Ingester, Distributor, Ruler, Compactor}, } if t.Cfg.Querier.PerRequestLimitsEnabled { From c1edb8220c509bd174aee564f9fb4de07eae8993 Mon Sep 17 00:00:00 2001 From: J Stickler Date: Wed, 27 Mar 2024 14:46:00 -0400 Subject: [PATCH 22/54] docs: fix formatting in structured metadata topic (#12382) --- docs/sources/get-started/labels/structured-metadata.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/sources/get-started/labels/structured-metadata.md b/docs/sources/get-started/labels/structured-metadata.md index 587306b2d8526..4864576a3fefb 100644 --- a/docs/sources/get-started/labels/structured-metadata.md +++ b/docs/sources/get-started/labels/structured-metadata.md @@ -21,8 +21,8 @@ Structured metadata can also be used to query commonly needed metadata from log You should only use structured metadata in the following situations: - • If you are ingesting data in OpenTelemetry format, using the Grafana Agent or an OpenTelemetry Collector. Structured metadata was designed to support native ingestion of OpenTelemetry data. - • If you have high cardinality metadata that should not be used as a label and does not exist in the log line. Some examples might include `process_id` or `thread_id` or Kubernetes pod names. +- If you are ingesting data in OpenTelemetry format, using the Grafana Agent or an OpenTelemetry Collector. Structured metadata was designed to support native ingestion of OpenTelemetry data. +- If you have high cardinality metadata that should not be used as a label and does not exist in the log line. Some examples might include `process_id` or `thread_id` or Kubernetes pod names. 
It is an antipattern to extract information that already exists in your log lines and put it into structured metadata. From 15dc2bac04b9119b1e6f0358614da22150b9ad1a Mon Sep 17 00:00:00 2001 From: Christian Haudum Date: Wed, 27 Mar 2024 19:55:13 +0100 Subject: [PATCH 23/54] feat(bloomstore): Support for sharding blocks across multiple different directories (#12375) Signed-off-by: Christian Haudum --- docs/sources/configure/_index.md | 5 +- pkg/bloomgateway/bloomgateway_test.go | 2 +- pkg/loki/config_wrapper.go | 7 ++- pkg/loki/config_wrapper_test.go | 5 +- pkg/loki/modules_test.go | 2 +- .../stores/shipper/bloomshipper/cache.go | 13 ++++-- .../stores/shipper/bloomshipper/cache_test.go | 2 +- .../stores/shipper/bloomshipper/client.go | 7 ++- .../shipper/bloomshipper/client_test.go | 4 +- .../shipper/bloomshipper/config/config.go | 10 ++-- .../stores/shipper/bloomshipper/fetcher.go | 6 ++- .../shipper/bloomshipper/fetcher_test.go | 12 +++-- .../stores/shipper/bloomshipper/resolver.go | 46 +++++++++++++++++++ .../shipper/bloomshipper/resolver_test.go | 46 +++++++++++++++++++ .../stores/shipper/bloomshipper/store.go | 10 ++-- .../stores/shipper/bloomshipper/store_test.go | 2 +- 16 files changed, 148 insertions(+), 31 deletions(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 158bca6c00585..93582d3897b6f 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2353,9 +2353,10 @@ tsdb_shipper: # Configures Bloom Shipper. bloom_shipper: - # Working directory to store downloaded Bloom Blocks. + # Working directory to store downloaded bloom blocks. Supports multiple + # directories, separated by comma. # CLI flag: -bloom.shipper.working-directory - [working_directory: | default = "bloom-shipper"] + [working_directory: | default = "/data/blooms"] # Maximum size of bloom pages that should be queried. Larger pages than this # limit are skipped when querying blooms to limit memory usage. 
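A minimal sketch of the comma-separated form described in the documentation change above, assuming the `storage_config` nesting used elsewhere in the configuration reference; each block and meta reference is hashed (FNV-32) to pick one of the listed directories, as the `ShardedPrefixedResolver` added later in this patch shows.

```yaml
# Sketch: shard downloaded bloom blocks across two local volumes.
# The value is parsed as a comma-separated list (flagext.StringSliceCSV).
storage_config:
  bloom_shipper:
    working_directory: /data/blooms-0,/data/blooms-1
```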
diff --git a/pkg/bloomgateway/bloomgateway_test.go b/pkg/bloomgateway/bloomgateway_test.go index 45c9a3926c157..f705bb5eb0919 100644 --- a/pkg/bloomgateway/bloomgateway_test.go +++ b/pkg/bloomgateway/bloomgateway_test.go @@ -72,7 +72,7 @@ func setupBloomStore(t *testing.T) *bloomshipper.BloomStore { } storageCfg := storage.Config{ BloomShipperConfig: bloomshipperconfig.Config{ - WorkingDirectory: t.TempDir(), + WorkingDirectory: []string{t.TempDir()}, BlocksDownloadingQueue: bloomshipperconfig.DownloadingQueueConfig{ WorkersCount: 1, }, diff --git a/pkg/loki/config_wrapper.go b/pkg/loki/config_wrapper.go index 8a5f6c6811250..c602f53cc6dd1 100644 --- a/pkg/loki/config_wrapper.go +++ b/pkg/loki/config_wrapper.go @@ -409,8 +409,11 @@ func applyPathPrefixDefaults(r, defaults *ConfigWrapper) { if r.CompactorConfig.WorkingDirectory == defaults.CompactorConfig.WorkingDirectory { r.CompactorConfig.WorkingDirectory = fmt.Sprintf("%s/compactor", prefix) } - if r.StorageConfig.BloomShipperConfig.WorkingDirectory == defaults.StorageConfig.BloomShipperConfig.WorkingDirectory { - r.StorageConfig.BloomShipperConfig.WorkingDirectory = fmt.Sprintf("%s/blooms", prefix) + if len(r.StorageConfig.BloomShipperConfig.WorkingDirectory) == 1 && + len(r.StorageConfig.BloomShipperConfig.WorkingDirectory) == len(defaults.StorageConfig.BloomShipperConfig.WorkingDirectory) && + + r.StorageConfig.BloomShipperConfig.WorkingDirectory[0] == defaults.StorageConfig.BloomShipperConfig.WorkingDirectory[0] { + _ = r.StorageConfig.BloomShipperConfig.WorkingDirectory.Set(fmt.Sprintf("%s/blooms", prefix)) } } } diff --git a/pkg/loki/config_wrapper_test.go b/pkg/loki/config_wrapper_test.go index 41705f012f020..f6e22f74add51 100644 --- a/pkg/loki/config_wrapper_test.go +++ b/pkg/loki/config_wrapper_test.go @@ -9,6 +9,7 @@ import ( "testing" "time" + "github.com/grafana/dskit/flagext" "github.com/grafana/dskit/netutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -100,7 +101,7 @@ common: assert.EqualValues(t, "/opt/loki/rules-temp", config.Ruler.RulePath) assert.EqualValues(t, "/opt/loki/wal", config.Ingester.WAL.Dir) assert.EqualValues(t, "/opt/loki/compactor", config.CompactorConfig.WorkingDirectory) - assert.EqualValues(t, "/opt/loki/blooms", config.StorageConfig.BloomShipperConfig.WorkingDirectory) + assert.EqualValues(t, flagext.StringSliceCSV{"/opt/loki/blooms"}, config.StorageConfig.BloomShipperConfig.WorkingDirectory) }) t.Run("accepts paths both with and without trailing slash", func(t *testing.T) { @@ -112,7 +113,7 @@ common: assert.EqualValues(t, "/opt/loki/rules-temp", config.Ruler.RulePath) assert.EqualValues(t, "/opt/loki/wal", config.Ingester.WAL.Dir) assert.EqualValues(t, "/opt/loki/compactor", config.CompactorConfig.WorkingDirectory) - assert.EqualValues(t, "/opt/loki/blooms", config.StorageConfig.BloomShipperConfig.WorkingDirectory) + assert.EqualValues(t, flagext.StringSliceCSV{"/opt/loki/blooms"}, config.StorageConfig.BloomShipperConfig.WorkingDirectory) }) t.Run("does not rewrite custom (non-default) paths passed via config file", func(t *testing.T) { diff --git a/pkg/loki/modules_test.go b/pkg/loki/modules_test.go index 61cc9198bbf28..1c8945b51a37e 100644 --- a/pkg/loki/modules_test.go +++ b/pkg/loki/modules_test.go @@ -367,7 +367,7 @@ func minimalWorkingConfig(t *testing.T, dir, target string, cfgTransformers ...f cfg.StorageConfig = storage.Config{ FSConfig: local.FSConfig{Directory: dir}, BloomShipperConfig: bloomshipperconfig.Config{ - WorkingDirectory: filepath.Join(dir, 
"blooms"), + WorkingDirectory: []string{filepath.Join(dir, "blooms")}, BlocksDownloadingQueue: bloomshipperconfig.DownloadingQueueConfig{ WorkersCount: 1, }, diff --git a/pkg/storage/stores/shipper/bloomshipper/cache.go b/pkg/storage/stores/shipper/bloomshipper/cache.go index 3097822fccf75..e7fcfaff1666a 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache.go @@ -12,6 +12,7 @@ import ( v1 "github.com/grafana/loki/pkg/storage/bloom/v1" "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/pkg/util" ) type CloseableBlockQuerier struct { @@ -34,10 +35,14 @@ func (c *CloseableBlockQuerier) SeriesIter() (v1.PeekingIterator[*v1.SeriesWithB return v1.NewPeekingIter[*v1.SeriesWithBloom](c.BlockQuerier), nil } -func LoadBlocksDirIntoCache(path string, c Cache, logger log.Logger) error { - level.Debug(logger).Log("msg", "load bloomshipper working directory into cache", "path", path) - keys, values := loadBlockDirectories(path, logger) - return c.PutMany(context.Background(), keys, values) +func LoadBlocksDirIntoCache(paths []string, c Cache, logger log.Logger) error { + var err util.MultiError + for _, path := range paths { + level.Debug(logger).Log("msg", "load bloomshipper working directory into cache", "path", path) + keys, values := loadBlockDirectories(path, logger) + err.Add(c.PutMany(context.Background(), keys, values)) + } + return err.Err() } func loadBlockDirectories(root string, logger log.Logger) (keys []string, values []BlockDirectory) { diff --git a/pkg/storage/stores/shipper/bloomshipper/cache_test.go b/pkg/storage/stores/shipper/bloomshipper/cache_test.go index ca591efebb993..eb2a061c775bb 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache_test.go @@ -88,7 +88,7 @@ func Test_LoadBlocksDirIntoCache(t *testing.T) { } c := NewFsBlocksCache(cfg, nil, log.NewNopLogger()) - err := LoadBlocksDirIntoCache(wd, c, logger) + err := LoadBlocksDirIntoCache([]string{wd, t.TempDir()}, c, logger) require.NoError(t, err) require.Equal(t, 1, len(c.entries)) diff --git a/pkg/storage/stores/shipper/bloomshipper/client.go b/pkg/storage/stores/shipper/bloomshipper/client.go index f5258570d869c..7e9128a9971a3 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client.go +++ b/pkg/storage/stores/shipper/bloomshipper/client.go @@ -256,9 +256,14 @@ type BloomClient struct { } func NewBloomClient(cfg bloomStoreConfig, client client.ObjectClient, logger log.Logger) (*BloomClient, error) { + fsResolver, err := NewShardedPrefixedResolver(cfg.workingDirs, defaultKeyResolver{}) + if err != nil { + return nil, errors.Wrap(err, "creating fs resolver") + } + return &BloomClient{ KeyResolver: defaultKeyResolver{}, // TODO(owen-d): hook into schema, similar to `{,Parse}ExternalKey` - fsResolver: NewPrefixedResolver(cfg.workingDir, defaultKeyResolver{}), + fsResolver: fsResolver, concurrency: cfg.numWorkers, client: client, logger: logger, diff --git a/pkg/storage/stores/shipper/bloomshipper/client_test.go b/pkg/storage/stores/shipper/bloomshipper/client_test.go index e5bbe3b5b1bf5..cee23671b7216 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/client_test.go @@ -41,8 +41,8 @@ func newMockBloomClient(t *testing.T) (*BloomClient, string) { dir := t.TempDir() logger := log.NewLogfmtLogger(os.Stderr) cfg := bloomStoreConfig{ - workingDir: dir, - numWorkers: 3, + workingDirs: []string{dir}, + numWorkers: 3, } 
client, err := NewBloomClient(cfg, oc, logger) require.NoError(t, err) diff --git a/pkg/storage/stores/shipper/bloomshipper/config/config.go b/pkg/storage/stores/shipper/bloomshipper/config/config.go index 791c97bfe1e4d..3aef86cabdf22 100644 --- a/pkg/storage/stores/shipper/bloomshipper/config/config.go +++ b/pkg/storage/stores/shipper/bloomshipper/config/config.go @@ -4,7 +4,6 @@ package config import ( "errors" "flag" - "strings" "time" "github.com/grafana/dskit/flagext" @@ -13,7 +12,7 @@ import ( ) type Config struct { - WorkingDirectory string `yaml:"working_directory"` + WorkingDirectory flagext.StringSliceCSV `yaml:"working_directory"` MaxQueryPageSize flagext.Bytes `yaml:"max_query_page_size"` BlocksDownloadingQueue DownloadingQueueConfig `yaml:"blocks_downloading_queue"` BlocksCache BlocksCacheConfig `yaml:"blocks_cache"` @@ -31,7 +30,8 @@ func (cfg *DownloadingQueueConfig) RegisterFlagsWithPrefix(prefix string, f *fla } func (c *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { - f.StringVar(&c.WorkingDirectory, prefix+"shipper.working-directory", "bloom-shipper", "Working directory to store downloaded Bloom Blocks.") + c.WorkingDirectory = []string{"/data/blooms"} + f.Var(&c.WorkingDirectory, prefix+"shipper.working-directory", "Working directory to store downloaded bloom blocks. Supports multiple directories, separated by comma.") _ = c.MaxQueryPageSize.Set("64MiB") // default should match the one set in pkg/storage/bloom/v1/bloom.go f.Var(&c.MaxQueryPageSize, prefix+"max-query-page-size", "Maximum size of bloom pages that should be queried. Larger pages than this limit are skipped when querying blooms to limit memory usage.") c.BlocksDownloadingQueue.RegisterFlagsWithPrefix(prefix+"shipper.blocks-downloading-queue.", f) @@ -40,8 +40,8 @@ func (c *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { } func (c *Config) Validate() error { - if strings.TrimSpace(c.WorkingDirectory) == "" { - return errors.New("working directory must be specified") + if len(c.WorkingDirectory) == 0 { + return errors.New("at least one working directory must be specified") } return nil } diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher.go b/pkg/storage/stores/shipper/bloomshipper/fetcher.go index 74fb9a177a667..b9483675f21cc 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher.go @@ -91,12 +91,16 @@ func NewFetcher( logger log.Logger, bloomMetrics *v1.Metrics, ) (*Fetcher, error) { + localFSResolver, err := NewShardedPrefixedResolver(cfg.workingDirs, defaultKeyResolver{}) + if err != nil { + return nil, errors.Wrap(err, "creating fs resolver") + } fetcher := &Fetcher{ cfg: cfg, client: client, metasCache: metasCache, blocksCache: blocksCache, - localFSResolver: NewPrefixedResolver(cfg.workingDir, defaultKeyResolver{}), + localFSResolver: localFSResolver, metrics: newFetcherMetrics(reg, constants.Loki, "bloom_store"), bloomMetrics: bloomMetrics, logger: logger, diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go index e51d153098381..ca3fc006c2688 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go @@ -100,7 +100,7 @@ func TestMetasFetcher(t *testing.T) { t.Run(test.name, func(t *testing.T) { ctx := context.Background() metasCache := cache.NewMockCache() - cfg := bloomStoreConfig{workingDir: t.TempDir(), numWorkers: 1} + cfg := 
bloomStoreConfig{workingDirs: []string{t.TempDir()}, numWorkers: 1} oc, err := local.NewFSObjectClient(local.FSConfig{Directory: dir}) require.NoError(t, err) @@ -259,7 +259,7 @@ func TestFetcher_DownloadQueue(t *testing.T) { func TestFetcher_LoadBlocksFromFS(t *testing.T) { base := t.TempDir() - cfg := bloomStoreConfig{workingDir: base, numWorkers: 1} + cfg := bloomStoreConfig{workingDirs: []string{base}, numWorkers: 1} resolver := NewPrefixedResolver(base, defaultKeyResolver{}) refs := []BlockRef{ @@ -312,9 +312,13 @@ func createBlockDir(t *testing.T, path string) { } func TestFetcher_IsBlockDir(t *testing.T) { - cfg := bloomStoreConfig{numWorkers: 1} + cfg := bloomStoreConfig{ + numWorkers: 1, + workingDirs: []string{t.TempDir()}, + } - fetcher, _ := NewFetcher(cfg, nil, nil, nil, nil, log.NewNopLogger(), v1.NewMetrics(nil)) + fetcher, err := NewFetcher(cfg, nil, nil, nil, nil, log.NewNopLogger(), v1.NewMetrics(nil)) + require.NoError(t, err) t.Run("path does not exist", func(t *testing.T) { base := t.TempDir() diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver.go b/pkg/storage/stores/shipper/bloomshipper/resolver.go index b88a48758d63d..7fb6652ebd174 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver.go @@ -2,6 +2,8 @@ package bloomshipper import ( "fmt" + "hash" + "hash/fnv" "path" "path/filepath" "strconv" @@ -150,6 +152,50 @@ func (p PrefixedResolver) Block(ref BlockRef) Location { } } +type hashable interface { + Hash(hash.Hash32) error +} + +type ShardedPrefixedResolver struct { + prefixes []string + KeyResolver +} + +func NewShardedPrefixedResolver(prefixes []string, resolver KeyResolver) (KeyResolver, error) { + n := len(prefixes) + switch n { + case 0: + return nil, fmt.Errorf("requires at least 1 prefix") + case 1: + return NewPrefixedResolver(prefixes[0], resolver), nil + default: + return ShardedPrefixedResolver{ + prefixes: prefixes, + KeyResolver: resolver, + }, nil + } +} + +func (r ShardedPrefixedResolver) prefix(ref hashable) key { + h := fnv.New32() + _ = ref.Hash(h) + return key(r.prefixes[h.Sum32()%uint32(len(r.prefixes))]) +} + +func (r ShardedPrefixedResolver) Meta(ref MetaRef) Location { + return locations{ + r.prefix(ref), + r.KeyResolver.Meta(ref), + } +} + +func (r ShardedPrefixedResolver) Block(ref BlockRef) Location { + return locations{ + r.prefix(ref), + r.KeyResolver.Block(ref), + } +} + type Location interface { Addr() string // object storage location LocalPath() string // local path version diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver_test.go b/pkg/storage/stores/shipper/bloomshipper/resolver_test.go index b2aa7e60a4b53..151b3bc11bb47 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver_test.go @@ -53,3 +53,49 @@ func TestResolver_ParseBlockKey(t *testing.T) { require.NoError(t, err) require.Equal(t, ref, parsed) } + +func TestResolver_ShardedPrefixedResolver(t *testing.T) { + + blockRef := BlockRef{ + Ref: Ref{ + TenantID: "tenant", + TableName: "table_1", + Bounds: v1.NewBounds(0x0000, 0xffff), + StartTimestamp: 0, + EndTimestamp: 3600000, + Checksum: 48350, + }, + } + + metaRef := MetaRef{ + Ref: Ref{ + TenantID: "tenant", + TableName: "table_1", + Bounds: v1.NewBounds(0x0000, 0xffff), + Checksum: 43981, + }, + } + + t.Run("empty prefixes cause error", func(t *testing.T) { + _, err := NewShardedPrefixedResolver([]string{}, defaultKeyResolver{}) + require.ErrorContains(t, err, 
"requires at least 1 prefix") + }) + + t.Run("single prefix", func(t *testing.T) { + r, err := NewShardedPrefixedResolver([]string{"prefix"}, defaultKeyResolver{}) + require.NoError(t, err) + loc := r.Meta(metaRef) + require.Equal(t, "prefix/bloom/table_1/tenant/metas/0000000000000000-000000000000ffff-abcd.json", loc.LocalPath()) + loc = r.Block(blockRef) + require.Equal(t, "prefix/bloom/table_1/tenant/blocks/0000000000000000-000000000000ffff/0-3600000-bcde.tar.gz", loc.LocalPath()) + }) + + t.Run("multiple prefixes", func(t *testing.T) { + r, err := NewShardedPrefixedResolver([]string{"a", "b", "c", "d"}, defaultKeyResolver{}) + require.NoError(t, err) + loc := r.Meta(metaRef) + require.Equal(t, "b/bloom/table_1/tenant/metas/0000000000000000-000000000000ffff-abcd.json", loc.LocalPath()) + loc = r.Block(blockRef) + require.Equal(t, "d/bloom/table_1/tenant/blocks/0000000000000000-000000000000ffff/0-3600000-bcde.tar.gz", loc.LocalPath()) + }) +} diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index 8daa94bddf00d..aed16cd8c2532 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -41,7 +41,7 @@ type StoreWithMetrics interface { } type bloomStoreConfig struct { - workingDir string + workingDirs []string numWorkers int maxBloomPageSize int } @@ -193,13 +193,15 @@ func NewBloomStore( // TODO(chaudum): Remove wrapper cfg := bloomStoreConfig{ - workingDir: storageConfig.BloomShipperConfig.WorkingDirectory, + workingDirs: storageConfig.BloomShipperConfig.WorkingDirectory, numWorkers: storageConfig.BloomShipperConfig.BlocksDownloadingQueue.WorkersCount, maxBloomPageSize: int(storageConfig.BloomShipperConfig.MaxQueryPageSize), } - if err := util.EnsureDirectory(cfg.workingDir); err != nil { - return nil, errors.Wrapf(err, "failed to create working directory for bloom store: '%s'", cfg.workingDir) + for _, wd := range cfg.workingDirs { + if err := util.EnsureDirectory(wd); err != nil { + return nil, errors.Wrapf(err, "failed to create working directory for bloom store: '%s'", wd) + } } for _, periodicConfig := range periodicConfigs { diff --git a/pkg/storage/stores/shipper/bloomshipper/store_test.go b/pkg/storage/stores/shipper/bloomshipper/store_test.go index 9274bfc620b6e..074a965ddb5b4 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/store_test.go @@ -51,7 +51,7 @@ func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, st storageConfig := storage.Config{ BloomShipperConfig: config.Config{ - WorkingDirectory: workDir, + WorkingDirectory: []string{workDir}, BlocksDownloadingQueue: config.DownloadingQueueConfig{ WorkersCount: 1, }, From d48496892fddac8077c38ddd9ffa141f6d497953 Mon Sep 17 00:00:00 2001 From: Trevor Whitney Date: Wed, 27 Mar 2024 17:10:04 -0600 Subject: [PATCH 24/54] feat: prepare 3.0.0 release candidate again (#12379) --- .github/jsonnetfile.lock.json | 4 +- .github/release-workflows.jsonnet | 29 +- .../loki-release/workflows/build.libsonnet | 48 +- .../loki-release/workflows/main.jsonnet | 5 +- .../loki-release/workflows/release.libsonnet | 2 +- .github/workflows/minor-release-pr.yml | 50 +- .github/workflows/patch-release-pr.yml | 49 +- .github/workflows/three-0-release.yml | 829 +++++++++++++++++ .github/workflows/three-zero-release.yml | 830 ++++++++++++++++++ 9 files changed, 1794 insertions(+), 52 deletions(-) create mode 100644 
.github/workflows/three-0-release.yml create mode 100644 .github/workflows/three-zero-release.yml diff --git a/.github/jsonnetfile.lock.json b/.github/jsonnetfile.lock.json index 3806723e517b7..395ab9190e3e1 100644 --- a/.github/jsonnetfile.lock.json +++ b/.github/jsonnetfile.lock.json @@ -8,8 +8,8 @@ "subdir": "workflows" } }, - "version": "d3fa90c124d13a4e0359a46c8708704e92ee8a50", - "sum": "TQ2X5sm7o+BCrytzSbJ7Th2YqNZ2ZPx9Wg62x5mEVJ0=" + "version": "634945b73e8eed4f5161ec08810178ddeca7505b", + "sum": "BOnwSjzyOjWwv9ikwJSAgPBNnYHTU2PEDJ0PWY6nr7I=" } ], "legacyImports": false diff --git a/.github/release-workflows.jsonnet b/.github/release-workflows.jsonnet index ff977b7f124e2..27d72bcf477fb 100644 --- a/.github/release-workflows.jsonnet +++ b/.github/release-workflows.jsonnet @@ -42,11 +42,13 @@ local imagePrefix = 'grafana'; skipValidation=false, useGitHubAppToken=true, versioningStrategy='always-bump-patch', - ), false, false + ) + { + name: 'Prepare Patch Release PR', + }, false, false ), 'minor-release-pr.yml': std.manifestYamlDoc( lokiRelease.releasePRWorkflow( - branches=['k[0-9]+', 'main'], + branches=['k[0-9]+'], buildImage=buildImage, checkTemplate=checkTemplate, golangCiLintVersion=golangCiLintVersion, @@ -59,7 +61,28 @@ local imagePrefix = 'grafana'; skipValidation=false, useGitHubAppToken=true, versioningStrategy='always-bump-minor', - ), false, false + ) + { + name: 'Prepare Minor Release PR from Weekly', + }, false, false + ), + 'three-zero-release.yml': std.manifestYamlDoc( + lokiRelease.releasePRWorkflow( + branches=['main'], + buildImage=buildImage, + checkTemplate=checkTemplate, + golangCiLintVersion=golangCiLintVersion, + imageBuildTimeoutMin=imageBuildTimeoutMin, + imageJobs=imageJobs, + imagePrefix=imagePrefix, + releaseLibRef=releaseLibRef, + releaseRepo='grafana/loki', + skipArm=false, + skipValidation=false, + useGitHubAppToken=true, + releaseAs='3.0.0-rc.1', + ) + { + name: 'Prepare Loki 3.0 release', + }, false, false ), 'release.yml': std.manifestYamlDoc( lokiRelease.releaseWorkflow( diff --git a/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet b/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet index 6a2749b62ee28..1857836d66655 100644 --- a/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet +++ b/.github/vendor/github.com/grafana/loki-release/workflows/build.libsonnet @@ -119,19 +119,36 @@ local releaseLibStep = common.releaseLibStep; + step.withId('version') + step.withRun(||| npm install - npm exec -- release-please release-pr \ - --consider-all-branches \ - --dry-run \ - --dry-run-output release.json \ - --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --manifest-file .release-please-manifest.json \ - --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --release-type simple \ - --repo-url "${{ env.RELEASE_REPO }}" \ - --separate-pull-requests false \ - --target-branch "${{ steps.extract_branch.outputs.branch }}" \ - --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + 
--release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi cat release.json @@ -214,5 +231,8 @@ local releaseLibStep = common.releaseLibStep; destination: '${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}', //TODO: make bucket configurable process_gcloudignore: false, }), - ]), + ]) + + job.withOutputs({ + version: '${{ needs.version.outputs.version }}', + }), } diff --git a/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet b/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet index fb401d3158829..d274d21a0571d 100644 --- a/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet +++ b/.github/vendor/github.com/grafana/loki-release/workflows/main.jsonnet @@ -19,6 +19,7 @@ imageBuildTimeoutMin=25, imageJobs={}, imagePrefix='grafana', + releaseAs=null, releaseLibRef='main', releaseRepo='grafana/loki-release', skipArm=false, @@ -53,7 +54,9 @@ SKIP_VALIDATION: skipValidation, USE_GITHUB_APP_TOKEN: useGitHubAppToken, VERSIONING_STRATEGY: versioningStrategy, - }, + } + if releaseAs != null then { + RELEASE_AS: releaseAs, + } else {}, local validationSteps = ['check'], jobs: { check: {} + $.job.withUses(checkTemplate) diff --git a/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet b/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet index dc9978b2335d6..62f065b40288a 100644 --- a/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet +++ b/.github/vendor/github.com/grafana/loki-release/workflows/release.libsonnet @@ -40,12 +40,12 @@ local pullRequestFooter = 'Merging this PR will release the [artifacts](https:// --manifest-file .release-please-manifest.json \ --pull-request-footer "%s" \ --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ --release-type simple \ --repo-url "${{ env.RELEASE_REPO }}" \ --separate-pull-requests false \ --target-branch "${{ steps.extract_branch.outputs.branch }}" \ --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ --dry-run ${{ fromJSON(env.DRY_RUN) }} ||| % pullRequestFooter), diff --git a/.github/workflows/minor-release-pr.yml b/.github/workflows/minor-release-pr.yml index 2704a770dbf91..19f859a6702e1 100644 --- a/.github/workflows/minor-release-pr.yml +++ b/.github/workflows/minor-release-pr.yml @@ -85,18 +85,20 @@ jobs: --manifest-file .release-please-manifest.json \ --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ --pull-request-title-pattern "chore\${scope}: 
release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ --release-type simple \ --repo-url "${{ env.RELEASE_REPO }}" \ --separate-pull-requests false \ --target-branch "${{ steps.extract_branch.outputs.branch }}" \ --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ --dry-run ${{ fromJSON(env.DRY_RUN) }} working-directory: "lib" dist: needs: - "version" + outputs: + version: "${{ needs.version.outputs.version }}" runs-on: "ubuntu-latest" steps: - name: "pull code to release" @@ -768,19 +770,36 @@ jobs: name: "get release version" run: | npm install - npm exec -- release-please release-pr \ - --consider-all-branches \ - --dry-run \ - --dry-run-output release.json \ - --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --manifest-file .release-please-manifest.json \ - --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --release-type simple \ - --repo-url "${{ env.RELEASE_REPO }}" \ - --separate-pull-requests false \ - --target-branch "${{ steps.extract_branch.outputs.branch }}" \ - --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi cat release.json @@ -799,12 +818,11 @@ jobs: echo "pr_created=true" >> $GITHUB_OUTPUT fi working-directory: "lib" -name: "create release PR" +name: "Prepare Minor Release PR from Weekly" "on": push: branches: - "k[0-9]+" - - "main" permissions: contents: "write" id-token: "write" diff --git a/.github/workflows/patch-release-pr.yml b/.github/workflows/patch-release-pr.yml index a388035df98cf..124ea87ef6958 100644 --- a/.github/workflows/patch-release-pr.yml +++ b/.github/workflows/patch-release-pr.yml @@ -85,18 +85,20 @@ jobs: --manifest-file .release-please-manifest.json \ --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ --release-type simple \ --repo-url "${{ env.RELEASE_REPO }}" \ --separate-pull-requests false \ --target-branch 
"${{ steps.extract_branch.outputs.branch }}" \ --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ --dry-run ${{ fromJSON(env.DRY_RUN) }} working-directory: "lib" dist: needs: - "version" + outputs: + version: "${{ needs.version.outputs.version }}" runs-on: "ubuntu-latest" steps: - name: "pull code to release" @@ -768,19 +770,36 @@ jobs: name: "get release version" run: | npm install - npm exec -- release-please release-pr \ - --consider-all-branches \ - --dry-run \ - --dry-run-output release.json \ - --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --manifest-file .release-please-manifest.json \ - --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ - --release-type simple \ - --repo-url "${{ env.RELEASE_REPO }}" \ - --separate-pull-requests false \ - --target-branch "${{ steps.extract_branch.outputs.branch }}" \ - --token "${{ steps.github_app_token.outputs.token }}" \ - --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi cat release.json @@ -799,7 +818,7 @@ jobs: echo "pr_created=true" >> $GITHUB_OUTPUT fi working-directory: "lib" -name: "create release PR" +name: "Prepare Patch Release PR" "on": push: branches: diff --git a/.github/workflows/three-0-release.yml b/.github/workflows/three-0-release.yml new file mode 100644 index 0000000000000..e63fe7d8fa536 --- /dev/null +++ b/.github/workflows/three-0-release.yml @@ -0,0 +1,829 @@ +concurrency: + group: "create-release-pr-${{ github.sha }}" +env: + BUILD_ARTIFACTS_BUCKET: "loki-build-artifacts" + BUILD_TIMEOUT: 40 + CHANGELOG_PATH: "CHANGELOG.md" + DOCKER_USERNAME: "grafana" + DRY_RUN: false + IMAGE_PREFIX: "grafana" + RELEASE_AS: "3.0.0-rc.1" + RELEASE_LIB_REF: "main" + RELEASE_REPO: "grafana/loki" + SKIP_VALIDATION: false + USE_GITHUB_APP_TOKEN: true + VERSIONING_STRATEGY: "always-bump-patch" +jobs: + check: + uses: "grafana/loki-release/.github/workflows/check.yml@main" + with: + build_image: "grafana/loki-build-image:0.33.0" + golang_ci_lint_version: "v1.55.1" + release_lib_ref: "main" + skip_validation: false + use_github_app_token: true + create-release-pr: + needs: + - "dist" + - "fluent-bit" + - "fluentd" + - "logcli" + - "logstash" 
+ - "loki" + - "loki-canary" + - "loki-canary-boringcrypto" + - "promtail" + - "querytee" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - env: + SHA: "${{ github.sha }}" + id: "release" + name: "release please" + run: | + npm install + npm exec -- release-please release-pr \ + --changelog-path "${CHANGELOG_PATH}" \ + --consider-all-branches \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --label "backport main,autorelease: pending,product-approved" \ + --manifest-file .release-please-manifest.json \ + --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" \ + --dry-run ${{ fromJSON(env.DRY_RUN) }} + + working-directory: "lib" + dist: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v2" + with: + version: ">= 452.0.0" + - id: "get-secrets" + name: "get nfpm signing keys" + uses: "grafana/shared-workflows/actions/get-vault-secrets@main" + with: + common_secrets: | + NFPM_SIGNING_KEY=packages-gpg:private-key + NFPM_PASSPHRASE=packages-gpg:passphrase + - env: + BUILD_IN_CONTAINER: false + DRONE_TAG: "${{ needs.version.outputs.version }}" + IMAGE_TAG: "${{ needs.version.outputs.version }}" + NFPM_SIGNING_KEY_FILE: "nfpm-private-key.key" + SKIP_ARM: false + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "build artifacts" + run: | + cat < $NFPM_SIGNING_KEY_FILE + make dist packages + EOF + working-directory: "release" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}" + path: "release/dist" + 
process_gcloudignore: false + fluent-bit: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluent-bit/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-bit-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + fluentd: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ 
needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluentd/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + logcli: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/logcli/Dockerfile" + outputs: "type=docker,dest=release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logcli:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + logstash: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + 
credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/logstash/Dockerfile" + outputs: "type=docker,dest=release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logstash-output-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + loki: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki/Dockerfile" + outputs: "type=docker,dest=release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ 
env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary-boringcrypto: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + 
IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary-boringcrypto/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary-boringcrypto:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + promtail: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/promtail/Dockerfile" + outputs: "type=docker,dest=release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/promtail:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + querytee: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: 
"${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/querytee/Dockerfile" + outputs: "type=docker,dest=release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-query-tee:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + version: + needs: + - "check" + outputs: + pr_created: "${{ steps.version.outputs.pr_created }}" + version: "${{ steps.version.outputs.version }}" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - id: "version" + name: "get release version" + run: | + npm install + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern 
"chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi + + cat release.json + + if [[ `jq length release.json` -gt 1 ]]; then + echo 'release-please would create more than 1 PR, so cannot determine correct version' + echo "pr_created=false" >> $GITHUB_OUTPUT + exit 1 + fi + + if [[ `jq length release.json` -eq 0 ]]; then + echo "pr_created=false" >> $GITHUB_OUTPUT + else + version="$(npm run --silent get-version)" + echo "Parsed version: ${version}" + echo "version=${version}" >> $GITHUB_OUTPUT + echo "pr_created=true" >> $GITHUB_OUTPUT + fi + working-directory: "lib" +name: "Prepare Loki 3.0 release" +"on": + push: + branches: + - "main" + - "add-major-release-workflow" +permissions: + contents: "write" + id-token: "write" + pull-requests: "write" diff --git a/.github/workflows/three-zero-release.yml b/.github/workflows/three-zero-release.yml new file mode 100644 index 0000000000000..70b28fbc92bab --- /dev/null +++ b/.github/workflows/three-zero-release.yml @@ -0,0 +1,830 @@ +concurrency: + group: "create-release-pr-${{ github.sha }}" +env: + BUILD_ARTIFACTS_BUCKET: "loki-build-artifacts" + BUILD_TIMEOUT: 40 + CHANGELOG_PATH: "CHANGELOG.md" + DOCKER_USERNAME: "grafana" + DRY_RUN: false + IMAGE_PREFIX: "grafana" + RELEASE_AS: "3.0.0-rc.1" + RELEASE_LIB_REF: "main" + RELEASE_REPO: "grafana/loki" + SKIP_VALIDATION: false + USE_GITHUB_APP_TOKEN: true + VERSIONING_STRATEGY: "always-bump-patch" +jobs: + check: + uses: "grafana/loki-release/.github/workflows/check.yml@main" + with: + build_image: "grafana/loki-build-image:0.33.0" + golang_ci_lint_version: "v1.55.1" + release_lib_ref: "main" + skip_validation: false + use_github_app_token: true + create-release-pr: + needs: + - "dist" + - "fluent-bit" + - "fluentd" + - "logcli" + - "logstash" + - "loki" + - "loki-canary" + - "loki-canary-boringcrypto" + - "promtail" + - "querytee" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" 
+ name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - env: + SHA: "${{ github.sha }}" + id: "release" + name: "release please" + run: | + npm install + npm exec -- release-please release-pr \ + --changelog-path "${CHANGELOG_PATH}" \ + --consider-all-branches \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --label "backport main,autorelease: pending,product-approved" \ + --manifest-file .release-please-manifest.json \ + --pull-request-footer "Merging this PR will release the [artifacts](https://console.cloud.google.com/storage/browser/${BUILD_ARTIFACTS_BUCKET}/${SHA}) of ${SHA}" \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-as "${{ needs.dist.outputs.version }}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --dry-run ${{ fromJSON(env.DRY_RUN) }} + + working-directory: "lib" + dist: + needs: + - "version" + outputs: + version: "${{ needs.version.outputs.version }}" + runs-on: "ubuntu-latest" + steps: + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v2" + with: + version: ">= 452.0.0" + - id: "get-secrets" + name: "get nfpm signing keys" + uses: "grafana/shared-workflows/actions/get-vault-secrets@main" + with: + common_secrets: | + NFPM_SIGNING_KEY=packages-gpg:private-key + NFPM_PASSPHRASE=packages-gpg:passphrase + - env: + BUILD_IN_CONTAINER: false + DRONE_TAG: "${{ needs.version.outputs.version }}" + IMAGE_TAG: "${{ needs.version.outputs.version }}" + NFPM_SIGNING_KEY_FILE: "nfpm-private-key.key" + SKIP_ARM: false + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "build artifacts" + run: | + cat < $NFPM_SIGNING_KEY_FILE + make dist packages + EOF + working-directory: "release" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}" + path: "release/dist" + process_gcloudignore: false + fluent-bit: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: 
"docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluent-bit/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-bit-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-bit-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + fluentd: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/fluentd/Dockerfile" + outputs: "type=docker,dest=release/images/fluent-plugin-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/fluent-plugin-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/fluent-plugin-loki-${{ 
needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + logcli: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/logcli/Dockerfile" + outputs: "type=docker,dest=release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logcli:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logcli-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + logstash: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ 
fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/logstash/Dockerfile" + outputs: "type=docker,dest=release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/logstash-output-loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/logstash-output-loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + loki: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki/Dockerfile" + outputs: "type=docker,dest=release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: 
"actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + loki-canary-boringcrypto: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/loki-canary-boringcrypto/Dockerfile" + outputs: "type=docker,dest=release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-canary-boringcrypto:${{ needs.version.outputs.version }}-${{ 
steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-canary-boringcrypto-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + promtail: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/clients/cmd/promtail/Dockerfile" + outputs: "type=docker,dest=release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/promtail:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/promtail-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + - "linux/arm64" + - "linux/arm" + querytee: + needs: + - "version" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - name: "auth gcs" + uses: "google-github-actions/auth@v2" + with: + credentials_json: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + - name: "Set up QEMU" + uses: "docker/setup-qemu-action@v3" + - name: "set up docker buildx" + uses: "docker/setup-buildx-action@v3" + - id: "platform" + name: "parse image platform" + run: | + mkdir -p images + + platform="$(echo "${{ matrix.platform}}" | 
sed "s/\(.*\)\/\(.*\)/\1-\2/")" + echo "platform=${platform}" >> $GITHUB_OUTPUT + echo "platform_short=$(echo ${{ matrix.platform }} | cut -d / -f 2)" >> $GITHUB_OUTPUT + working-directory: "release" + - env: + IMAGE_TAG: "${{ needs.version.outputs.version }}" + if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "Build and export" + timeout-minutes: "${{ fromJSON(env.BUILD_TIMEOUT) }}" + uses: "docker/build-push-action@v5" + with: + build-args: "IMAGE_TAG=${{ needs.version.outputs.version }}" + context: "release" + file: "release/cmd/querytee/Dockerfile" + outputs: "type=docker,dest=release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + platforms: "${{ matrix.platform }}" + tags: "${{ env.IMAGE_PREFIX }}/loki-query-tee:${{ needs.version.outputs.version }}-${{ steps.platform.outputs.platform_short }}" + - if: "${{ fromJSON(needs.version.outputs.pr_created) }}" + name: "upload artifacts" + uses: "google-github-actions/upload-cloud-storage@v2" + with: + destination: "${{ env.BUILD_ARTIFACTS_BUCKET }}/${{ github.sha }}/images" + path: "release/images/loki-query-tee-${{ needs.version.outputs.version}}-${{ steps.platform.outputs.platform }}.tar" + process_gcloudignore: false + strategy: + fail-fast: true + matrix: + platform: + - "linux/amd64" + version: + needs: + - "check" + outputs: + pr_created: "${{ steps.version.outputs.pr_created }}" + version: "${{ steps.version.outputs.version }}" + runs-on: "ubuntu-latest" + steps: + - name: "pull release library code" + uses: "actions/checkout@v4" + with: + path: "lib" + ref: "${{ env.RELEASE_LIB_REF }}" + repository: "grafana/loki-release" + - name: "pull code to release" + uses: "actions/checkout@v4" + with: + path: "release" + repository: "${{ env.RELEASE_REPO }}" + - name: "setup node" + uses: "actions/setup-node@v4" + with: + node-version: 20 + - id: "extract_branch" + name: "extract branch name" + run: | + echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + working-directory: "release" + - id: "get_github_app_token" + if: "${{ fromJSON(env.USE_GITHUB_APP_TOKEN) }}" + name: "get github app token" + uses: "actions/github-app-token@v1" + with: + app-id: "${{ secrets.APP_ID }}" + owner: "${{ github.repository_owner }}" + private-key: "${{ secrets.APP_PRIVATE_KEY }}" + - id: "github_app_token" + name: "set github token" + run: | + if [[ "${USE_GITHUB_APP_TOKEN}" == "true" ]]; then + echo "token=${{ steps.get_github_app_token.outputs.token }}" >> $GITHUB_OUTPUT + else + echo "token=${{ secrets.GH_TOKEN }}" >> $GITHUB_OUTPUT + fi + - id: "version" + name: "get release version" + run: | + npm install + + if [[ -z "${{ env.RELEASE_AS }}" ]]; then + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --versioning-strategy "${{ env.VERSIONING_STRATEGY }}" + else + npm exec -- release-please release-pr \ + --consider-all-branches \ + --dry-run \ + --dry-run-output release.json \ + --group-pull-request-title-pattern "chore\${scope}: release\${component} 
\${version}" \ + --manifest-file .release-please-manifest.json \ + --pull-request-title-pattern "chore\${scope}: release\${component} \${version}" \ + --release-type simple \ + --repo-url "${{ env.RELEASE_REPO }}" \ + --separate-pull-requests false \ + --target-branch "${{ steps.extract_branch.outputs.branch }}" \ + --token "${{ steps.github_app_token.outputs.token }}" \ + --release-as "${{ env.RELEASE_AS }}" + fi + + cat release.json + + if [[ `jq length release.json` -gt 1 ]]; then + echo 'release-please would create more than 1 PR, so cannot determine correct version' + echo "pr_created=false" >> $GITHUB_OUTPUT + exit 1 + fi + + if [[ `jq length release.json` -eq 0 ]]; then + echo "pr_created=false" >> $GITHUB_OUTPUT + else + version="$(npm run --silent get-version)" + echo "Parsed version: ${version}" + echo "version=${version}" >> $GITHUB_OUTPUT + echo "pr_created=true" >> $GITHUB_OUTPUT + fi + working-directory: "lib" +name: "Prepare Loki 3.0 release" +"on": + push: + branches: + - "main" +permissions: + contents: "write" + id-token: "write" + pull-requests: "write" From 5b4bfa54b6c4b9118b0f2cf8d5a06c2ebcbaa7c3 Mon Sep 17 00:00:00 2001 From: Trevor Whitney Date: Wed, 27 Mar 2024 17:31:21 -0600 Subject: [PATCH 25/54] feat(detected_fields): initial plumbing of new endpoint (#12385) --- cmd/loki/loki-local-experimental-config.yaml | 53 + docs/sources/configure/_index.md | 4 + pkg/ingester/ingester.go | 12 + pkg/loghttp/labels.go | 14 + pkg/logproto/extensions.go | 9 + pkg/logproto/logproto.pb.go | 1281 +++++++++++++++--- pkg/logproto/logproto.proto | 33 + pkg/logql/metrics.go | 30 + pkg/loki/modules.go | 4 + pkg/lokifrontend/config.go | 3 + pkg/querier/handler.go | 9 + pkg/querier/http.go | 13 + pkg/querier/querier.go | 13 + pkg/querier/querier_mock_test.go | 12 + pkg/querier/queryrange/codec.go | 127 ++ pkg/querier/queryrange/extensions.go | 17 + pkg/querier/queryrange/queryrange.pb.go | 760 +++++++++-- pkg/querier/queryrange/queryrange.proto | 10 + pkg/querier/queryrange/roundtrip.go | 76 +- pkg/querier/queryrange/roundtrip_test.go | 1 + pkg/querier/queryrange/stats.go | 21 +- pkg/util/marshal/marshal.go | 10 + 22 files changed, 2209 insertions(+), 303 deletions(-) create mode 100644 cmd/loki/loki-local-experimental-config.yaml diff --git a/cmd/loki/loki-local-experimental-config.yaml b/cmd/loki/loki-local-experimental-config.yaml new file mode 100644 index 0000000000000..f9968bc04b72d --- /dev/null +++ b/cmd/loki/loki-local-experimental-config.yaml @@ -0,0 +1,53 @@ +auth_enabled: false + +server: + http_listen_port: 3100 + grpc_listen_port: 9096 + +common: + instance_addr: 127.0.0.1 + path_prefix: /tmp/loki + storage: + filesystem: + chunks_directory: /tmp/loki/chunks + rules_directory: /tmp/loki/rules + replication_factor: 1 + ring: + kvstore: + store: inmemory + +query_range: + results_cache: + cache: + embedded_cache: + enabled: true + max_size_mb: 100 + +schema_config: + configs: + - from: 2020-10-24 + store: tsdb + object_store: filesystem + schema: v12 + index: + prefix: index_ + period: 24h + +frontend: + experimental_apis_enabled: true + +ruler: + alertmanager_url: http://localhost:9093 + +# By default, Loki will send anonymous, but uniquely-identifiable usage and configuration +# analytics to Grafana Labs. These statistics are sent to https://stats.grafana.org/ +# +# Statistics help us better understand how Loki is used, and they show us performance +# levels for most users. This helps us prioritize features and documentation. 
+# For more information on what's sent, look at +# https://github.com/grafana/loki/blob/main/pkg/analytics/stats.go +# Refer to the buildReport method to see what goes into a report. +# +# If you would like to disable reporting, uncomment the following lines: +#analytics: +# reporting_enabled: false diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 93582d3897b6f..2d5e667b13683 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -817,6 +817,10 @@ The `frontend` block configures the Loki query-frontend. # The TLS configuration. [tail_tls_config: ] + +# Whether to enable experimental APIs in the frontend. +# CLI flag: -frontend.experimental-apis-enabled +[experimental_apis_enabled: | default = false] ``` ### query_range diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index 0cd76c55a2035..7d0fb671a0a57 100644 --- a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -1359,3 +1359,15 @@ func adjustQueryStartTime(maxLookBackPeriod time.Duration, start, now time.Time) } return start } + +func (i *Ingester) GetDetectedFields(_ context.Context, _ *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{ + { + Label: "foo", + Type: logproto.DetectedFieldString, + Cardinality: 1, + }, + }, + }, nil +} diff --git a/pkg/loghttp/labels.go b/pkg/loghttp/labels.go index 98bad4e957869..f239873323cfe 100644 --- a/pkg/loghttp/labels.go +++ b/pkg/loghttp/labels.go @@ -86,3 +86,17 @@ func ParseLabelQuery(r *http.Request) (*logproto.LabelRequest, error) { req.Query = query(r) return req, nil } + +func ParseDetectedFieldsQuery(r *http.Request) (*logproto.DetectedFieldsRequest, error) { + req := &logproto.DetectedFieldsRequest{} + + start, end, err := bounds(r) + if err != nil { + return nil, err + } + req.Start = &start + req.End = &end + + req.Query = query(r) + return req, nil +} diff --git a/pkg/logproto/extensions.go b/pkg/logproto/extensions.go index 5fa5048e1a9d1..e3996fbe69cbd 100644 --- a/pkg/logproto/extensions.go +++ b/pkg/logproto/extensions.go @@ -140,3 +140,12 @@ func (m *Shard) SpaceFor(stats *IndexStatsResponse, targetShardBytes uint64) boo newDelta := max(updated, targetShardBytes) - min(updated, targetShardBytes) return newDelta <= curDelta } + +const ( + DetectedFieldString DetectedFieldType = 0 + DetectedFieldInt DetectedFieldType = 1 + DetectedFieldFloat DetectedFieldType = 2 + DetectedFieldBoolean DetectedFieldType = 3 + DetectedFieldDuration DetectedFieldType = 4 + DetectedFieldBytes DetectedFieldType = 5 +) diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 7302df0cfb212..1747d4dda7d5e 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -63,6 +63,39 @@ func (Direction) EnumDescriptor() ([]byte, []int) { return fileDescriptor_c28a5f14f1f4c79a, []int{0} } +type DetectedFieldType int32 + +const ( + STRING DetectedFieldType = 0 + INT DetectedFieldType = 1 + FLOAT DetectedFieldType = 2 + BOOL DetectedFieldType = 3 + DURATION DetectedFieldType = 4 + BYTES DetectedFieldType = 5 +) + +var DetectedFieldType_name = map[int32]string{ + 0: "STRING", + 1: "INT", + 2: "FLOAT", + 3: "BOOL", + 4: "DURATION", + 5: "BYTES", +} + +var DetectedFieldType_value = map[string]int32{ + "STRING": 0, + "INT": 1, + "FLOAT": 2, + "BOOL": 3, + "DURATION": 4, + "BYTES": 5, +} + +func (DetectedFieldType) EnumDescriptor() ([]byte, []int) { + return 
fileDescriptor_c28a5f14f1f4c79a, []int{1} +} + type StreamRatesRequest struct { } @@ -2512,8 +2545,170 @@ func (m *Volume) GetVolume() uint64 { return 0 } +type DetectedFieldsRequest struct { + Start *time.Time `protobuf:"bytes,1,opt,name=start,proto3,stdtime" json:"start,omitempty"` + End *time.Time `protobuf:"bytes,2,opt,name=end,proto3,stdtime" json:"end,omitempty"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` +} + +func (m *DetectedFieldsRequest) Reset() { *m = DetectedFieldsRequest{} } +func (*DetectedFieldsRequest) ProtoMessage() {} +func (*DetectedFieldsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{45} +} +func (m *DetectedFieldsRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedFieldsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedFieldsRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedFieldsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedFieldsRequest.Merge(m, src) +} +func (m *DetectedFieldsRequest) XXX_Size() int { + return m.Size() +} +func (m *DetectedFieldsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedFieldsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedFieldsRequest proto.InternalMessageInfo + +func (m *DetectedFieldsRequest) GetStart() *time.Time { + if m != nil { + return m.Start + } + return nil +} + +func (m *DetectedFieldsRequest) GetEnd() *time.Time { + if m != nil { + return m.End + } + return nil +} + +func (m *DetectedFieldsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +type DetectedFieldsResponse struct { + Fields []*DetectedField `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` +} + +func (m *DetectedFieldsResponse) Reset() { *m = DetectedFieldsResponse{} } +func (*DetectedFieldsResponse) ProtoMessage() {} +func (*DetectedFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{46} +} +func (m *DetectedFieldsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedFieldsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedFieldsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedFieldsResponse.Merge(m, src) +} +func (m *DetectedFieldsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedFieldsResponse proto.InternalMessageInfo + +func (m *DetectedFieldsResponse) GetFields() []*DetectedField { + if m != nil { + return m.Fields + } + return nil +} + +type DetectedField struct { + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + Type DetectedFieldType `protobuf:"varint,2,opt,name=type,proto3,enum=logproto.DetectedFieldType" json:"type,omitempty"` + Cardinality uint64 `protobuf:"varint,3,opt,name=cardinality,proto3" json:"cardinality,omitempty"` +} + +func (m *DetectedField) Reset() { *m = DetectedField{} } +func (*DetectedField) 
ProtoMessage() {} +func (*DetectedField) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{47} +} +func (m *DetectedField) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedField.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedField) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedField.Merge(m, src) +} +func (m *DetectedField) XXX_Size() int { + return m.Size() +} +func (m *DetectedField) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedField.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedField proto.InternalMessageInfo + +func (m *DetectedField) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *DetectedField) GetType() DetectedFieldType { + if m != nil { + return m.Type + } + return STRING +} + +func (m *DetectedField) GetCardinality() uint64 { + if m != nil { + return m.Cardinality + } + return 0 +} + func init() { proto.RegisterEnum("logproto.Direction", Direction_name, Direction_value) + proto.RegisterEnum("logproto.DetectedFieldType", DetectedFieldType_name, DetectedFieldType_value) proto.RegisterType((*StreamRatesRequest)(nil), "logproto.StreamRatesRequest") proto.RegisterType((*StreamRatesResponse)(nil), "logproto.StreamRatesResponse") proto.RegisterType((*StreamRate)(nil), "logproto.StreamRate") @@ -2560,155 +2755,169 @@ func init() { proto.RegisterType((*VolumeRequest)(nil), "logproto.VolumeRequest") proto.RegisterType((*VolumeResponse)(nil), "logproto.VolumeResponse") proto.RegisterType((*Volume)(nil), "logproto.Volume") + proto.RegisterType((*DetectedFieldsRequest)(nil), "logproto.DetectedFieldsRequest") + proto.RegisterType((*DetectedFieldsResponse)(nil), "logproto.DetectedFieldsResponse") + proto.RegisterType((*DetectedField)(nil), "logproto.DetectedField") } func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2278 bytes of a gzipped FileDescriptorProto + // 2455 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4d, 0x6f, 0x1b, 0xc7, - 0x95, 0x4b, 0x2e, 0xbf, 0x1e, 0x29, 0x59, 0x1e, 0x31, 0x36, 0x41, 0xdb, 0xa4, 0x3c, 0x48, 0x1d, - 0xc1, 0x71, 0xc8, 0x58, 0x6e, 0xdc, 0xd4, 0x6e, 0xd0, 0x9a, 0x52, 0xec, 0xc8, 0x96, 0x3f, 0x32, - 0x72, 0xdd, 0xc2, 0x68, 0x6b, 0xac, 0xc4, 0x11, 0x45, 0x88, 0xbb, 0x4b, 0xef, 0x0e, 0x63, 0x0b, - 0xe8, 0xa1, 0x7f, 0x20, 0x68, 0x6e, 0x45, 0x2f, 0x45, 0x0f, 0x05, 0x52, 0xa0, 0xe8, 0xa5, 0x3f, - 0xa0, 0xbd, 0xf4, 0xe0, 0xde, 0xdc, 0x5b, 0x90, 0x03, 0x5b, 0xcb, 0x97, 0x42, 0xa7, 0xdc, 0x72, - 0x2d, 0xe6, 0x6b, 0x77, 0x96, 0xa2, 0xdd, 0x50, 0x75, 0x51, 0xf8, 0xc2, 0x9d, 0x79, 0xf3, 0xe6, - 0xcd, 0xfb, 0x9a, 0xf7, 0x31, 0x84, 0x13, 0x83, 0x9d, 0x6e, 0xab, 0xef, 0x77, 0x07, 0x81, 0xcf, - 0xfc, 0x68, 0xd0, 0x14, 0xbf, 0xa8, 0xa0, 0xe7, 0xb5, 0x4a, 0xd7, 0xef, 0xfa, 0x12, 0x87, 0x8f, - 0xe4, 0x7a, 0xad, 0xd1, 0xf5, 0xfd, 0x6e, 0x9f, 0xb6, 0xc4, 0x6c, 0x63, 0xb8, 0xd5, 0x62, 0x3d, - 0x97, 0x86, 0xcc, 0x71, 0x07, 0x0a, 0x61, 0x41, 0x51, 0x7f, 0xd8, 0x77, 0xfd, 0x0e, 0xed, 0xb7, - 0x42, 0xe6, 0xb0, 0x50, 0xfe, 0x2a, 0x8c, 0x79, 0x8e, 0x31, 0x18, 0x86, 0xdb, 0xe2, 0x47, 0x02, - 0x71, 0x05, 0xd0, 0x3a, 0x0b, 0xa8, 0xe3, 0x12, 0x87, 0xd1, 0x90, 0xd0, 
0x87, 0x43, 0x1a, 0x32, - 0x7c, 0x13, 0xe6, 0x13, 0xd0, 0x70, 0xe0, 0x7b, 0x21, 0x45, 0x17, 0xa1, 0x14, 0xc6, 0xe0, 0xaa, - 0xb5, 0x90, 0x59, 0x2c, 0x2d, 0x55, 0x9a, 0x91, 0x28, 0xf1, 0x1e, 0x62, 0x22, 0xe2, 0xdf, 0x58, - 0x00, 0xf1, 0x1a, 0xaa, 0x03, 0xc8, 0xd5, 0x8f, 0x9c, 0x70, 0xbb, 0x6a, 0x2d, 0x58, 0x8b, 0x36, - 0x31, 0x20, 0xe8, 0x1c, 0x1c, 0x8d, 0x67, 0xb7, 0xfc, 0xf5, 0x6d, 0x27, 0xe8, 0x54, 0xd3, 0x02, - 0xed, 0xe0, 0x02, 0x42, 0x60, 0x07, 0x0e, 0xa3, 0xd5, 0xcc, 0x82, 0xb5, 0x98, 0x21, 0x62, 0x8c, - 0x8e, 0x41, 0x8e, 0x51, 0xcf, 0xf1, 0x58, 0xd5, 0x5e, 0xb0, 0x16, 0x8b, 0x44, 0xcd, 0x38, 0x9c, - 0xcb, 0x4e, 0xc3, 0x6a, 0x76, 0xc1, 0x5a, 0x9c, 0x21, 0x6a, 0x86, 0x3f, 0xcf, 0x40, 0xf9, 0xe3, - 0x21, 0x0d, 0x76, 0x95, 0x02, 0x50, 0x1d, 0x0a, 0x21, 0xed, 0xd3, 0x4d, 0xe6, 0x07, 0x82, 0xc1, - 0x62, 0x3b, 0x5d, 0xb5, 0x48, 0x04, 0x43, 0x15, 0xc8, 0xf6, 0x7b, 0x6e, 0x8f, 0x09, 0xb6, 0x66, - 0x88, 0x9c, 0xa0, 0x4b, 0x90, 0x0d, 0x99, 0x13, 0x30, 0xc1, 0x4b, 0x69, 0xa9, 0xd6, 0x94, 0x46, - 0x6b, 0x6a, 0xa3, 0x35, 0xef, 0x6a, 0xa3, 0xb5, 0x0b, 0x4f, 0x46, 0x8d, 0xd4, 0x67, 0xff, 0x68, - 0x58, 0x44, 0x6e, 0x41, 0x17, 0x21, 0x43, 0xbd, 0x8e, 0xe0, 0xf7, 0x9b, 0xee, 0xe4, 0x1b, 0xd0, - 0x79, 0x28, 0x76, 0x7a, 0x01, 0xdd, 0x64, 0x3d, 0xdf, 0x13, 0x52, 0xcd, 0x2e, 0xcd, 0xc7, 0x16, - 0x59, 0xd1, 0x4b, 0x24, 0xc6, 0x42, 0xe7, 0x20, 0x17, 0x72, 0xd5, 0x85, 0xd5, 0xfc, 0x42, 0x66, - 0xb1, 0xd8, 0xae, 0xec, 0x8f, 0x1a, 0x73, 0x12, 0x72, 0xce, 0x77, 0x7b, 0x8c, 0xba, 0x03, 0xb6, - 0x4b, 0x14, 0x0e, 0x3a, 0x0b, 0xf9, 0x0e, 0xed, 0x53, 0x6e, 0xf0, 0x82, 0x30, 0xf8, 0x9c, 0x41, - 0x5e, 0x2c, 0x10, 0x8d, 0x80, 0xee, 0x83, 0x3d, 0xe8, 0x3b, 0x5e, 0xb5, 0x28, 0xa4, 0x98, 0x8d, - 0x11, 0xef, 0xf4, 0x1d, 0xaf, 0x7d, 0xf1, 0xcb, 0x51, 0x63, 0xa9, 0xdb, 0x63, 0xdb, 0xc3, 0x8d, - 0xe6, 0xa6, 0xef, 0xb6, 0xba, 0x81, 0xb3, 0xe5, 0x78, 0x4e, 0xab, 0xef, 0xef, 0xf4, 0x5a, 0xdc, - 0x39, 0x1f, 0x0e, 0x69, 0xd0, 0xa3, 0x41, 0x8b, 0xd3, 0x68, 0x0a, 0x7b, 0xf0, 0x7d, 0x44, 0xd0, - 0xbc, 0x6e, 0x17, 0x72, 0x73, 0x79, 0x3c, 0x4a, 0x03, 0x5a, 0x77, 0xdc, 0x41, 0x9f, 0x4e, 0x65, - 0xaf, 0xc8, 0x32, 0xe9, 0x43, 0x5b, 0x26, 0x33, 0xad, 0x65, 0x62, 0x35, 0xdb, 0xd3, 0xa9, 0x39, - 0xfb, 0x4d, 0xd5, 0x9c, 0x7b, 0xf5, 0x6a, 0xc6, 0x55, 0xb0, 0xf9, 0x0c, 0xcd, 0x41, 0x26, 0x70, - 0x1e, 0x09, 0x65, 0x96, 0x09, 0x1f, 0xe2, 0x35, 0xc8, 0x49, 0x46, 0x50, 0x6d, 0x5c, 0xdb, 0xc9, - 0x9b, 0x11, 0x6b, 0x3a, 0xa3, 0x75, 0x38, 0x17, 0xeb, 0x30, 0x23, 0xb4, 0x83, 0x7f, 0x6b, 0xc1, - 0x8c, 0x32, 0xa1, 0x8a, 0x2e, 0x1b, 0x90, 0x97, 0xb7, 0x5b, 0x47, 0x96, 0xe3, 0xe3, 0x91, 0xe5, - 0x4a, 0xc7, 0x19, 0x30, 0x1a, 0xb4, 0x5b, 0x4f, 0x46, 0x0d, 0xeb, 0xcb, 0x51, 0xe3, 0xad, 0x97, - 0x49, 0x29, 0x82, 0x9c, 0x8a, 0x3a, 0x9a, 0x30, 0x7a, 0x5b, 0x70, 0xc7, 0x42, 0xe5, 0x07, 0x47, - 0x9a, 0x32, 0x40, 0xae, 0x7a, 0x5d, 0x1a, 0x72, 0xca, 0x36, 0x37, 0x21, 0x91, 0x38, 0xf8, 0xe7, - 0x30, 0x9f, 0x70, 0x35, 0xc5, 0xe7, 0xfb, 0x90, 0x0b, 0xb9, 0x02, 0x35, 0x9b, 0x86, 0xa1, 0xd6, - 0x05, 0xbc, 0x3d, 0xab, 0xf8, 0xcb, 0xc9, 0x39, 0x51, 0xf8, 0xd3, 0x9d, 0xfe, 0x57, 0x0b, 0xca, - 0x6b, 0xce, 0x06, 0xed, 0x6b, 0x1f, 0x47, 0x60, 0x7b, 0x8e, 0x4b, 0x95, 0xc6, 0xc5, 0x98, 0x07, - 0xb4, 0x4f, 0x9c, 0xfe, 0x90, 0x4a, 0x92, 0x05, 0xa2, 0x66, 0xd3, 0x46, 0x22, 0xeb, 0xd0, 0x91, - 0xc8, 0x8a, 0xfd, 0xbd, 0x02, 0x59, 0xee, 0x59, 0xbb, 0x22, 0x0a, 0x15, 0x89, 0x9c, 0xe0, 0xb7, - 0x60, 0x46, 0x49, 0xa1, 0xd4, 0x17, 0xb3, 0xcc, 0xd5, 0x57, 0xd4, 0x2c, 0x63, 0x17, 0x72, 0x52, - 0xdb, 0xe8, 0x4d, 0x28, 0x46, 0xd9, 0x4d, 0x48, 0x9b, 0x69, 0xe7, 0xf6, 0x47, 0x8d, 0x34, 0x0b, - 
0x49, 0xbc, 0x80, 0x1a, 0x90, 0x15, 0x3b, 0x85, 0xe4, 0x56, 0xbb, 0xb8, 0x3f, 0x6a, 0x48, 0x00, - 0x91, 0x1f, 0x74, 0x12, 0xec, 0x6d, 0x9e, 0x60, 0xb8, 0x0a, 0xec, 0x76, 0x61, 0x7f, 0xd4, 0x10, - 0x73, 0x22, 0x7e, 0xf1, 0x35, 0x28, 0xaf, 0xd1, 0xae, 0xb3, 0xb9, 0xab, 0x0e, 0xad, 0x68, 0x72, - 0xfc, 0x40, 0x4b, 0xd3, 0x38, 0x0d, 0xe5, 0xe8, 0xc4, 0x07, 0x6e, 0xa8, 0x9c, 0xba, 0x14, 0xc1, - 0x6e, 0x86, 0xf8, 0xd7, 0x16, 0x28, 0x3b, 0x23, 0x0c, 0xb9, 0x3e, 0x97, 0x35, 0x54, 0x31, 0x08, - 0xf6, 0x47, 0x0d, 0x05, 0x21, 0xea, 0x8b, 0x2e, 0x43, 0x3e, 0x14, 0x27, 0x72, 0x62, 0xe3, 0xee, - 0x23, 0x16, 0xda, 0x47, 0xb8, 0x1b, 0xec, 0x8f, 0x1a, 0x1a, 0x91, 0xe8, 0x01, 0x6a, 0x26, 0x32, - 0xa7, 0x14, 0x6c, 0x76, 0x7f, 0xd4, 0x30, 0xa0, 0x66, 0x26, 0xc5, 0x5f, 0x5b, 0x50, 0xba, 0xeb, - 0xf4, 0x22, 0x17, 0xaa, 0x6a, 0x13, 0xc5, 0x31, 0x52, 0x02, 0xf8, 0x95, 0xee, 0xd0, 0xbe, 0xb3, - 0x7b, 0xd5, 0x0f, 0x04, 0xdd, 0x19, 0x12, 0xcd, 0xe3, 0x64, 0x67, 0x4f, 0x4c, 0x76, 0xd9, 0xe9, - 0x43, 0xea, 0xff, 0x30, 0x80, 0x5d, 0xb7, 0x0b, 0xe9, 0xb9, 0x0c, 0xfe, 0xa3, 0x05, 0x65, 0x29, - 0xb9, 0x72, 0xbb, 0x9f, 0x40, 0x4e, 0x2a, 0x46, 0xc8, 0xfe, 0x92, 0xe0, 0xf2, 0xf6, 0x34, 0x81, - 0x45, 0xd1, 0x44, 0xdf, 0x87, 0xd9, 0x4e, 0xe0, 0x0f, 0x06, 0xb4, 0xb3, 0xae, 0x42, 0x58, 0x7a, - 0x3c, 0x84, 0xad, 0x98, 0xeb, 0x64, 0x0c, 0x1d, 0xff, 0xcd, 0x82, 0x19, 0x15, 0x2d, 0x94, 0xad, - 0x22, 0xfd, 0x5a, 0x87, 0x4e, 0x59, 0xe9, 0x69, 0x53, 0xd6, 0x31, 0xc8, 0x75, 0x03, 0x7f, 0x38, - 0x08, 0xab, 0x19, 0x79, 0x37, 0xe5, 0x6c, 0xba, 0x54, 0x86, 0xaf, 0xc3, 0xac, 0x16, 0xe5, 0x05, - 0x21, 0xb3, 0x36, 0x1e, 0x32, 0x57, 0x3b, 0xd4, 0x63, 0xbd, 0xad, 0x5e, 0x14, 0x04, 0x15, 0x3e, - 0xfe, 0xa5, 0x05, 0x73, 0xe3, 0x28, 0x68, 0xc5, 0xb8, 0x67, 0x9c, 0xdc, 0x99, 0x17, 0x93, 0x6b, - 0x8a, 0xe0, 0x13, 0x7e, 0xe8, 0xb1, 0x60, 0x57, 0x93, 0x96, 0x7b, 0x6b, 0xef, 0x41, 0xc9, 0x58, - 0xe4, 0x29, 0x6a, 0x87, 0xaa, 0x9b, 0x41, 0xf8, 0x30, 0x0e, 0x09, 0x69, 0x19, 0xd0, 0xc4, 0x04, - 0xff, 0xca, 0x82, 0x99, 0x84, 0x2d, 0xd1, 0xfb, 0x60, 0x6f, 0x05, 0xbe, 0x3b, 0x95, 0xa1, 0xc4, - 0x0e, 0xf4, 0x6d, 0x48, 0x33, 0x7f, 0x2a, 0x33, 0xa5, 0x99, 0xcf, 0xad, 0xa4, 0xc4, 0xcf, 0xc8, - 0xea, 0x56, 0xce, 0xf0, 0x7b, 0x50, 0x14, 0x02, 0xdd, 0x71, 0x7a, 0xc1, 0xc4, 0x6c, 0x31, 0x59, - 0xa0, 0xcb, 0x70, 0x44, 0x46, 0xc2, 0xc9, 0x9b, 0xcb, 0x93, 0x36, 0x97, 0xf5, 0xe6, 0x13, 0x90, - 0x5d, 0xde, 0x1e, 0x7a, 0x3b, 0x7c, 0x4b, 0xc7, 0x61, 0x8e, 0xde, 0xc2, 0xc7, 0xf8, 0x0d, 0x98, - 0xe7, 0x77, 0x90, 0x06, 0xe1, 0xb2, 0x3f, 0xf4, 0x98, 0xee, 0x2e, 0xce, 0x41, 0x25, 0x09, 0x56, - 0x5e, 0x52, 0x81, 0xec, 0x26, 0x07, 0x08, 0x1a, 0x33, 0x44, 0x4e, 0xf0, 0xef, 0x2c, 0x40, 0xd7, - 0x28, 0x13, 0xa7, 0xac, 0xae, 0x44, 0xd7, 0xa3, 0x06, 0x05, 0xd7, 0x61, 0x9b, 0xdb, 0x34, 0x08, - 0x75, 0x0d, 0xa2, 0xe7, 0xff, 0x8f, 0x6a, 0x0f, 0x9f, 0x87, 0xf9, 0x04, 0x97, 0x4a, 0xa6, 0x1a, - 0x14, 0x36, 0x15, 0x4c, 0xe5, 0xbb, 0x68, 0x8e, 0xff, 0x94, 0x86, 0x82, 0xd8, 0x40, 0xe8, 0x16, - 0x3a, 0x0f, 0xa5, 0xad, 0x9e, 0xd7, 0xa5, 0xc1, 0x20, 0xe8, 0x29, 0x15, 0xd8, 0xed, 0x23, 0xfb, - 0xa3, 0x86, 0x09, 0x26, 0xe6, 0x04, 0xbd, 0x03, 0xf9, 0x61, 0x48, 0x83, 0x07, 0x3d, 0x79, 0xd3, - 0x8b, 0xed, 0xca, 0xde, 0xa8, 0x91, 0xfb, 0x61, 0x48, 0x83, 0xd5, 0x15, 0x9e, 0x79, 0x86, 0x62, - 0x44, 0xe4, 0xb7, 0x83, 0x6e, 0x28, 0x37, 0x15, 0x45, 0x58, 0xfb, 0x3b, 0x9c, 0xfd, 0xb1, 0x50, - 0x37, 0x08, 0x7c, 0x97, 0xb2, 0x6d, 0x3a, 0x0c, 0x5b, 0x9b, 0xbe, 0xeb, 0xfa, 0x5e, 0x4b, 0xf4, - 0x92, 0x42, 0x68, 0x9e, 0x3e, 0xf9, 0x76, 0xe5, 0xb9, 0x77, 0x21, 0xcf, 0xb6, 0x03, 0x7f, 0xd8, - 0xdd, 0x16, 0x59, 0x21, 
0xd3, 0xbe, 0x34, 0x3d, 0x3d, 0x4d, 0x81, 0xe8, 0x01, 0x3a, 0xcd, 0xb5, - 0x45, 0x37, 0x77, 0xc2, 0xa1, 0x2b, 0x3b, 0xb4, 0x76, 0x76, 0x7f, 0xd4, 0xb0, 0xde, 0x21, 0x11, - 0x18, 0x7f, 0x9a, 0x86, 0x86, 0x70, 0xd4, 0x7b, 0xa2, 0x6c, 0xb8, 0xea, 0x07, 0x37, 0x29, 0x0b, - 0x7a, 0x9b, 0xb7, 0x1c, 0x97, 0x6a, 0xdf, 0x68, 0x40, 0xc9, 0x15, 0xc0, 0x07, 0xc6, 0x15, 0x00, - 0x37, 0xc2, 0x43, 0xa7, 0x00, 0xc4, 0x9d, 0x91, 0xeb, 0xf2, 0x36, 0x14, 0x05, 0x44, 0x2c, 0x2f, - 0x27, 0x34, 0xd5, 0x9a, 0x52, 0x32, 0xa5, 0xa1, 0xd5, 0x71, 0x0d, 0x4d, 0x4d, 0x27, 0x52, 0x8b, - 0xe9, 0xeb, 0xd9, 0xa4, 0xaf, 0xe3, 0xbf, 0x5b, 0x50, 0x5f, 0xd3, 0x9c, 0x1f, 0x52, 0x1d, 0x5a, - 0xde, 0xf4, 0x2b, 0x92, 0x37, 0xf3, 0xdf, 0xc9, 0x8b, 0xeb, 0x00, 0x6b, 0x3d, 0x8f, 0x5e, 0xed, - 0xf5, 0x19, 0x0d, 0x26, 0x74, 0x22, 0x9f, 0x66, 0xe2, 0x90, 0x40, 0xe8, 0x96, 0x96, 0x73, 0xd9, - 0x88, 0xc3, 0xaf, 0x42, 0x8c, 0xf4, 0x2b, 0x34, 0x5b, 0x66, 0x2c, 0x44, 0xed, 0x40, 0x7e, 0x4b, - 0x88, 0x27, 0x53, 0x6a, 0xe2, 0x19, 0x25, 0x96, 0xbd, 0x7d, 0x59, 0x1d, 0x7e, 0xe1, 0x65, 0x05, - 0x89, 0x78, 0xf5, 0x69, 0x85, 0xbb, 0x1e, 0x73, 0x1e, 0x1b, 0x9b, 0x89, 0x3e, 0x01, 0xfd, 0x4c, - 0x95, 0x5b, 0xd9, 0x89, 0xe5, 0x96, 0xbe, 0xb9, 0x87, 0xef, 0x19, 0x3f, 0x88, 0x63, 0x9f, 0x30, - 0x87, 0x8a, 0x7d, 0x67, 0xc0, 0x0e, 0xe8, 0x96, 0x4e, 0xd2, 0x28, 0x3e, 0x36, 0xc2, 0x14, 0xeb, - 0xf8, 0xcf, 0x16, 0xcc, 0x5d, 0xa3, 0x2c, 0x59, 0xfe, 0xbc, 0x46, 0xc6, 0xc4, 0x1f, 0xc1, 0x51, - 0x83, 0x7f, 0x25, 0xfd, 0x85, 0xb1, 0x9a, 0xe7, 0x8d, 0x58, 0xfe, 0x55, 0xaf, 0x43, 0x1f, 0xab, - 0x5e, 0x31, 0x59, 0xee, 0xdc, 0x81, 0x92, 0xb1, 0x88, 0xae, 0x8c, 0x15, 0x3a, 0xc6, 0xcb, 0x4e, - 0x94, 0xac, 0xdb, 0x15, 0x25, 0x93, 0xec, 0x16, 0x55, 0x19, 0x1b, 0x15, 0x05, 0xeb, 0x80, 0x84, - 0xb9, 0x04, 0x59, 0x33, 0x2d, 0x09, 0xe8, 0x8d, 0xa8, 0xe2, 0x89, 0xe6, 0xe8, 0x34, 0xd8, 0x81, - 0xff, 0x48, 0x57, 0xb0, 0x33, 0xf1, 0x91, 0xc4, 0x7f, 0x44, 0xc4, 0x12, 0xbe, 0x0c, 0x19, 0xe2, - 0x3f, 0x42, 0x75, 0x80, 0xc0, 0xf1, 0xba, 0xf4, 0x5e, 0xd4, 0x38, 0x95, 0x89, 0x01, 0x79, 0x41, - 0xc9, 0xb0, 0x0c, 0x47, 0x4d, 0x8e, 0xa4, 0xb9, 0x9b, 0x90, 0xff, 0x78, 0x68, 0xaa, 0xab, 0x32, - 0xa6, 0x2e, 0xd9, 0x83, 0x6b, 0x24, 0xee, 0x33, 0x10, 0xc3, 0xd1, 0x49, 0x28, 0x32, 0x67, 0xa3, - 0x4f, 0x6f, 0xc5, 0x01, 0x2e, 0x06, 0xf0, 0x55, 0xde, 0xf3, 0xdd, 0x33, 0x6a, 0x9f, 0x18, 0x80, - 0xce, 0xc2, 0x5c, 0xcc, 0xf3, 0x9d, 0x80, 0x6e, 0xf5, 0x1e, 0x0b, 0x0b, 0x97, 0xc9, 0x01, 0x38, - 0x5a, 0x84, 0x23, 0x31, 0x6c, 0x5d, 0xd4, 0x18, 0xb6, 0x40, 0x1d, 0x07, 0x73, 0xdd, 0x08, 0x71, - 0x3f, 0x7c, 0x38, 0x74, 0xfa, 0xe2, 0xe6, 0x95, 0x89, 0x01, 0xc1, 0x7f, 0xb1, 0xe0, 0xa8, 0x34, - 0x35, 0xef, 0xf6, 0x5f, 0x47, 0xaf, 0xff, 0xdc, 0x02, 0x64, 0x4a, 0xa0, 0x5c, 0xeb, 0x5b, 0xe6, - 0x33, 0x0e, 0x2f, 0x62, 0x4a, 0xa2, 0x95, 0x95, 0xa0, 0xf8, 0x25, 0x06, 0x43, 0x4e, 0x14, 0x42, - 0xb2, 0xa7, 0xb6, 0x65, 0xaf, 0x2c, 0x21, 0x44, 0x7d, 0x79, 0x8b, 0xbf, 0xb1, 0xcb, 0x68, 0xa8, - 0x3a, 0x5d, 0xd1, 0xe2, 0x0b, 0x00, 0x91, 0x1f, 0x7e, 0x16, 0xf5, 0x98, 0xf0, 0x1a, 0x3b, 0x3e, - 0x4b, 0x81, 0x88, 0x1e, 0xe0, 0x3f, 0xa4, 0x61, 0xe6, 0x9e, 0xdf, 0x1f, 0xc6, 0x29, 0xf1, 0x75, - 0x4a, 0x15, 0x89, 0xf6, 0x3b, 0xab, 0xdb, 0x6f, 0x04, 0x76, 0xc8, 0xe8, 0x40, 0x78, 0x56, 0x86, - 0x88, 0x31, 0xc2, 0x50, 0x66, 0x4e, 0xd0, 0xa5, 0x4c, 0xf6, 0x35, 0xd5, 0x9c, 0x28, 0x38, 0x13, - 0x30, 0xb4, 0x00, 0x25, 0xa7, 0xdb, 0x0d, 0x68, 0xd7, 0x61, 0xb4, 0xbd, 0x5b, 0xcd, 0x8b, 0xc3, - 0x4c, 0x10, 0xfe, 0x31, 0xcc, 0x6a, 0x65, 0x29, 0x93, 0xbe, 0x0b, 0xf9, 0x4f, 0x04, 0x64, 0xc2, - 0x93, 0x97, 0x44, 0x55, 0x61, 0x4c, 0xa3, 0x25, 
0xdf, 0xc7, 0x35, 0xcf, 0xf8, 0x3a, 0xe4, 0x24, - 0x3a, 0x3a, 0x69, 0x76, 0x27, 0xf2, 0x6d, 0x86, 0xcf, 0x55, 0xab, 0x81, 0x21, 0x27, 0x09, 0x29, - 0xc3, 0x0b, 0xdf, 0x90, 0x10, 0xa2, 0xbe, 0x67, 0xcf, 0x40, 0x31, 0x7a, 0xdc, 0x46, 0x25, 0xc8, - 0x5f, 0xbd, 0x4d, 0x7e, 0x74, 0x85, 0xac, 0xcc, 0xa5, 0x50, 0x19, 0x0a, 0xed, 0x2b, 0xcb, 0x37, - 0xc4, 0xcc, 0x5a, 0xfa, 0xda, 0xd6, 0x91, 0x25, 0x40, 0xdf, 0x83, 0xac, 0x0c, 0x17, 0xc7, 0x62, - 0xfe, 0xcd, 0x67, 0xe4, 0xda, 0xf1, 0x03, 0x70, 0xa9, 0x01, 0x9c, 0x7a, 0xd7, 0x42, 0xb7, 0xa0, - 0x24, 0x80, 0xea, 0xc1, 0xe8, 0xe4, 0xf8, 0xbb, 0x4d, 0x82, 0xd2, 0xa9, 0x17, 0xac, 0x1a, 0xf4, - 0x2e, 0x41, 0x56, 0xd8, 0xc4, 0xe4, 0xc6, 0x7c, 0xf0, 0x33, 0xb9, 0x49, 0x3c, 0xa1, 0xe1, 0x14, - 0xfa, 0x2e, 0xd8, 0xbc, 0x85, 0x42, 0x46, 0x52, 0x31, 0xde, 0x79, 0x6a, 0xc7, 0xc6, 0xc1, 0xc6, - 0xb1, 0x1f, 0x44, 0xcf, 0x55, 0xc7, 0xc7, 0xdb, 0x66, 0xbd, 0xbd, 0x7a, 0x70, 0x21, 0x3a, 0xf9, - 0xb6, 0x7c, 0x57, 0xd1, 0xcd, 0x1b, 0x3a, 0x95, 0x3c, 0x6a, 0xac, 0xd7, 0xab, 0xd5, 0x5f, 0xb4, - 0x1c, 0x11, 0x5c, 0x83, 0x92, 0xd1, 0x38, 0x99, 0x6a, 0x3d, 0xd8, 0xf5, 0x99, 0x6a, 0x9d, 0xd0, - 0x6d, 0xe1, 0x14, 0xba, 0x06, 0x05, 0x9e, 0x8a, 0x79, 0x44, 0x42, 0x27, 0xc6, 0x33, 0xae, 0x11, - 0x69, 0x6b, 0x27, 0x27, 0x2f, 0x46, 0x84, 0x7e, 0x00, 0xc5, 0x6b, 0x94, 0x29, 0x77, 0x3d, 0x3e, - 0xee, 0xef, 0x13, 0x34, 0x95, 0xbc, 0x33, 0x38, 0xb5, 0xf4, 0x53, 0xfd, 0xa7, 0xd7, 0x8a, 0xc3, - 0x1c, 0x74, 0x1b, 0x66, 0x05, 0x63, 0xd1, 0xbf, 0x62, 0x09, 0x07, 0x3a, 0xf0, 0x17, 0x5c, 0xc2, - 0x81, 0x0e, 0xfe, 0x15, 0x87, 0x53, 0xed, 0xfb, 0x4f, 0x9f, 0xd5, 0x53, 0x5f, 0x3c, 0xab, 0xa7, - 0xbe, 0x7a, 0x56, 0xb7, 0x7e, 0xb1, 0x57, 0xb7, 0x7e, 0xbf, 0x57, 0xb7, 0x9e, 0xec, 0xd5, 0xad, - 0xa7, 0x7b, 0x75, 0xeb, 0x9f, 0x7b, 0x75, 0xeb, 0x5f, 0x7b, 0xf5, 0xd4, 0x57, 0x7b, 0x75, 0xeb, - 0xb3, 0xe7, 0xf5, 0xd4, 0xd3, 0xe7, 0xf5, 0xd4, 0x17, 0xcf, 0xeb, 0xa9, 0xfb, 0x6f, 0xfe, 0x87, - 0x42, 0x52, 0x36, 0xba, 0x39, 0xf1, 0xb9, 0xf0, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3e, 0xbe, - 0x5b, 0x4c, 0xb3, 0x1c, 0x00, 0x00, + 0x95, 0xcb, 0x5d, 0x7e, 0x3d, 0x52, 0x32, 0x3d, 0x62, 0x6c, 0x82, 0x76, 0x48, 0x79, 0x90, 0x3a, + 0x82, 0xe3, 0x88, 0xb1, 0xdc, 0xb8, 0xa9, 0xdd, 0xa0, 0x15, 0x25, 0x4b, 0x91, 0x2d, 0x4b, 0xce, + 0x48, 0x71, 0x53, 0xa3, 0xad, 0xb1, 0x22, 0x47, 0xd4, 0x42, 0xe4, 0x2e, 0xbd, 0x3b, 0x8c, 0x4d, + 0xa0, 0x87, 0xfe, 0x81, 0xa0, 0xb9, 0x15, 0xbd, 0x14, 0x2d, 0x50, 0x20, 0x05, 0x8a, 0x5e, 0xfa, + 0x03, 0xda, 0x4b, 0x0f, 0xee, 0xcd, 0xb9, 0x05, 0x39, 0xb0, 0xb5, 0x7c, 0x29, 0x74, 0xca, 0xad, + 0xd7, 0x62, 0x3e, 0xf6, 0x8b, 0xa2, 0xdc, 0x50, 0x75, 0x51, 0xf8, 0xc2, 0x9d, 0x79, 0xf3, 0xe6, + 0xcd, 0xfb, 0x9a, 0xf7, 0x31, 0x84, 0x73, 0xbd, 0xfd, 0x76, 0xbd, 0xe3, 0xb4, 0x7b, 0xae, 0xc3, + 0x9c, 0x60, 0x30, 0x2f, 0x7e, 0x51, 0xd6, 0x9f, 0x57, 0x4a, 0x6d, 0xa7, 0xed, 0x48, 0x1c, 0x3e, + 0x92, 0xeb, 0x95, 0x5a, 0xdb, 0x71, 0xda, 0x1d, 0x5a, 0x17, 0xb3, 0x9d, 0xfe, 0x6e, 0x9d, 0x59, + 0x5d, 0xea, 0x31, 0xb3, 0xdb, 0x53, 0x08, 0xb3, 0x8a, 0xfa, 0xc3, 0x4e, 0xd7, 0x69, 0xd1, 0x4e, + 0xdd, 0x63, 0x26, 0xf3, 0xe4, 0xaf, 0xc2, 0x98, 0xe1, 0x18, 0xbd, 0xbe, 0xb7, 0x27, 0x7e, 0x24, + 0x10, 0x97, 0x00, 0x6d, 0x31, 0x97, 0x9a, 0x5d, 0x62, 0x32, 0xea, 0x11, 0xfa, 0xb0, 0x4f, 0x3d, + 0x86, 0xef, 0xc0, 0x4c, 0x0c, 0xea, 0xf5, 0x1c, 0xdb, 0xa3, 0xe8, 0x1a, 0xe4, 0xbd, 0x10, 0x5c, + 0xd6, 0x66, 0xf5, 0xb9, 0xfc, 0x42, 0x69, 0x3e, 0x10, 0x25, 0xdc, 0x43, 0xa2, 0x88, 0xf8, 0xd7, + 0x1a, 0x40, 0xb8, 0x86, 0xaa, 0x00, 0x72, 0xf5, 0x03, 0xd3, 0xdb, 0x2b, 0x6b, 0xb3, 0xda, 0x9c, + 0x41, 0x22, 0x10, 0x74, 0x19, 0x4e, 
0x87, 0xb3, 0x0d, 0x67, 0x6b, 0xcf, 0x74, 0x5b, 0xe5, 0xa4, + 0x40, 0x3b, 0xba, 0x80, 0x10, 0x18, 0xae, 0xc9, 0x68, 0x59, 0x9f, 0xd5, 0xe6, 0x74, 0x22, 0xc6, + 0xe8, 0x0c, 0xa4, 0x19, 0xb5, 0x4d, 0x9b, 0x95, 0x8d, 0x59, 0x6d, 0x2e, 0x47, 0xd4, 0x8c, 0xc3, + 0xb9, 0xec, 0xd4, 0x2b, 0xa7, 0x66, 0xb5, 0xb9, 0x29, 0xa2, 0x66, 0xf8, 0x73, 0x1d, 0x0a, 0x1f, + 0xf6, 0xa9, 0x3b, 0x50, 0x0a, 0x40, 0x55, 0xc8, 0x7a, 0xb4, 0x43, 0x9b, 0xcc, 0x71, 0x05, 0x83, + 0xb9, 0x46, 0xb2, 0xac, 0x91, 0x00, 0x86, 0x4a, 0x90, 0xea, 0x58, 0x5d, 0x8b, 0x09, 0xb6, 0xa6, + 0x88, 0x9c, 0xa0, 0xeb, 0x90, 0xf2, 0x98, 0xe9, 0x32, 0xc1, 0x4b, 0x7e, 0xa1, 0x32, 0x2f, 0x8d, + 0x36, 0xef, 0x1b, 0x6d, 0x7e, 0xdb, 0x37, 0x5a, 0x23, 0xfb, 0x64, 0x58, 0x4b, 0x7c, 0xf6, 0xf7, + 0x9a, 0x46, 0xe4, 0x16, 0x74, 0x0d, 0x74, 0x6a, 0xb7, 0x04, 0xbf, 0xdf, 0x74, 0x27, 0xdf, 0x80, + 0xae, 0x40, 0xae, 0x65, 0xb9, 0xb4, 0xc9, 0x2c, 0xc7, 0x16, 0x52, 0x4d, 0x2f, 0xcc, 0x84, 0x16, + 0x59, 0xf6, 0x97, 0x48, 0x88, 0x85, 0x2e, 0x43, 0xda, 0xe3, 0xaa, 0xf3, 0xca, 0x99, 0x59, 0x7d, + 0x2e, 0xd7, 0x28, 0x1d, 0x0e, 0x6b, 0x45, 0x09, 0xb9, 0xec, 0x74, 0x2d, 0x46, 0xbb, 0x3d, 0x36, + 0x20, 0x0a, 0x07, 0x5d, 0x82, 0x4c, 0x8b, 0x76, 0x28, 0x37, 0x78, 0x56, 0x18, 0xbc, 0x18, 0x21, + 0x2f, 0x16, 0x88, 0x8f, 0x80, 0xee, 0x83, 0xd1, 0xeb, 0x98, 0x76, 0x39, 0x27, 0xa4, 0x98, 0x0e, + 0x11, 0xef, 0x76, 0x4c, 0xbb, 0x71, 0xed, 0xab, 0x61, 0x6d, 0xa1, 0x6d, 0xb1, 0xbd, 0xfe, 0xce, + 0x7c, 0xd3, 0xe9, 0xd6, 0xdb, 0xae, 0xb9, 0x6b, 0xda, 0x66, 0xbd, 0xe3, 0xec, 0x5b, 0x75, 0xee, + 0x9c, 0x0f, 0xfb, 0xd4, 0xb5, 0xa8, 0x5b, 0xe7, 0x34, 0xe6, 0x85, 0x3d, 0xf8, 0x3e, 0x22, 0x68, + 0xde, 0x32, 0xb2, 0xe9, 0x62, 0x06, 0x0f, 0x93, 0x80, 0xb6, 0xcc, 0x6e, 0xaf, 0x43, 0x27, 0xb2, + 0x57, 0x60, 0x99, 0xe4, 0x89, 0x2d, 0xa3, 0x4f, 0x6a, 0x99, 0x50, 0xcd, 0xc6, 0x64, 0x6a, 0x4e, + 0x7d, 0x53, 0x35, 0xa7, 0x5f, 0xbe, 0x9a, 0x71, 0x19, 0x0c, 0x3e, 0x43, 0x45, 0xd0, 0x5d, 0xf3, + 0x91, 0x50, 0x66, 0x81, 0xf0, 0x21, 0x5e, 0x87, 0xb4, 0x64, 0x04, 0x55, 0x46, 0xb5, 0x1d, 0xbf, + 0x19, 0xa1, 0xa6, 0x75, 0x5f, 0x87, 0xc5, 0x50, 0x87, 0xba, 0xd0, 0x0e, 0xfe, 0x8d, 0x06, 0x53, + 0xca, 0x84, 0x2a, 0xba, 0xec, 0x40, 0x46, 0xde, 0x6e, 0x3f, 0xb2, 0x9c, 0x1d, 0x8d, 0x2c, 0x8b, + 0x2d, 0xb3, 0xc7, 0xa8, 0xdb, 0xa8, 0x3f, 0x19, 0xd6, 0xb4, 0xaf, 0x86, 0xb5, 0x37, 0x5f, 0x24, + 0xa5, 0x08, 0x72, 0x2a, 0xea, 0xf8, 0x84, 0xd1, 0x5b, 0x82, 0x3b, 0xe6, 0x29, 0x3f, 0x38, 0x35, + 0x2f, 0x03, 0xe4, 0x9a, 0xdd, 0xa6, 0x1e, 0xa7, 0x6c, 0x70, 0x13, 0x12, 0x89, 0x83, 0x7f, 0x06, + 0x33, 0x31, 0x57, 0x53, 0x7c, 0xbe, 0x07, 0x69, 0x8f, 0x2b, 0xd0, 0x67, 0x33, 0x62, 0xa8, 0x2d, + 0x01, 0x6f, 0x4c, 0x2b, 0xfe, 0xd2, 0x72, 0x4e, 0x14, 0xfe, 0x64, 0xa7, 0xff, 0x55, 0x83, 0xc2, + 0xba, 0xb9, 0x43, 0x3b, 0xbe, 0x8f, 0x23, 0x30, 0x6c, 0xb3, 0x4b, 0x95, 0xc6, 0xc5, 0x98, 0x07, + 0xb4, 0x4f, 0xcc, 0x4e, 0x9f, 0x4a, 0x92, 0x59, 0xa2, 0x66, 0x93, 0x46, 0x22, 0xed, 0xc4, 0x91, + 0x48, 0x0b, 0xfd, 0xbd, 0x04, 0x29, 0xee, 0x59, 0x03, 0x11, 0x85, 0x72, 0x44, 0x4e, 0xf0, 0x9b, + 0x30, 0xa5, 0xa4, 0x50, 0xea, 0x0b, 0x59, 0xe6, 0xea, 0xcb, 0xf9, 0x2c, 0xe3, 0x2e, 0xa4, 0xa5, + 0xb6, 0xd1, 0x1b, 0x90, 0x0b, 0xb2, 0x9b, 0x90, 0x56, 0x6f, 0xa4, 0x0f, 0x87, 0xb5, 0x24, 0xf3, + 0x48, 0xb8, 0x80, 0x6a, 0x90, 0x12, 0x3b, 0x85, 0xe4, 0x5a, 0x23, 0x77, 0x38, 0xac, 0x49, 0x00, + 0x91, 0x1f, 0x74, 0x1e, 0x8c, 0x3d, 0x9e, 0x60, 0xb8, 0x0a, 0x8c, 0x46, 0xf6, 0x70, 0x58, 0x13, + 0x73, 0x22, 0x7e, 0xf1, 0x2a, 0x14, 0xd6, 0x69, 0xdb, 0x6c, 0x0e, 0xd4, 0xa1, 0x25, 0x9f, 0x1c, + 0x3f, 0x50, 0xf3, 0x69, 0x5c, 0x80, 0x42, 0x70, 0xe2, 0x83, 
0xae, 0xa7, 0x9c, 0x3a, 0x1f, 0xc0, + 0xee, 0x78, 0xf8, 0x57, 0x1a, 0x28, 0x3b, 0x23, 0x0c, 0xe9, 0x0e, 0x97, 0xd5, 0x53, 0x31, 0x08, + 0x0e, 0x87, 0x35, 0x05, 0x21, 0xea, 0x8b, 0x6e, 0x40, 0xc6, 0x13, 0x27, 0x72, 0x62, 0xa3, 0xee, + 0x23, 0x16, 0x1a, 0xa7, 0xb8, 0x1b, 0x1c, 0x0e, 0x6b, 0x3e, 0x22, 0xf1, 0x07, 0x68, 0x3e, 0x96, + 0x39, 0xa5, 0x60, 0xd3, 0x87, 0xc3, 0x5a, 0x04, 0x1a, 0xcd, 0xa4, 0xf8, 0x5f, 0x1a, 0xe4, 0xb7, + 0x4d, 0x2b, 0x70, 0xa1, 0xb2, 0x6f, 0xa2, 0x30, 0x46, 0x4a, 0x00, 0xbf, 0xd2, 0x2d, 0xda, 0x31, + 0x07, 0x2b, 0x8e, 0x2b, 0xe8, 0x4e, 0x91, 0x60, 0x1e, 0x26, 0x3b, 0x63, 0x6c, 0xb2, 0x4b, 0x4d, + 0x1e, 0x52, 0xff, 0x87, 0x01, 0xec, 0x96, 0x91, 0x4d, 0x16, 0x75, 0xfc, 0x47, 0x0d, 0x0a, 0x52, + 0x72, 0xe5, 0x76, 0x3f, 0x86, 0xb4, 0x54, 0x8c, 0x90, 0xfd, 0x05, 0xc1, 0xe5, 0xad, 0x49, 0x02, + 0x8b, 0xa2, 0x89, 0xbe, 0x0f, 0xd3, 0x2d, 0xd7, 0xe9, 0xf5, 0x68, 0x6b, 0x4b, 0x85, 0xb0, 0xe4, + 0x68, 0x08, 0x5b, 0x8e, 0xae, 0x93, 0x11, 0x74, 0xfc, 0x37, 0x0d, 0xa6, 0x54, 0xb4, 0x50, 0xb6, + 0x0a, 0xf4, 0xab, 0x9d, 0x38, 0x65, 0x25, 0x27, 0x4d, 0x59, 0x67, 0x20, 0xdd, 0x76, 0x9d, 0x7e, + 0xcf, 0x2b, 0xeb, 0xf2, 0x6e, 0xca, 0xd9, 0x64, 0xa9, 0x0c, 0xdf, 0x82, 0x69, 0x5f, 0x94, 0x63, + 0x42, 0x66, 0x65, 0x34, 0x64, 0xae, 0xb5, 0xa8, 0xcd, 0xac, 0x5d, 0x2b, 0x08, 0x82, 0x0a, 0x1f, + 0xff, 0x42, 0x83, 0xe2, 0x28, 0x0a, 0x5a, 0x8e, 0xdc, 0x33, 0x4e, 0xee, 0xe2, 0xf1, 0xe4, 0xe6, + 0x45, 0xf0, 0xf1, 0x6e, 0xda, 0xcc, 0x1d, 0xf8, 0xa4, 0xe5, 0xde, 0xca, 0xbb, 0x90, 0x8f, 0x2c, + 0xf2, 0x14, 0xb5, 0x4f, 0xd5, 0xcd, 0x20, 0x7c, 0x18, 0x86, 0x84, 0xa4, 0x0c, 0x68, 0x62, 0x82, + 0x7f, 0xa9, 0xc1, 0x54, 0xcc, 0x96, 0xe8, 0x3d, 0x30, 0x76, 0x5d, 0xa7, 0x3b, 0x91, 0xa1, 0xc4, + 0x0e, 0xf4, 0x6d, 0x48, 0x32, 0x67, 0x22, 0x33, 0x25, 0x99, 0xc3, 0xad, 0xa4, 0xc4, 0xd7, 0x65, + 0x75, 0x2b, 0x67, 0xf8, 0x5d, 0xc8, 0x09, 0x81, 0xee, 0x9a, 0x96, 0x3b, 0x36, 0x5b, 0x8c, 0x17, + 0xe8, 0x06, 0x9c, 0x92, 0x91, 0x70, 0xfc, 0xe6, 0xc2, 0xb8, 0xcd, 0x05, 0x7f, 0xf3, 0x39, 0x48, + 0x2d, 0xed, 0xf5, 0xed, 0x7d, 0xbe, 0xa5, 0x65, 0x32, 0xd3, 0xdf, 0xc2, 0xc7, 0xf8, 0x35, 0x98, + 0xe1, 0x77, 0x90, 0xba, 0xde, 0x92, 0xd3, 0xb7, 0x99, 0xdf, 0x5d, 0x5c, 0x86, 0x52, 0x1c, 0xac, + 0xbc, 0xa4, 0x04, 0xa9, 0x26, 0x07, 0x08, 0x1a, 0x53, 0x44, 0x4e, 0xf0, 0xef, 0x34, 0x40, 0xab, + 0x94, 0x89, 0x53, 0xd6, 0x96, 0x83, 0xeb, 0x51, 0x81, 0x6c, 0xd7, 0x64, 0xcd, 0x3d, 0xea, 0x7a, + 0x7e, 0x0d, 0xe2, 0xcf, 0xff, 0x1f, 0xd5, 0x1e, 0xbe, 0x02, 0x33, 0x31, 0x2e, 0x95, 0x4c, 0x15, + 0xc8, 0x36, 0x15, 0x4c, 0xe5, 0xbb, 0x60, 0x8e, 0xff, 0x94, 0x84, 0xac, 0xd8, 0x40, 0xe8, 0x2e, + 0xba, 0x02, 0xf9, 0x5d, 0xcb, 0x6e, 0x53, 0xb7, 0xe7, 0x5a, 0x4a, 0x05, 0x46, 0xe3, 0xd4, 0xe1, + 0xb0, 0x16, 0x05, 0x93, 0xe8, 0x04, 0xbd, 0x0d, 0x99, 0xbe, 0x47, 0xdd, 0x07, 0x96, 0xbc, 0xe9, + 0xb9, 0x46, 0xe9, 0x60, 0x58, 0x4b, 0x7f, 0xe4, 0x51, 0x77, 0x6d, 0x99, 0x67, 0x9e, 0xbe, 0x18, + 0x11, 0xf9, 0x6d, 0xa1, 0xdb, 0xca, 0x4d, 0x45, 0x11, 0xd6, 0xf8, 0x0e, 0x67, 0x7f, 0x24, 0xd4, + 0xf5, 0x5c, 0xa7, 0x4b, 0xd9, 0x1e, 0xed, 0x7b, 0xf5, 0xa6, 0xd3, 0xed, 0x3a, 0x76, 0x5d, 0xf4, + 0x92, 0x42, 0x68, 0x9e, 0x3e, 0xf9, 0x76, 0xe5, 0xb9, 0xdb, 0x90, 0x61, 0x7b, 0xae, 0xd3, 0x6f, + 0xef, 0x89, 0xac, 0xa0, 0x37, 0xae, 0x4f, 0x4e, 0xcf, 0xa7, 0x40, 0xfc, 0x01, 0xba, 0xc0, 0xb5, + 0x45, 0x9b, 0xfb, 0x5e, 0xbf, 0x2b, 0x3b, 0xb4, 0x46, 0xea, 0x70, 0x58, 0xd3, 0xde, 0x26, 0x01, + 0x18, 0x7f, 0x9a, 0x84, 0x9a, 0x70, 0xd4, 0x7b, 0xa2, 0x6c, 0x58, 0x71, 0xdc, 0x3b, 0x94, 0xb9, + 0x56, 0x73, 0xc3, 0xec, 0x52, 0xdf, 0x37, 0x6a, 0x90, 0xef, 0x0a, 0xe0, 0x83, 0xc8, 
0x15, 0x80, + 0x6e, 0x80, 0x87, 0x5e, 0x07, 0x10, 0x77, 0x46, 0xae, 0xcb, 0xdb, 0x90, 0x13, 0x10, 0xb1, 0xbc, + 0x14, 0xd3, 0x54, 0x7d, 0x42, 0xc9, 0x94, 0x86, 0xd6, 0x46, 0x35, 0x34, 0x31, 0x9d, 0x40, 0x2d, + 0x51, 0x5f, 0x4f, 0xc5, 0x7d, 0x1d, 0x7f, 0xa1, 0x41, 0x75, 0xdd, 0xe7, 0xfc, 0x84, 0xea, 0xf0, + 0xe5, 0x4d, 0xbe, 0x24, 0x79, 0xf5, 0xff, 0x4e, 0x5e, 0x5c, 0x05, 0x58, 0xb7, 0x6c, 0xba, 0x62, + 0x75, 0x18, 0x75, 0xc7, 0x74, 0x22, 0x9f, 0xea, 0x61, 0x48, 0x20, 0x74, 0xd7, 0x97, 0x73, 0x29, + 0x12, 0x87, 0x5f, 0x86, 0x18, 0xc9, 0x97, 0x68, 0x36, 0x7d, 0x24, 0x44, 0xed, 0x43, 0x66, 0x57, + 0x88, 0x27, 0x53, 0x6a, 0xec, 0x19, 0x25, 0x94, 0xbd, 0x71, 0x43, 0x1d, 0x7e, 0xf5, 0x45, 0x05, + 0x89, 0x78, 0xf5, 0xa9, 0x7b, 0x03, 0x9b, 0x99, 0x8f, 0x23, 0x9b, 0x89, 0x7f, 0x02, 0xfa, 0xa9, + 0x2a, 0xb7, 0x52, 0x63, 0xcb, 0x2d, 0xff, 0xe6, 0x9e, 0xbc, 0x67, 0x7c, 0x3f, 0x8c, 0x7d, 0xc2, + 0x1c, 0x2a, 0xf6, 0x5d, 0x04, 0xc3, 0xa5, 0xbb, 0x7e, 0x92, 0x46, 0xe1, 0xb1, 0x01, 0xa6, 0x58, + 0xc7, 0x7f, 0xd6, 0xa0, 0xb8, 0x4a, 0x59, 0xbc, 0xfc, 0x79, 0x85, 0x8c, 0x89, 0x3f, 0x80, 0xd3, + 0x11, 0xfe, 0x95, 0xf4, 0x57, 0x47, 0x6a, 0x9e, 0xd7, 0x42, 0xf9, 0xd7, 0xec, 0x16, 0x7d, 0xac, + 0x7a, 0xc5, 0x78, 0xb9, 0x73, 0x17, 0xf2, 0x91, 0x45, 0xb4, 0x38, 0x52, 0xe8, 0x44, 0x5e, 0x76, + 0x82, 0x64, 0xdd, 0x28, 0x29, 0x99, 0x64, 0xb7, 0xa8, 0xca, 0xd8, 0xa0, 0x28, 0xd8, 0x02, 0x24, + 0xcc, 0x25, 0xc8, 0x46, 0xd3, 0x92, 0x80, 0xde, 0x0e, 0x2a, 0x9e, 0x60, 0x8e, 0x2e, 0x80, 0xe1, + 0x3a, 0x8f, 0xfc, 0x0a, 0x76, 0x2a, 0x3c, 0x92, 0x38, 0x8f, 0x88, 0x58, 0xc2, 0x37, 0x40, 0x27, + 0xce, 0x23, 0x54, 0x05, 0x70, 0x4d, 0xbb, 0x4d, 0xef, 0x05, 0x8d, 0x53, 0x81, 0x44, 0x20, 0xc7, + 0x94, 0x0c, 0x4b, 0x70, 0x3a, 0xca, 0x91, 0x34, 0xf7, 0x3c, 0x64, 0x3e, 0xec, 0x47, 0xd5, 0x55, + 0x1a, 0x51, 0x97, 0xec, 0xc1, 0x7d, 0x24, 0xee, 0x33, 0x10, 0xc2, 0xd1, 0x79, 0xc8, 0x31, 0x73, + 0xa7, 0x43, 0x37, 0xc2, 0x00, 0x17, 0x02, 0xf8, 0x2a, 0xef, 0xf9, 0xee, 0x45, 0x6a, 0x9f, 0x10, + 0x80, 0x2e, 0x41, 0x31, 0xe4, 0xf9, 0xae, 0x4b, 0x77, 0xad, 0xc7, 0xc2, 0xc2, 0x05, 0x72, 0x04, + 0x8e, 0xe6, 0xe0, 0x54, 0x08, 0xdb, 0x12, 0x35, 0x86, 0x21, 0x50, 0x47, 0xc1, 0x5c, 0x37, 0x42, + 0xdc, 0x9b, 0x0f, 0xfb, 0x66, 0x47, 0xdc, 0xbc, 0x02, 0x89, 0x40, 0xf0, 0x5f, 0x34, 0x38, 0x2d, + 0x4d, 0xcd, 0xbb, 0xfd, 0x57, 0xd1, 0xeb, 0x3f, 0xd7, 0x00, 0x45, 0x25, 0x50, 0xae, 0xf5, 0xad, + 0xe8, 0x33, 0x0e, 0x2f, 0x62, 0xf2, 0xa2, 0x95, 0x95, 0xa0, 0xf0, 0x25, 0x06, 0x43, 0x5a, 0x14, + 0x42, 0xb2, 0xa7, 0x36, 0x64, 0xaf, 0x2c, 0x21, 0x44, 0x7d, 0x79, 0x8b, 0xbf, 0x33, 0x60, 0xd4, + 0x53, 0x9d, 0xae, 0x68, 0xf1, 0x05, 0x80, 0xc8, 0x0f, 0x3f, 0x8b, 0xda, 0x4c, 0x78, 0x8d, 0x11, + 0x9e, 0xa5, 0x40, 0xc4, 0x1f, 0xe0, 0x3f, 0x24, 0x61, 0xea, 0x9e, 0xd3, 0xe9, 0x87, 0x29, 0xf1, + 0x55, 0x4a, 0x15, 0xb1, 0xf6, 0x3b, 0xe5, 0xb7, 0xdf, 0x08, 0x0c, 0x8f, 0xd1, 0x9e, 0xf0, 0x2c, + 0x9d, 0x88, 0x31, 0xc2, 0x50, 0x60, 0xa6, 0xdb, 0xa6, 0x4c, 0xf6, 0x35, 0xe5, 0xb4, 0x28, 0x38, + 0x63, 0x30, 0x34, 0x0b, 0x79, 0xb3, 0xdd, 0x76, 0x69, 0xdb, 0x64, 0xb4, 0x31, 0x28, 0x67, 0xc4, + 0x61, 0x51, 0x10, 0xfe, 0x18, 0xa6, 0x7d, 0x65, 0x29, 0x93, 0xbe, 0x03, 0x99, 0x4f, 0x04, 0x64, + 0xcc, 0x93, 0x97, 0x44, 0x55, 0x61, 0xcc, 0x47, 0x8b, 0xbf, 0x8f, 0xfb, 0x3c, 0xe3, 0x5b, 0x90, + 0x96, 0xe8, 0xe8, 0x7c, 0xb4, 0x3b, 0x91, 0x6f, 0x33, 0x7c, 0xae, 0x5a, 0x0d, 0x0c, 0x69, 0x49, + 0x48, 0x19, 0x5e, 0xf8, 0x86, 0x84, 0x10, 0xf5, 0xc5, 0xbf, 0xd5, 0xe0, 0xb5, 0x65, 0xca, 0x68, + 0x93, 0xd1, 0xd6, 0x8a, 0x45, 0x3b, 0xad, 0x93, 0x36, 0xce, 0xda, 0x89, 0x1b, 0xe7, 0x71, 0x6f, + 0x5f, 0x7a, 
0xf4, 0xed, 0x6b, 0x0d, 0xce, 0x8c, 0xb2, 0xa8, 0x34, 0x5a, 0x87, 0xf4, 0xae, 0x80, + 0x1c, 0x7d, 0xea, 0x8c, 0xed, 0x20, 0x0a, 0x0d, 0x3f, 0x86, 0xa9, 0xd8, 0x82, 0xd0, 0x30, 0xb7, + 0xa8, 0x8a, 0x76, 0x72, 0x82, 0xea, 0x60, 0xb0, 0x41, 0x4f, 0x06, 0xb9, 0xe9, 0x85, 0x73, 0xc7, + 0x50, 0xdd, 0x1e, 0xf4, 0x28, 0x11, 0x88, 0xdc, 0x1d, 0x9a, 0xa6, 0xdb, 0xb2, 0x6c, 0xb3, 0x63, + 0x31, 0xc9, 0xbe, 0x41, 0xa2, 0xa0, 0x4b, 0x17, 0x21, 0x17, 0xfc, 0x8b, 0x80, 0xf2, 0x90, 0x59, + 0xd9, 0x24, 0x3f, 0x5c, 0x24, 0xcb, 0xc5, 0x04, 0x2a, 0x40, 0xb6, 0xb1, 0xb8, 0x74, 0x5b, 0xcc, + 0xb4, 0x4b, 0xf7, 0xe0, 0xf4, 0x91, 0x43, 0x10, 0x40, 0x7a, 0x6b, 0x9b, 0xac, 0x6d, 0xac, 0x16, + 0x13, 0x28, 0x03, 0xfa, 0xda, 0xc6, 0x76, 0x51, 0x43, 0x39, 0x48, 0xad, 0xac, 0x6f, 0x2e, 0x6e, + 0x17, 0x93, 0x28, 0x0b, 0x46, 0x63, 0x73, 0x73, 0xbd, 0xa8, 0x73, 0x62, 0xcb, 0x1f, 0x91, 0xc5, + 0xed, 0xb5, 0xcd, 0x8d, 0xa2, 0xc1, 0x51, 0x1a, 0x3f, 0xda, 0xbe, 0xb9, 0x55, 0x4c, 0x2d, 0x7c, + 0x91, 0xf2, 0x53, 0x83, 0x8b, 0xbe, 0x07, 0x29, 0x19, 0xef, 0xcf, 0x84, 0x92, 0x45, 0xff, 0x07, + 0xa8, 0x9c, 0x3d, 0x02, 0x97, 0x0a, 0xc7, 0x89, 0x77, 0x34, 0xb4, 0x01, 0x79, 0x01, 0x54, 0x2f, + 0x7e, 0xe7, 0x47, 0x1f, 0xde, 0x62, 0x94, 0x5e, 0x3f, 0x66, 0x35, 0x42, 0xef, 0x3a, 0xa4, 0xc4, + 0xa5, 0x8a, 0x72, 0x13, 0x7d, 0xb1, 0x8d, 0x72, 0x13, 0x7b, 0x03, 0xc5, 0x09, 0xf4, 0x5d, 0x30, + 0x78, 0x0f, 0x8c, 0x22, 0x55, 0x41, 0xe4, 0xa1, 0xae, 0x72, 0x66, 0x14, 0x1c, 0x39, 0xf6, 0xfd, + 0xe0, 0xbd, 0xf1, 0xec, 0xe8, 0xbb, 0x87, 0xbf, 0xbd, 0x7c, 0x74, 0x21, 0x38, 0x79, 0x53, 0x3e, + 0x8c, 0xf9, 0xdd, 0x37, 0x7a, 0x3d, 0x7e, 0xd4, 0x48, 0xb3, 0x5e, 0xa9, 0x1e, 0xb7, 0x1c, 0x10, + 0x5c, 0x87, 0x7c, 0xa4, 0xf3, 0x8d, 0xaa, 0xf5, 0x68, 0xdb, 0x1e, 0x55, 0xeb, 0x98, 0x76, 0x19, + 0x27, 0xd0, 0x2a, 0x64, 0x79, 0x2d, 0xc5, 0x53, 0x0a, 0x3a, 0x37, 0x5a, 0x32, 0x45, 0x52, 0x65, + 0xe5, 0xfc, 0xf8, 0xc5, 0x80, 0xd0, 0x0f, 0x20, 0xb7, 0x4a, 0x99, 0x8a, 0x37, 0x67, 0x47, 0x03, + 0xd6, 0x18, 0x4d, 0xc5, 0x83, 0x1e, 0x4e, 0xa0, 0x8f, 0x45, 0x59, 0x17, 0xbf, 0xc1, 0xa8, 0x76, + 0xcc, 0x9d, 0x0a, 0xf8, 0x9a, 0x3d, 0x1e, 0xc1, 0xa7, 0xbc, 0xf0, 0x13, 0xff, 0xff, 0xd0, 0x65, + 0x93, 0x99, 0x68, 0x13, 0xa6, 0x85, 0xc8, 0xc1, 0x1f, 0xa6, 0x31, 0xd7, 0x3c, 0xf2, 0xef, 0x6c, + 0xcc, 0x35, 0x8f, 0xfe, 0x4b, 0x8b, 0x13, 0x8d, 0xfb, 0x4f, 0x9f, 0x55, 0x13, 0x5f, 0x3e, 0xab, + 0x26, 0xbe, 0x7e, 0x56, 0xd5, 0x7e, 0x7e, 0x50, 0xd5, 0x7e, 0x7f, 0x50, 0xd5, 0x9e, 0x1c, 0x54, + 0xb5, 0xa7, 0x07, 0x55, 0xed, 0x1f, 0x07, 0x55, 0xed, 0x9f, 0x07, 0xd5, 0xc4, 0xd7, 0x07, 0x55, + 0xed, 0xb3, 0xe7, 0xd5, 0xc4, 0xd3, 0xe7, 0xd5, 0xc4, 0x97, 0xcf, 0xab, 0x89, 0xfb, 0x6f, 0xfc, + 0x87, 0x1e, 0x43, 0x46, 0xc1, 0xb4, 0xf8, 0x5c, 0xfd, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x10, + 0x3a, 0x9f, 0xc0, 0xce, 0x1e, 0x00, 0x00, } func (x Direction) String() string { @@ -2718,6 +2927,13 @@ func (x Direction) String() string { } return strconv.Itoa(int(x)) } +func (x DetectedFieldType) String() string { + s, ok := DetectedFieldType_name[int32(x)] + if ok { + return s + } + return strconv.Itoa(int(x)) +} func (this *StreamRatesRequest) Equal(that interface{}) bool { if that == nil { return this == nil @@ -4192,6 +4408,103 @@ func (this *Volume) Equal(that interface{}) bool { } return true } +func (this *DetectedFieldsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedFieldsRequest) + if !ok { + that2, ok := that.(DetectedFieldsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == 
nil + } else if this == nil { + return false + } + if that1.Start == nil { + if this.Start != nil { + return false + } + } else if !this.Start.Equal(*that1.Start) { + return false + } + if that1.End == nil { + if this.End != nil { + return false + } + } else if !this.End.Equal(*that1.End) { + return false + } + if this.Query != that1.Query { + return false + } + return true +} +func (this *DetectedFieldsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedFieldsResponse) + if !ok { + that2, ok := that.(DetectedFieldsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if len(this.Fields) != len(that1.Fields) { + return false + } + for i := range this.Fields { + if !this.Fields[i].Equal(that1.Fields[i]) { + return false + } + } + return true +} +func (this *DetectedField) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedField) + if !ok { + that2, ok := that.(DetectedField) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if this.Label != that1.Label { + return false + } + if this.Type != that1.Type { + return false + } + if this.Cardinality != that1.Cardinality { + return false + } + return true +} func (this *StreamRatesRequest) GoString() string { if this == nil { return "nil" @@ -4773,6 +5086,42 @@ func (this *Volume) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *DetectedFieldsRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 7) + s = append(s, "&logproto.DetectedFieldsRequest{") + s = append(s, "Start: "+fmt.Sprintf("%#v", this.Start)+",\n") + s = append(s, "End: "+fmt.Sprintf("%#v", this.End)+",\n") + s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedFieldsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.DetectedFieldsResponse{") + if this.Fields != nil { + s = append(s, "Fields: "+fmt.Sprintf("%#v", this.Fields)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedField) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 7) + s = append(s, "&logproto.DetectedField{") + s = append(s, "Label: "+fmt.Sprintf("%#v", this.Label)+",\n") + s = append(s, "Type: "+fmt.Sprintf("%#v", this.Type)+",\n") + s = append(s, "Cardinality: "+fmt.Sprintf("%#v", this.Cardinality)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func valueToGoStringLogproto(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -4807,6 +5156,7 @@ type QuerierClient interface { // Note: this MUST be the same as the variant defined in // indexgateway.proto on the IndexGateway service. GetVolume(ctx context.Context, in *VolumeRequest, opts ...grpc.CallOption) (*VolumeResponse, error) + GetDetectedFields(ctx context.Context, in *DetectedFieldsRequest, opts ...grpc.CallOption) (*DetectedFieldsResponse, error) } type querierClient struct { @@ -4967,6 +5317,15 @@ func (c *querierClient) GetVolume(ctx context.Context, in *VolumeRequest, opts . 
return out, nil } +func (c *querierClient) GetDetectedFields(ctx context.Context, in *DetectedFieldsRequest, opts ...grpc.CallOption) (*DetectedFieldsResponse, error) { + out := new(DetectedFieldsResponse) + err := c.cc.Invoke(ctx, "/logproto.Querier/GetDetectedFields", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // QuerierServer is the server API for Querier service. type QuerierServer interface { Query(*QueryRequest, Querier_QueryServer) error @@ -4982,6 +5341,7 @@ type QuerierServer interface { // Note: this MUST be the same as the variant defined in // indexgateway.proto on the IndexGateway service. GetVolume(context.Context, *VolumeRequest) (*VolumeResponse, error) + GetDetectedFields(context.Context, *DetectedFieldsRequest) (*DetectedFieldsResponse, error) } // UnimplementedQuerierServer can be embedded to have forward compatible implementations. @@ -5015,6 +5375,9 @@ func (*UnimplementedQuerierServer) GetStats(ctx context.Context, req *IndexStats func (*UnimplementedQuerierServer) GetVolume(ctx context.Context, req *VolumeRequest) (*VolumeResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetVolume not implemented") } +func (*UnimplementedQuerierServer) GetDetectedFields(ctx context.Context, req *DetectedFieldsRequest) (*DetectedFieldsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetDetectedFields not implemented") +} func RegisterQuerierServer(s *grpc.Server, srv QuerierServer) { s.RegisterService(&_Querier_serviceDesc, srv) @@ -5191,6 +5554,24 @@ func _Querier_GetVolume_Handler(srv interface{}, ctx context.Context, dec func(i return interceptor(ctx, in, info, handler) } +func _Querier_GetDetectedFields_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DetectedFieldsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(QuerierServer).GetDetectedFields(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/logproto.Querier/GetDetectedFields", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(QuerierServer).GetDetectedFields(ctx, req.(*DetectedFieldsRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _Querier_serviceDesc = grpc.ServiceDesc{ ServiceName: "logproto.Querier", HandlerType: (*QuerierServer)(nil), @@ -5219,6 +5600,10 @@ var _Querier_serviceDesc = grpc.ServiceDesc{ MethodName: "GetVolume", Handler: _Querier_GetVolume_Handler, }, + { + MethodName: "GetDetectedFields", + Handler: _Querier_GetDetectedFields_Handler, + }, }, Streams: []grpc.StreamDesc{ { @@ -7326,17 +7711,144 @@ func (m *Volume) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func encodeVarintLogproto(dAtA []byte, offset int, v uint64) int { - offset -= sovLogproto(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} +func (m *DetectedFieldsRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedFieldsRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedFieldsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := 
len(dAtA) + _ = i + var l int + _ = l + if len(m.Query) > 0 { + i -= len(m.Query) + copy(dAtA[i:], m.Query) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Query))) + i-- + dAtA[i] = 0x1a + } + if m.End != nil { + n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.End, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.End):]) + if err21 != nil { + return 0, err21 + } + i -= n21 + i = encodeVarintLogproto(dAtA, i, uint64(n21)) + i-- + dAtA[i] = 0x12 + } + if m.Start != nil { + n22, err22 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Start, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start):]) + if err22 != nil { + return 0, err22 + } + i -= n22 + i = encodeVarintLogproto(dAtA, i, uint64(n22)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *DetectedFieldsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedFieldsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedFieldsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Fields) > 0 { + for iNdEx := len(m.Fields) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Fields[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintLogproto(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *DetectedField) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedField) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedField) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Cardinality != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.Cardinality)) + i-- + dAtA[i] = 0x18 + } + if m.Type != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.Type)) + i-- + dAtA[i] = 0x10 + } + if len(m.Label) > 0 { + i -= len(m.Label) + copy(dAtA[i:], m.Label) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Label))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func encodeVarintLogproto(dAtA []byte, offset int, v uint64) int { + offset -= sovLogproto(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} func (m *StreamRatesRequest) Size() (n int) { if m == nil { return 0 @@ -8219,6 +8731,61 @@ func (m *Volume) Size() (n int) { return n } +func (m *DetectedFieldsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Start != nil { + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start) + n += 1 + l + sovLogproto(uint64(l)) + } + if m.End != nil { + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.End) + n += 1 + l + sovLogproto(uint64(l)) + } + l = len(m.Query) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + return n +} + +func (m *DetectedFieldsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Fields) > 0 { + for _, e := range m.Fields { + l = e.Size() + n += 1 + l + sovLogproto(uint64(l)) + } + } + return n +} + +func (m 
*DetectedField) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Label) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + if m.Type != 0 { + n += 1 + sovLogproto(uint64(m.Type)) + } + if m.Cardinality != 0 { + n += 1 + sovLogproto(uint64(m.Cardinality)) + } + return n +} + func sovLogproto(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -8822,6 +9389,45 @@ func (this *Volume) String() string { }, "") return s } +func (this *DetectedFieldsRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedFieldsRequest{`, + `Start:` + strings.Replace(fmt.Sprintf("%v", this.Start), "Timestamp", "types.Timestamp", 1) + `,`, + `End:` + strings.Replace(fmt.Sprintf("%v", this.End), "Timestamp", "types.Timestamp", 1) + `,`, + `Query:` + fmt.Sprintf("%v", this.Query) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedFieldsResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForFields := "[]*DetectedField{" + for _, f := range this.Fields { + repeatedStringForFields += strings.Replace(f.String(), "DetectedField", "DetectedField", 1) + "," + } + repeatedStringForFields += "}" + s := strings.Join([]string{`&DetectedFieldsResponse{`, + `Fields:` + repeatedStringForFields + `,`, + `}`, + }, "") + return s +} +func (this *DetectedField) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedField{`, + `Label:` + fmt.Sprintf("%v", this.Label) + `,`, + `Type:` + fmt.Sprintf("%v", this.Type) + `,`, + `Cardinality:` + fmt.Sprintf("%v", this.Cardinality) + `,`, + `}`, + }, "") + return s +} func valueToStringLogproto(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -14733,6 +15339,373 @@ func (m *Volume) Unmarshal(dAtA []byte) error { } return nil } +func (m *DetectedFieldsRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedFieldsRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedFieldsRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Start", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Start == nil { + m.Start = new(time.Time) + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.Start, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field End", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + 
return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.End == nil { + m.End = new(time.Time) + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.End, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Query = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedFieldsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedFieldsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Fields", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Fields = append(m.Fields, &DetectedField{}) + if err := m.Fields[len(m.Fields)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedField) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire 
uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedField: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedField: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Label", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Label = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) + } + m.Type = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Type |= DetectedFieldType(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Cardinality", wireType) + } + m.Cardinality = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Cardinality |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipLogproto(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index c50246a1b57b5..48799a85dce1a 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -30,6 +30,8 @@ service Querier { // Note: this MUST be the same as the variant defined in // indexgateway.proto on the IndexGateway service. 
rpc GetVolume(VolumeRequest) returns (VolumeResponse) {} + + rpc GetDetectedFields(DetectedFieldsRequest) returns (DetectedFieldsResponse) {} } service StreamData { @@ -420,3 +422,34 @@ message Volume { string name = 1 [(gogoproto.jsontag) = "name"]; uint64 volume = 3 [(gogoproto.jsontag) = "volume"]; } + +message DetectedFieldsRequest { + google.protobuf.Timestamp start = 1 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = true + ]; + google.protobuf.Timestamp end = 2 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = true + ]; + string query = 3; // Naming this query instead of match because this should be with queryrangebase.Request interface +} + +message DetectedFieldsResponse { + repeated DetectedField fields = 1; +} + +enum DetectedFieldType { + STRING = 0; + INT = 1; + FLOAT = 2; + BOOL = 3; + DURATION = 4; + BYTES = 5; +} + +message DetectedField { + string label = 1; + DetectedFieldType type = 2; + uint64 cardinality = 3; +} diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index bd5bfec77db5e..19c1875475351 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -479,6 +479,36 @@ func RecordVolumeQueryMetrics(ctx context.Context, log log.Logger, start, end ti execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) } +func RecordDetectedFieldsQueryMetrics(ctx context.Context, log log.Logger, start, end time.Time, query string, status string, stats logql_stats.Result) { + var ( + logger = fixLogger(ctx, log) + latencyType = latencyTypeFast + queryType = QueryTypeVolume + ) + + // Tag throughput metric by latency type based on a threshold. + // Latency below the threshold is fast, above is slow. + if stats.Summary.ExecTime > slowQueryThresholdSecond { + latencyType = latencyTypeSlow + } + + level.Info(logger).Log( + "latency", latencyType, + "query_type", queryType, + "query", query, + "query_hash", util.HashedQuery(query), + "start", start.Format(time.RFC3339Nano), + "end", end.Format(time.RFC3339Nano), + "start_delta", time.Since(start), + "end_delta", time.Since(end), + "length", end.Sub(start), + "status", status, + // "duration", time.Duration(int64(stats.Summary.ExecTime*float64(time.Second))), + ) + //TODO(twhitney): add stats and exec time + // execLatency.WithLabelValues(status, queryType, "").Observe(stats.Summary.ExecTime) +} + func recordUsageStats(queryType string, stats logql_stats.Result) { if queryType == QueryTypeMetric { bytePerSecondMetricUsage.Record(float64(stats.Summary.BytesProcessedPerSecond)) diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index 1c9a8c189a5b3..b62a122fe5d8d 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -1057,6 +1057,10 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { t.Server.HTTP.Path("/api/prom/tail").Methods("GET", "POST").Handler(defaultHandler) } + if t.Cfg.Frontend.ExperimentalAPIsEnabled { + t.Server.HTTP.Path("/loki/api/experimental/detected_fields").Methods("GET", "POST").Handler(frontendHandler) + } + if t.frontend == nil { return services.NewIdleService(nil, func(_ error) error { if t.stopper != nil { diff --git a/pkg/lokifrontend/config.go b/pkg/lokifrontend/config.go index 30ab5cd29fecc..648a049c74812 100644 --- a/pkg/lokifrontend/config.go +++ b/pkg/lokifrontend/config.go @@ -20,6 +20,8 @@ type Config struct { TailProxyURL string `yaml:"tail_proxy_url"` TLS tls.ClientConfig `yaml:"tail_tls_config"` + + ExperimentalAPIsEnabled bool `yaml:"experimental_apis_enabled"` } // RegisterFlags adds the flags required to config this to 
the given FlagSet. @@ -32,4 +34,5 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.BoolVar(&cfg.CompressResponses, "querier.compress-http-responses", true, "Compress HTTP responses.") f.StringVar(&cfg.DownstreamURL, "frontend.downstream-url", "", "URL of downstream Loki.") f.StringVar(&cfg.TailProxyURL, "frontend.tail-proxy-url", "", "URL of querier for tail proxy.") + f.BoolVar(&cfg.ExperimentalAPIsEnabled, "frontend.experimental-apis-enabled", false, "Whether to enable experimental APIs in the frontend.") } diff --git a/pkg/querier/handler.go b/pkg/querier/handler.go index 3e57d61396945..b2a7fd70cae44 100644 --- a/pkg/querier/handler.go +++ b/pkg/querier/handler.go @@ -111,6 +111,15 @@ func (h *Handler) Do(ctx context.Context, req queryrangebase.Request) (queryrang return nil, err } return &queryrange.VolumeResponse{Response: result}, nil + case *queryrange.DetectedFieldsRequest: + result, err := h.api.DetectedFieldsHandler(ctx, &concrete.DetectedFieldsRequest) + if err != nil { + return nil, err + } + + return &queryrange.DetectedFieldsResponse{ + Response: result, + }, nil default: return nil, fmt.Errorf("unsupported query type %T", req) } diff --git a/pkg/querier/http.go b/pkg/querier/http.go index 664e3b2ba4c9e..c3b74428b5b1f 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -376,6 +376,19 @@ func (q *QuerierAPI) VolumeHandler(ctx context.Context, req *logproto.VolumeRequ return resp, nil } +func (q *QuerierAPI) DetectedFieldsHandler(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + resp, err := q.querier.DetectedFields(ctx, req) + if err != nil { + return nil, err + } + if resp == nil { // Some stores don't implement this + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{}, + }, nil + } + return resp, nil +} + func (q *QuerierAPI) validateMaxEntriesLimits(ctx context.Context, expr syntax.Expr, limit uint32) error { tenantIDs, err := tenant.TenantIDs(ctx) if err != nil { diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index d368900b21e68..c2e744a6f03d9 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -95,6 +95,7 @@ type Querier interface { IndexStats(ctx context.Context, req *loghttp.RangeQuery) (*stats.Stats, error) IndexShards(ctx context.Context, req *loghttp.RangeQuery, targetBytesPerShard uint64) (*logproto.ShardsResponse, error) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) + DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) } type Limits querier_limits.Limits @@ -897,3 +898,15 @@ func (q *SingleTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRe return seriesvolume.Merge(responses, req.Limit), nil } + +func (q *SingleTenantQuerier) DetectedFields(_ context.Context, _ *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{ + { + Label: "foo", + Type: logproto.DetectedFieldString, + Cardinality: 1, + }, + }, + }, nil +} diff --git a/pkg/querier/querier_mock_test.go b/pkg/querier/querier_mock_test.go index ed34a91bcaf7b..b94037898c554 100644 --- a/pkg/querier/querier_mock_test.go +++ b/pkg/querier/querier_mock_test.go @@ -580,6 +580,18 @@ func (q *querierMock) Volume(ctx context.Context, req *logproto.VolumeRequest) ( return resp.(*logproto.VolumeResponse), err } +func (q *querierMock) DetectedFields(ctx context.Context, req 
*logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + args := q.MethodCalled("DetectedFields", ctx, req) + + resp := args.Get(0) + err := args.Error(1) + if resp == nil { + return nil, err + } + + return resp.(*logproto.DetectedFieldsResponse), err +} + type engineMock struct { util.ExtendedMock } diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index fb23c9a0ecd5d..397546cfaf98f 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -388,6 +388,16 @@ func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (quer TargetLabels: req.TargetLabels, AggregateBy: req.AggregateBy, }, err + case DetectedFieldsOp: + req, err := loghttp.ParseDetectedFieldsQuery(r) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + + return &DetectedFieldsRequest{ + DetectedFieldsRequest: *req, + path: r.URL.Path, + }, nil default: return nil, httpgrpc.Errorf(http.StatusNotFound, fmt.Sprintf("unknown request path: %s", r.URL.Path)) } @@ -581,6 +591,16 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) TargetLabels: req.TargetLabels, AggregateBy: req.AggregateBy, }, ctx, err + case DetectedFieldsOp: + req, err := loghttp.ParseDetectedFieldsQuery(httpReq) + if err != nil { + return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + + return &DetectedFieldsRequest{ + DetectedFieldsRequest: *req, + path: httpReq.URL.Path, + }, ctx, nil default: return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, fmt.Sprintf("unknown request path in HTTP gRPC decode: %s", r.Url)) } @@ -837,6 +857,26 @@ func (c Codec) EncodeRequest(ctx context.Context, r queryrangebase.Request) (*ht Body: http.NoBody, Header: header, } + return req.WithContext(ctx), nil + case *DetectedFieldsRequest: + params := url.Values{ + "start": []string{fmt.Sprintf("%d", request.Start.UnixNano())}, + "end": []string{fmt.Sprintf("%d", request.End.UnixNano())}, + "query": []string{request.GetQuery()}, + } + + u := &url.URL{ + Path: "/loki/api/experimental/detected_fields", + RawQuery: params.Encode(), + } + req := &http.Request{ + Method: "GET", + RequestURI: u.String(), // This is what the httpgrpc code looks at. 
+ URL: u, + Body: http.NoBody, + Header: header, + } + return req.WithContext(ctx), nil default: return nil, httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid request format, got (%T)", r)) @@ -863,6 +903,8 @@ func (c Codec) Path(r queryrangebase.Request) string { return "/loki/api/v1/index/stats" case *logproto.VolumeRequest: return "/loki/api/v1/index/volume_range" + case *DetectedFieldsRequest: + return "/loki/api/experimental/detected_fields" } return "other" @@ -964,6 +1006,15 @@ func decodeResponseJSONFrom(buf []byte, req queryrangebase.Request, headers http Response: &resp, Headers: httpResponseHeadersToPromResponseHeaders(headers), }, nil + case *DetectedFieldsRequest: + var resp logproto.DetectedFieldsResponse + if err := json.Unmarshal(buf, &resp); err != nil { + return nil, httpgrpc.Errorf(http.StatusInternalServerError, "error decoding response: %v", err) + } + return &DetectedFieldsResponse{ + Response: &resp, + Headers: httpResponseHeadersToPromResponseHeaders(headers), + }, nil default: var resp loghttp.QueryResponse if err := resp.UnmarshalJSON(buf); err != nil { @@ -1178,6 +1229,10 @@ func encodeResponseJSONTo(version loghttp.Version, res queryrangebase.Response, if err := marshal.WriteVolumeResponseJSON(response.Response, w); err != nil { return err } + case *DetectedFieldsResponse: + if err := marshal.WriteDetectedFieldsResponseJSON(response.Response, w); err != nil { + return err + } default: return httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid response format, got (%T)", res)) } @@ -1800,3 +1855,75 @@ func mergeLokiResponse(responses ...queryrangebase.Response) *LokiResponse { }, } } + +// In some other world LabelRequest could implement queryrangebase.Request. +type DetectedFieldsRequest struct { + logproto.DetectedFieldsRequest + path string +} + +func NewDetectedFieldsRequest(start, end time.Time, query, path string) *DetectedFieldsRequest { + return &DetectedFieldsRequest{ + DetectedFieldsRequest: logproto.DetectedFieldsRequest{ + Start: &start, + End: &end, + Query: query, + }, + path: path, + } +} + +func (r *DetectedFieldsRequest) AsProto() *logproto.DetectedFieldsRequest { + return &r.DetectedFieldsRequest +} + +func (r *DetectedFieldsRequest) GetEnd() time.Time { + return *r.End +} + +func (r *DetectedFieldsRequest) GetEndTs() time.Time { + return *r.End +} + +func (r *DetectedFieldsRequest) GetStart() time.Time { + return *r.Start +} + +func (r *DetectedFieldsRequest) GetStartTs() time.Time { + return *r.Start +} + +func (r *DetectedFieldsRequest) GetStep() int64 { + return 0 +} + +func (r *DetectedFieldsRequest) Path() string { + return r.path +} + +func (r *DetectedFieldsRequest) WithStartEnd(s, e time.Time) queryrangebase.Request { + clone := *r + clone.Start = &s + clone.End = &e + return &clone +} + +// WithStartEndForCache implements resultscache.Request. 
+func (r *DetectedFieldsRequest) WithStartEndForCache(s time.Time, e time.Time) resultscache.Request { + return r.WithStartEnd(s, e).(resultscache.Request) +} + +func (r *DetectedFieldsRequest) WithQuery(query string) queryrangebase.Request { + clone := *r + clone.Query = query + return &clone +} + +func (r *DetectedFieldsRequest) LogToSpan(sp opentracing.Span) { + sp.LogFields( + otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + ) +} + +func (*DetectedFieldsRequest) GetCachingOptions() (res queryrangebase.CachingOptions) { return } diff --git a/pkg/querier/queryrange/extensions.go b/pkg/querier/queryrange/extensions.go index 46a8ebdc17084..b2924341eac72 100644 --- a/pkg/querier/queryrange/extensions.go +++ b/pkg/querier/queryrange/extensions.go @@ -236,3 +236,20 @@ func (m *ShardsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader m.Headers = h return m } + +// GetHeaders returns the HTTP headers in the response. +func (m *DetectedFieldsResponse) GetHeaders() []*queryrangebase.PrometheusResponseHeader { + if m != nil { + return convertPrometheusResponseHeadersToPointers(m.Headers) + } + return nil +} + +func (m *DetectedFieldsResponse) SetHeader(name, value string) { + m.Headers = setHeader(m.Headers, name, value) +} + +func (m *DetectedFieldsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader) queryrangebase.Response { + m.Headers = h + return m +} diff --git a/pkg/querier/queryrange/queryrange.pb.go b/pkg/querier/queryrange/queryrange.pb.go index 38477b8b83912..1eaee30c61eb8 100644 --- a/pkg/querier/queryrange/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrange.pb.go @@ -868,6 +868,43 @@ func (m *ShardsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ShardsResponse proto.InternalMessageInfo +type DetectedFieldsResponse struct { + Response *github_com_grafana_loki_pkg_logproto.DetectedFieldsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.DetectedFieldsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` +} + +func (m *DetectedFieldsResponse) Reset() { *m = DetectedFieldsResponse{} } +func (*DetectedFieldsResponse) ProtoMessage() {} +func (*DetectedFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_51b9d53b40d11902, []int{14} +} +func (m *DetectedFieldsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedFieldsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedFieldsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedFieldsResponse.Merge(m, src) +} +func (m *DetectedFieldsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedFieldsResponse proto.InternalMessageInfo + type QueryResponse struct { Status *rpc.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` 
// Types that are valid to be assigned to Response: @@ -880,13 +917,14 @@ type QueryResponse struct { // *QueryResponse_TopkSketches // *QueryResponse_QuantileSketches // *QueryResponse_ShardsResponse + // *QueryResponse_DetectedFields Response isQueryResponse_Response `protobuf_oneof:"response"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } func (*QueryResponse) ProtoMessage() {} func (*QueryResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{14} + return fileDescriptor_51b9d53b40d11902, []int{15} } func (m *QueryResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -949,6 +987,9 @@ type QueryResponse_QuantileSketches struct { type QueryResponse_ShardsResponse struct { ShardsResponse *ShardsResponse `protobuf:"bytes,10,opt,name=shardsResponse,proto3,oneof"` } +type QueryResponse_DetectedFields struct { + DetectedFields *DetectedFieldsResponse `protobuf:"bytes,11,opt,name=detectedFields,proto3,oneof"` +} func (*QueryResponse_Series) isQueryResponse_Response() {} func (*QueryResponse_Labels) isQueryResponse_Response() {} @@ -959,6 +1000,7 @@ func (*QueryResponse_Volume) isQueryResponse_Response() {} func (*QueryResponse_TopkSketches) isQueryResponse_Response() {} func (*QueryResponse_QuantileSketches) isQueryResponse_Response() {} func (*QueryResponse_ShardsResponse) isQueryResponse_Response() {} +func (*QueryResponse_DetectedFields) isQueryResponse_Response() {} func (m *QueryResponse) GetResponse() isQueryResponse_Response { if m != nil { @@ -1037,6 +1079,13 @@ func (m *QueryResponse) GetShardsResponse() *ShardsResponse { return nil } +func (m *QueryResponse) GetDetectedFields() *DetectedFieldsResponse { + if x, ok := m.GetResponse().(*QueryResponse_DetectedFields); ok { + return x.DetectedFields + } + return nil +} + // XXX_OneofWrappers is for the internal use of the proto package. 
func (*QueryResponse) XXX_OneofWrappers() []interface{} { return []interface{}{ @@ -1049,6 +1098,7 @@ func (*QueryResponse) XXX_OneofWrappers() []interface{} { (*QueryResponse_TopkSketches)(nil), (*QueryResponse_QuantileSketches)(nil), (*QueryResponse_ShardsResponse)(nil), + (*QueryResponse_DetectedFields)(nil), } } @@ -1061,6 +1111,7 @@ type QueryRequest struct { // *QueryRequest_Streams // *QueryRequest_Volume // *QueryRequest_ShardsRequest + // *QueryRequest_DetectedFields Request isQueryRequest_Request `protobuf_oneof:"request"` Metadata map[string]string `protobuf:"bytes,7,rep,name=metadata,proto3" json:"metadata" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } @@ -1068,7 +1119,7 @@ type QueryRequest struct { func (m *QueryRequest) Reset() { *m = QueryRequest{} } func (*QueryRequest) ProtoMessage() {} func (*QueryRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{15} + return fileDescriptor_51b9d53b40d11902, []int{16} } func (m *QueryRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1125,14 +1176,18 @@ type QueryRequest_Volume struct { type QueryRequest_ShardsRequest struct { ShardsRequest *logproto.ShardsRequest `protobuf:"bytes,8,opt,name=shardsRequest,proto3,oneof"` } +type QueryRequest_DetectedFields struct { + DetectedFields *logproto.DetectedFieldsRequest `protobuf:"bytes,9,opt,name=detectedFields,proto3,oneof"` +} -func (*QueryRequest_Series) isQueryRequest_Request() {} -func (*QueryRequest_Labels) isQueryRequest_Request() {} -func (*QueryRequest_Stats) isQueryRequest_Request() {} -func (*QueryRequest_Instant) isQueryRequest_Request() {} -func (*QueryRequest_Streams) isQueryRequest_Request() {} -func (*QueryRequest_Volume) isQueryRequest_Request() {} -func (*QueryRequest_ShardsRequest) isQueryRequest_Request() {} +func (*QueryRequest_Series) isQueryRequest_Request() {} +func (*QueryRequest_Labels) isQueryRequest_Request() {} +func (*QueryRequest_Stats) isQueryRequest_Request() {} +func (*QueryRequest_Instant) isQueryRequest_Request() {} +func (*QueryRequest_Streams) isQueryRequest_Request() {} +func (*QueryRequest_Volume) isQueryRequest_Request() {} +func (*QueryRequest_ShardsRequest) isQueryRequest_Request() {} +func (*QueryRequest_DetectedFields) isQueryRequest_Request() {} func (m *QueryRequest) GetRequest() isQueryRequest_Request { if m != nil { @@ -1190,6 +1245,13 @@ func (m *QueryRequest) GetShardsRequest() *logproto.ShardsRequest { return nil } +func (m *QueryRequest) GetDetectedFields() *logproto.DetectedFieldsRequest { + if x, ok := m.GetRequest().(*QueryRequest_DetectedFields); ok { + return x.DetectedFields + } + return nil +} + func (m *QueryRequest) GetMetadata() map[string]string { if m != nil { return m.Metadata @@ -1207,6 +1269,7 @@ func (*QueryRequest) XXX_OneofWrappers() []interface{} { (*QueryRequest_Streams)(nil), (*QueryRequest_Volume)(nil), (*QueryRequest_ShardsRequest)(nil), + (*QueryRequest_DetectedFields)(nil), } } @@ -1225,6 +1288,7 @@ func init() { proto.RegisterType((*TopKSketchesResponse)(nil), "queryrange.TopKSketchesResponse") proto.RegisterType((*QuantileSketchResponse)(nil), "queryrange.QuantileSketchResponse") proto.RegisterType((*ShardsResponse)(nil), "queryrange.ShardsResponse") + proto.RegisterType((*DetectedFieldsResponse)(nil), "queryrange.DetectedFieldsResponse") proto.RegisterType((*QueryResponse)(nil), "queryrange.QueryResponse") proto.RegisterType((*QueryRequest)(nil), "queryrange.QueryRequest") 
proto.RegisterMapType((map[string]string)(nil), "queryrange.QueryRequest.MetadataEntry") @@ -1235,107 +1299,112 @@ func init() { } var fileDescriptor_51b9d53b40d11902 = []byte{ - // 1586 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0xcb, 0x6f, 0x1b, 0x45, - 0x18, 0xf7, 0xfa, 0x19, 0x4f, 0x9a, 0x10, 0x26, 0x51, 0xba, 0xa4, 0xed, 0xae, 0x65, 0x89, 0x36, - 0x20, 0x58, 0xd3, 0xa4, 0xf4, 0x09, 0x88, 0x2e, 0x69, 0xe5, 0x8a, 0x16, 0xb5, 0x9b, 0x88, 0x03, - 0xe2, 0x32, 0x89, 0x27, 0xce, 0x92, 0x7d, 0x65, 0x67, 0x9c, 0x36, 0x37, 0xfe, 0x00, 0x90, 0xfa, - 0x57, 0x20, 0x24, 0xaa, 0x9e, 0x38, 0x71, 0xe4, 0x42, 0x8f, 0x3d, 0x56, 0x91, 0x30, 0x34, 0xbd, - 0xa0, 0x9c, 0x7a, 0xe3, 0x8a, 0xe6, 0xb1, 0xeb, 0x59, 0xdb, 0x69, 0x9d, 0x22, 0xa4, 0x46, 0xe2, - 0x62, 0xcf, 0xe3, 0xfb, 0xcd, 0xce, 0xfe, 0xbe, 0xdf, 0xf7, 0xed, 0x37, 0x03, 0xce, 0x44, 0x9b, - 0xed, 0xc6, 0x56, 0x07, 0xc7, 0x2e, 0x8e, 0xf9, 0xff, 0x4e, 0x8c, 0x82, 0x36, 0x56, 0x9a, 0x56, - 0x14, 0x87, 0x34, 0x84, 0xa0, 0x37, 0x32, 0xb7, 0xd0, 0x76, 0xe9, 0x46, 0x67, 0xd5, 0x5a, 0x0b, - 0xfd, 0x46, 0x3b, 0x6c, 0x87, 0x8d, 0x76, 0x18, 0xb6, 0x3d, 0x8c, 0x22, 0x97, 0xc8, 0x66, 0x23, - 0x8e, 0xd6, 0x1a, 0x84, 0x22, 0xda, 0x21, 0x02, 0x3f, 0x37, 0xc3, 0x0c, 0x79, 0x93, 0x43, 0xe4, - 0xa8, 0x29, 0xcd, 0x79, 0x6f, 0xb5, 0xb3, 0xde, 0xa0, 0xae, 0x8f, 0x09, 0x45, 0x7e, 0x94, 0x18, - 0xb0, 0xfd, 0x79, 0x61, 0x5b, 0x20, 0xdd, 0xa0, 0x85, 0xef, 0xb5, 0x11, 0xc5, 0x77, 0xd1, 0x8e, - 0x34, 0x38, 0x91, 0x31, 0x48, 0x1a, 0x72, 0xf2, 0xad, 0xcc, 0x24, 0xd9, 0xc4, 0x74, 0x6d, 0x43, - 0x4e, 0xd5, 0xe4, 0xd4, 0x96, 0xe7, 0x87, 0x2d, 0xec, 0xf1, 0xcd, 0x12, 0xf1, 0x2b, 0x2d, 0xa6, - 0x99, 0x45, 0xd4, 0x21, 0x1b, 0xfc, 0x47, 0x0e, 0x7e, 0xf6, 0x52, 0xbe, 0x56, 0x11, 0xc1, 0x8d, - 0x16, 0x5e, 0x77, 0x03, 0x97, 0xba, 0x61, 0x40, 0xd4, 0xb6, 0x5c, 0xe4, 0xfc, 0x68, 0x8b, 0xf4, - 0xfb, 0xa0, 0xfe, 0xb0, 0x00, 0xc6, 0x6f, 0x86, 0x9b, 0xae, 0x83, 0xb7, 0x3a, 0x98, 0x50, 0x38, - 0x03, 0x4a, 0xdc, 0x46, 0xd7, 0x6a, 0xda, 0x7c, 0xd5, 0x11, 0x1d, 0x36, 0xea, 0xb9, 0xbe, 0x4b, - 0xf5, 0x7c, 0x4d, 0x9b, 0x9f, 0x70, 0x44, 0x07, 0x42, 0x50, 0x24, 0x14, 0x47, 0x7a, 0xa1, 0xa6, - 0xcd, 0x17, 0x1c, 0xde, 0x86, 0x73, 0x60, 0xcc, 0x0d, 0x28, 0x8e, 0xb7, 0x91, 0xa7, 0x57, 0xf9, - 0x78, 0xda, 0x87, 0x9f, 0x80, 0x0a, 0xa1, 0x28, 0xa6, 0x2b, 0x44, 0x2f, 0xd6, 0xb4, 0xf9, 0xf1, - 0x85, 0x39, 0x4b, 0xf8, 0xca, 0x4a, 0x7c, 0x65, 0xad, 0x24, 0xbe, 0xb2, 0xc7, 0x1e, 0x75, 0xcd, - 0xdc, 0xfd, 0x3f, 0x4c, 0xcd, 0x49, 0x40, 0xf0, 0x32, 0x28, 0xe1, 0xa0, 0xb5, 0x42, 0xf4, 0xd2, - 0x21, 0xd0, 0x02, 0x02, 0xcf, 0x82, 0x6a, 0xcb, 0x8d, 0xf1, 0x1a, 0xe3, 0x4c, 0x2f, 0xd7, 0xb4, - 0xf9, 0xc9, 0x85, 0x69, 0x2b, 0x75, 0xed, 0x52, 0x32, 0xe5, 0xf4, 0xac, 0xd8, 0xeb, 0x45, 0x88, - 0x6e, 0xe8, 0x15, 0xce, 0x04, 0x6f, 0xc3, 0x3a, 0x28, 0x93, 0x0d, 0x14, 0xb7, 0x88, 0x3e, 0x56, - 0x2b, 0xcc, 0x57, 0x6d, 0xb0, 0xdf, 0x35, 0xe5, 0x88, 0x23, 0xff, 0xe1, 0xd7, 0xa0, 0x18, 0x79, - 0x28, 0xd0, 0x01, 0xdf, 0xe5, 0x94, 0xa5, 0x70, 0x7e, 0xdb, 0x43, 0x81, 0x7d, 0x7e, 0xb7, 0x6b, - 0x66, 0xe4, 0x1e, 0xa3, 0x75, 0x14, 0xa0, 0x86, 0x17, 0x6e, 0xba, 0x0d, 0xd5, 0x8d, 0x6c, 0x15, - 0xeb, 0x0e, 0x43, 0x33, 0x9c, 0xc3, 0x57, 0xad, 0xff, 0x96, 0x07, 0x90, 0x39, 0xec, 0x46, 0x40, - 0x28, 0x0a, 0xe8, 0xab, 0xf8, 0xed, 0x23, 0x50, 0x66, 0x31, 0xb1, 0x42, 0xb8, 0xe7, 0x46, 0x25, - 0x52, 0x62, 0xb2, 0x4c, 0x16, 0x0f, 0xc5, 0x64, 0x69, 0x28, 0x93, 0xe5, 0x97, 0x32, 0x59, 0xf9, - 0x4f, 0x98, 0xd4, 0x41, 0x91, 0xf5, 0xe0, 0x14, 0x28, 0xc4, 0xe8, 0x2e, 0x27, 0xee, 0x98, 
0xc3, - 0x9a, 0xf5, 0x9f, 0x8a, 0xe0, 0x98, 0x08, 0x0a, 0x12, 0x85, 0x01, 0xc1, 0x6c, 0xb3, 0xcb, 0x3c, - 0xf3, 0x08, 0x7a, 0xe5, 0x66, 0xf9, 0x88, 0x23, 0x67, 0xe0, 0xa7, 0xa0, 0xb8, 0x84, 0x28, 0xe2, - 0x54, 0x8f, 0x2f, 0xcc, 0xa8, 0x9b, 0x65, 0x6b, 0xb1, 0x39, 0x7b, 0x96, 0xb1, 0xb9, 0xdf, 0x35, - 0x27, 0x5b, 0x88, 0xa2, 0xf7, 0x42, 0xdf, 0xa5, 0xd8, 0x8f, 0xe8, 0x8e, 0xc3, 0x91, 0xf0, 0x43, - 0x50, 0xbd, 0x16, 0xc7, 0x61, 0xbc, 0xb2, 0x13, 0x61, 0xee, 0x9a, 0xaa, 0x7d, 0x7c, 0xbf, 0x6b, - 0x4e, 0xe3, 0x64, 0x50, 0x41, 0xf4, 0x2c, 0xe1, 0x3b, 0xa0, 0xc4, 0x3b, 0xdc, 0x19, 0x55, 0x7b, - 0x7a, 0xbf, 0x6b, 0xbe, 0xc1, 0x21, 0x8a, 0xb9, 0xb0, 0xc8, 0xfa, 0xae, 0x34, 0x92, 0xef, 0x52, - 0x09, 0x95, 0x55, 0x09, 0xe9, 0xa0, 0xb2, 0x8d, 0x63, 0xc2, 0x96, 0xa9, 0xf0, 0xf1, 0xa4, 0x0b, - 0xaf, 0x02, 0xc0, 0x88, 0x71, 0x09, 0x75, 0xd7, 0x58, 0x94, 0x30, 0x32, 0x26, 0x2c, 0x91, 0x04, - 0x1d, 0x4c, 0x3a, 0x1e, 0xb5, 0xa1, 0x64, 0x41, 0x31, 0x74, 0x94, 0x36, 0x7c, 0xa0, 0x81, 0x4a, - 0x13, 0xa3, 0x16, 0x8e, 0x89, 0x5e, 0xad, 0x15, 0xe6, 0xc7, 0x17, 0xde, 0xb6, 0xd4, 0x8c, 0x77, - 0x3b, 0x0e, 0x7d, 0x4c, 0x37, 0x70, 0x87, 0x24, 0x0e, 0x12, 0xd6, 0xf6, 0xe6, 0x6e, 0xd7, 0x5c, - 0x1d, 0x45, 0x0f, 0x23, 0x65, 0xd9, 0x03, 0x9f, 0xb3, 0xdf, 0x35, 0xb5, 0xf7, 0x9d, 0x64, 0x8b, - 0xf5, 0xdf, 0x35, 0xf0, 0x26, 0xf3, 0xf0, 0x32, 0x5b, 0x9b, 0x28, 0x01, 0xe9, 0x23, 0xba, 0xb6, - 0xa1, 0x6b, 0x4c, 0xde, 0x8e, 0xe8, 0xa8, 0x29, 0x30, 0xff, 0xaf, 0x52, 0x60, 0xe1, 0xf0, 0x29, - 0x30, 0x89, 0xc2, 0xe2, 0xd0, 0x28, 0x2c, 0x1d, 0x14, 0x85, 0xf5, 0xef, 0x0a, 0x22, 0xe3, 0x24, - 0xef, 0x77, 0x88, 0x98, 0xb8, 0x9e, 0xc6, 0x44, 0x81, 0xef, 0x36, 0x95, 0x9a, 0x58, 0xeb, 0x46, - 0x0b, 0x07, 0xd4, 0x5d, 0x77, 0x71, 0xfc, 0x92, 0xc8, 0x50, 0xe4, 0x56, 0xc8, 0xca, 0x4d, 0xd5, - 0x4a, 0xf1, 0xb5, 0xd7, 0x4a, 0x5f, 0x74, 0x94, 0x5e, 0x21, 0x3a, 0xea, 0xcf, 0xf3, 0x60, 0x96, - 0xb9, 0xe3, 0x26, 0x5a, 0xc5, 0xde, 0x17, 0xc8, 0x3f, 0xa4, 0x4b, 0x4e, 0x2b, 0x2e, 0xa9, 0xda, - 0xf0, 0x7f, 0xca, 0x47, 0xa0, 0xfc, 0x07, 0x0d, 0x8c, 0x25, 0x39, 0x1c, 0x5a, 0x00, 0x08, 0x18, - 0x4f, 0xd3, 0x82, 0xe8, 0x49, 0x06, 0x8e, 0xd3, 0x51, 0x47, 0xb1, 0x80, 0xdf, 0x80, 0xb2, 0xe8, - 0xc9, 0x28, 0x38, 0xae, 0x44, 0x01, 0x8d, 0x31, 0xf2, 0xaf, 0xb6, 0x50, 0x44, 0x71, 0x6c, 0x5f, - 0x62, 0xbb, 0xd8, 0xed, 0x9a, 0x67, 0x5e, 0x44, 0x11, 0xaf, 0x1b, 0x05, 0x8e, 0x39, 0x57, 0x3c, - 0xd3, 0x91, 0x4f, 0xa8, 0x7f, 0xaf, 0x81, 0x29, 0xb6, 0x51, 0x46, 0x4d, 0xaa, 0x8a, 0x25, 0x30, - 0x16, 0xcb, 0x36, 0xdf, 0xee, 0xf8, 0x42, 0xdd, 0xca, 0xd2, 0x3a, 0x84, 0x4a, 0xbb, 0xf8, 0xa8, - 0x6b, 0x6a, 0x4e, 0x8a, 0x84, 0x8b, 0x19, 0x1a, 0xf3, 0xc3, 0x68, 0x64, 0x90, 0x5c, 0x86, 0xb8, - 0x5f, 0xf2, 0x00, 0xde, 0x60, 0x05, 0x36, 0x13, 0x5f, 0x4f, 0xa7, 0x9d, 0x81, 0x1d, 0x9d, 0xec, - 0x91, 0x32, 0x68, 0x6f, 0x5f, 0xd9, 0xed, 0x9a, 0x17, 0x5e, 0xc4, 0xca, 0x0b, 0xc0, 0xca, 0x2b, - 0xa8, 0xc2, 0xcd, 0xbf, 0xfe, 0xdf, 0x95, 0x87, 0x79, 0x30, 0xf9, 0x65, 0xe8, 0x75, 0x7c, 0x9c, - 0x12, 0xe7, 0x0f, 0x10, 0xa7, 0xf7, 0x88, 0xcb, 0xda, 0xda, 0x17, 0x76, 0xbb, 0xe6, 0xe2, 0x48, - 0xa4, 0x65, 0x81, 0x47, 0x97, 0xb0, 0x07, 0x79, 0x30, 0xb3, 0x12, 0x46, 0x9f, 0x2f, 0xf3, 0x43, - 0x99, 0x92, 0x17, 0xf1, 0x00, 0x6d, 0x33, 0x3d, 0xda, 0x18, 0xe2, 0x16, 0xa2, 0xb1, 0x7b, 0xcf, - 0x5e, 0xdc, 0xed, 0x9a, 0x8d, 0x91, 0x28, 0xeb, 0x81, 0x8e, 0x2e, 0x5d, 0xbf, 0xe6, 0xc1, 0xec, - 0x9d, 0x0e, 0x0a, 0xa8, 0xeb, 0x61, 0x41, 0x59, 0x4a, 0xd8, 0xce, 0x00, 0x61, 0x46, 0x8f, 0xb0, - 0x2c, 0x46, 0x52, 0xf7, 0xf1, 0x6e, 0xd7, 0xbc, 0x34, 0x12, 0x75, 0xc3, 0xe0, 0x47, 0x97, 0xc4, - 0x9f, 0xf3, 0x60, 
0x72, 0x59, 0xd4, 0x4b, 0xc9, 0x1b, 0x90, 0x21, 0xe4, 0xa9, 0xb7, 0x0c, 0xd1, - 0xaa, 0x95, 0x45, 0x1c, 0x22, 0x54, 0xb3, 0xc0, 0xa3, 0x4b, 0xdb, 0xdf, 0x45, 0x30, 0xc1, 0xcf, - 0x63, 0x29, 0x6b, 0xef, 0x02, 0x59, 0xa9, 0x48, 0xce, 0x60, 0x52, 0xda, 0xc6, 0xd1, 0x9a, 0xb5, - 0x2c, 0x6b, 0x18, 0x61, 0x01, 0x2f, 0x82, 0x32, 0xe1, 0x05, 0xa4, 0xfc, 0x0e, 0x19, 0xfd, 0x87, - 0xad, 0x6c, 0xa9, 0xda, 0xcc, 0x39, 0xd2, 0x9e, 0x1d, 0x7d, 0x3d, 0x56, 0x37, 0x25, 0x05, 0x74, - 0xbd, 0x1f, 0x39, 0x58, 0x55, 0x31, 0xb4, 0xc0, 0xc0, 0xf3, 0xa0, 0xc4, 0x3f, 0x78, 0xf2, 0xfa, - 0x22, 0xf3, 0xd8, 0xc1, 0x2f, 0x4f, 0x33, 0xe7, 0x08, 0x73, 0xb8, 0x00, 0x8a, 0x51, 0x1c, 0xfa, - 0xb2, 0xf8, 0x38, 0xd9, 0xff, 0x4c, 0xf5, 0x6b, 0xdd, 0xcc, 0x39, 0xdc, 0x16, 0x9e, 0x63, 0x27, - 0x05, 0xf6, 0x99, 0x27, 0xfc, 0xe4, 0xc5, 0x32, 0x7d, 0x1f, 0x4c, 0x81, 0x24, 0xa6, 0xf0, 0x1c, - 0x28, 0x6f, 0xf3, 0x6c, 0x2e, 0xcf, 0xcc, 0x73, 0x2a, 0x28, 0x9b, 0xe7, 0xd9, 0x7b, 0x09, 0x5b, - 0x78, 0x1d, 0x1c, 0xa3, 0x61, 0xb4, 0x99, 0xe4, 0x4d, 0x79, 0x6a, 0xab, 0xa9, 0xd8, 0x61, 0x79, - 0xb5, 0x99, 0x73, 0x32, 0x38, 0x78, 0x1b, 0x4c, 0x6d, 0x65, 0xa2, 0x1b, 0x13, 0x7e, 0x09, 0xd4, - 0xc7, 0xf3, 0xf0, 0xa4, 0xd3, 0xcc, 0x39, 0x03, 0x68, 0xb8, 0x04, 0x26, 0x49, 0x46, 0xf2, 0xf2, - 0x56, 0x25, 0xf3, 0x5e, 0xd9, 0xa0, 0x68, 0xe6, 0x9c, 0x3e, 0x8c, 0x0d, 0x7a, 0x11, 0xc9, 0xcf, - 0xf6, 0x52, 0x79, 0xe2, 0xa0, 0x76, 0x21, 0x15, 0x93, 0x10, 0xde, 0xa9, 0x83, 0xc4, 0xc4, 0xcd, - 0x15, 0x2d, 0x7d, 0x90, 0x6a, 0x49, 0xa8, 0x70, 0xb6, 0x97, 0x22, 0xb9, 0x8a, 0x14, 0x84, 0xd4, - 0xcf, 0x62, 0xa2, 0x1f, 0x21, 0xbe, 0x13, 0xc3, 0x8b, 0x9e, 0x04, 0x25, 0xc5, 0x73, 0x19, 0x54, - 0x5c, 0x71, 0xd7, 0x33, 0x4c, 0x76, 0x83, 0x57, 0x41, 0x4c, 0x0e, 0x12, 0x00, 0x17, 0x7b, 0x22, - 0x12, 0xda, 0x3b, 0x3e, 0x28, 0xa2, 0x14, 0x94, 0x68, 0xe8, 0x6c, 0xaa, 0xa1, 0xb2, 0xc4, 0x0c, - 0x54, 0x0a, 0xe9, 0x8b, 0x49, 0x01, 0x5d, 0x03, 0x13, 0x09, 0xe5, 0x7c, 0x4a, 0x2a, 0xe8, 0xd4, - 0x41, 0x79, 0x2f, 0xc1, 0x67, 0x51, 0xb0, 0x09, 0xc6, 0x7c, 0x4c, 0x11, 0x3b, 0x8f, 0xe8, 0x15, - 0x9e, 0xc4, 0x4e, 0x67, 0x75, 0xd3, 0x73, 0x9b, 0x75, 0x4b, 0x1a, 0x5e, 0x0b, 0x68, 0xbc, 0x23, - 0x4b, 0xcf, 0x14, 0x3d, 0x77, 0x05, 0x4c, 0x64, 0x0c, 0xe0, 0x14, 0x28, 0x6c, 0xe2, 0xe4, 0x76, - 0x8c, 0x35, 0xd9, 0x01, 0x7d, 0x1b, 0x79, 0x1d, 0xcc, 0xbd, 0x57, 0x75, 0x44, 0xe7, 0x72, 0xfe, - 0xa2, 0x66, 0x57, 0x41, 0x25, 0x16, 0x4f, 0xb1, 0x5b, 0x8f, 0x9f, 0x1a, 0xb9, 0x27, 0x4f, 0x8d, - 0xdc, 0xf3, 0xa7, 0x86, 0xf6, 0xed, 0x9e, 0xa1, 0xfd, 0xb8, 0x67, 0x68, 0x8f, 0xf6, 0x0c, 0xed, - 0xf1, 0x9e, 0xa1, 0xfd, 0xb9, 0x67, 0x68, 0x7f, 0xed, 0x19, 0xb9, 0xe7, 0x7b, 0x86, 0x76, 0xff, - 0x99, 0x91, 0x7b, 0xfc, 0xcc, 0xc8, 0x3d, 0x79, 0x66, 0xe4, 0xbe, 0xb2, 0x0e, 0x97, 0x4f, 0x57, - 0xcb, 0x9c, 0xdd, 0xc5, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x3b, 0x20, 0xe2, 0xc2, 0x3a, 0x17, - 0x00, 0x00, + // 1665 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0xcd, 0x6f, 0x1b, 0xc5, + 0x1b, 0xf6, 0xfa, 0x33, 0x9e, 0x7c, 0xfc, 0xf2, 0x9b, 0x44, 0xe9, 0x92, 0xb6, 0x5e, 0xcb, 0x12, + 0x6d, 0x40, 0xb0, 0xa6, 0x49, 0xe9, 0x37, 0x1f, 0x5d, 0xd2, 0xca, 0x11, 0x2d, 0x6a, 0x37, 0x11, + 0x07, 0xc4, 0x65, 0x62, 0x4f, 0x9c, 0x25, 0x6b, 0xef, 0x66, 0x67, 0x9c, 0x36, 0x07, 0x24, 0xfe, + 0x00, 0x90, 0xfa, 0x57, 0x20, 0x24, 0xaa, 0x9e, 0x38, 0x71, 0x44, 0x02, 0x7a, 0xec, 0xb1, 0xb2, + 0xc4, 0x42, 0xd3, 0x0b, 0xca, 0xa9, 0x27, 0xce, 0x68, 0x3e, 0x76, 0x3d, 0x6b, 0x3b, 0xad, 0x53, + 0x84, 0xd4, 0x48, 0x5c, 0xec, 0x99, 0xd9, 0xf7, 0x99, 0x9d, 0x7d, 0xde, 0xe7, 
0x7d, 0xe7, 0x9d, + 0x01, 0xa7, 0xfd, 0xad, 0x66, 0x75, 0xbb, 0x83, 0x03, 0x07, 0x07, 0xfc, 0x7f, 0x37, 0x40, 0xed, + 0x26, 0x56, 0x9a, 0xa6, 0x1f, 0x78, 0xd4, 0x83, 0xa0, 0x37, 0x32, 0xbf, 0xd8, 0x74, 0xe8, 0x66, + 0x67, 0xdd, 0xac, 0x7b, 0xad, 0x6a, 0xd3, 0x6b, 0x7a, 0xd5, 0xa6, 0xe7, 0x35, 0x5d, 0x8c, 0x7c, + 0x87, 0xc8, 0x66, 0x35, 0xf0, 0xeb, 0x55, 0x42, 0x11, 0xed, 0x10, 0x81, 0x9f, 0x9f, 0x65, 0x86, + 0xbc, 0xc9, 0x21, 0x72, 0xd4, 0x90, 0xe6, 0xbc, 0xb7, 0xde, 0xd9, 0xa8, 0x52, 0xa7, 0x85, 0x09, + 0x45, 0x2d, 0x3f, 0x32, 0x60, 0xeb, 0x73, 0xbd, 0xa6, 0x40, 0x3a, 0xed, 0x06, 0xbe, 0xdb, 0x44, + 0x14, 0xdf, 0x41, 0xbb, 0xd2, 0xe0, 0x78, 0xc2, 0x20, 0x6a, 0xc8, 0x87, 0xaf, 0x25, 0x1e, 0x92, + 0x2d, 0x4c, 0xeb, 0x9b, 0xf2, 0x51, 0x59, 0x3e, 0xda, 0x76, 0x5b, 0x5e, 0x03, 0xbb, 0x7c, 0xb1, + 0x44, 0xfc, 0x4a, 0x8b, 0x19, 0x66, 0xe1, 0x77, 0xc8, 0x26, 0xff, 0x91, 0x83, 0x1f, 0xbd, 0x90, + 0xaf, 0x75, 0x44, 0x70, 0xb5, 0x81, 0x37, 0x9c, 0xb6, 0x43, 0x1d, 0xaf, 0x4d, 0xd4, 0xb6, 0x9c, + 0xe4, 0xdc, 0x68, 0x93, 0xf4, 0xfb, 0xa0, 0xf2, 0x20, 0x03, 0xc6, 0x6f, 0x78, 0x5b, 0x8e, 0x8d, + 0xb7, 0x3b, 0x98, 0x50, 0x38, 0x0b, 0x72, 0xdc, 0x46, 0xd7, 0xca, 0xda, 0x42, 0xd1, 0x16, 0x1d, + 0x36, 0xea, 0x3a, 0x2d, 0x87, 0xea, 0xe9, 0xb2, 0xb6, 0x30, 0x69, 0x8b, 0x0e, 0x84, 0x20, 0x4b, + 0x28, 0xf6, 0xf5, 0x4c, 0x59, 0x5b, 0xc8, 0xd8, 0xbc, 0x0d, 0xe7, 0xc1, 0x98, 0xd3, 0xa6, 0x38, + 0xd8, 0x41, 0xae, 0x5e, 0xe4, 0xe3, 0x71, 0x1f, 0xbe, 0x0f, 0x0a, 0x84, 0xa2, 0x80, 0xae, 0x11, + 0x3d, 0x5b, 0xd6, 0x16, 0xc6, 0x17, 0xe7, 0x4d, 0xe1, 0x2b, 0x33, 0xf2, 0x95, 0xb9, 0x16, 0xf9, + 0xca, 0x1a, 0x7b, 0x18, 0x1a, 0xa9, 0x7b, 0xbf, 0x1b, 0x9a, 0x1d, 0x81, 0xe0, 0x25, 0x90, 0xc3, + 0xed, 0xc6, 0x1a, 0xd1, 0x73, 0x87, 0x40, 0x0b, 0x08, 0x3c, 0x03, 0x8a, 0x0d, 0x27, 0xc0, 0x75, + 0xc6, 0x99, 0x9e, 0x2f, 0x6b, 0x0b, 0x53, 0x8b, 0x33, 0x66, 0xec, 0xda, 0xe5, 0xe8, 0x91, 0xdd, + 0xb3, 0x62, 0x9f, 0xe7, 0x23, 0xba, 0xa9, 0x17, 0x38, 0x13, 0xbc, 0x0d, 0x2b, 0x20, 0x4f, 0x36, + 0x51, 0xd0, 0x20, 0xfa, 0x58, 0x39, 0xb3, 0x50, 0xb4, 0xc0, 0x7e, 0x68, 0xc8, 0x11, 0x5b, 0xfe, + 0xc3, 0xcf, 0x41, 0xd6, 0x77, 0x51, 0x5b, 0x07, 0x7c, 0x95, 0xd3, 0xa6, 0xc2, 0xf9, 0x2d, 0x17, + 0xb5, 0xad, 0x73, 0xdd, 0xd0, 0x48, 0xc8, 0x3d, 0x40, 0x1b, 0xa8, 0x8d, 0xaa, 0xae, 0xb7, 0xe5, + 0x54, 0x55, 0x37, 0xb2, 0x59, 0xcc, 0xdb, 0x0c, 0xcd, 0x70, 0x36, 0x9f, 0xb5, 0xf2, 0x6b, 0x1a, + 0x40, 0xe6, 0xb0, 0x95, 0x36, 0xa1, 0xa8, 0x4d, 0x5f, 0xc6, 0x6f, 0x57, 0x40, 0x9e, 0xc5, 0xc4, + 0x1a, 0xe1, 0x9e, 0x1b, 0x95, 0x48, 0x89, 0x49, 0x32, 0x99, 0x3d, 0x14, 0x93, 0xb9, 0xa1, 0x4c, + 0xe6, 0x5f, 0xc8, 0x64, 0xe1, 0x5f, 0x61, 0x52, 0x07, 0x59, 0xd6, 0x83, 0xd3, 0x20, 0x13, 0xa0, + 0x3b, 0x9c, 0xb8, 0x09, 0x9b, 0x35, 0x2b, 0xdf, 0x67, 0xc1, 0x84, 0x08, 0x0a, 0xe2, 0x7b, 0x6d, + 0x82, 0xd9, 0x62, 0x57, 0x79, 0xe6, 0x11, 0xf4, 0xca, 0xc5, 0xf2, 0x11, 0x5b, 0x3e, 0x81, 0x1f, + 0x82, 0xec, 0x32, 0xa2, 0x88, 0x53, 0x3d, 0xbe, 0x38, 0xab, 0x2e, 0x96, 0xcd, 0xc5, 0x9e, 0x59, + 0x73, 0x8c, 0xcd, 0xfd, 0xd0, 0x98, 0x6a, 0x20, 0x8a, 0xde, 0xf2, 0x5a, 0x0e, 0xc5, 0x2d, 0x9f, + 0xee, 0xda, 0x1c, 0x09, 0xdf, 0x05, 0xc5, 0x6b, 0x41, 0xe0, 0x05, 0x6b, 0xbb, 0x3e, 0xe6, 0xae, + 0x29, 0x5a, 0xc7, 0xf6, 0x43, 0x63, 0x06, 0x47, 0x83, 0x0a, 0xa2, 0x67, 0x09, 0xdf, 0x00, 0x39, + 0xde, 0xe1, 0xce, 0x28, 0x5a, 0x33, 0xfb, 0xa1, 0xf1, 0x3f, 0x0e, 0x51, 0xcc, 0x85, 0x45, 0xd2, + 0x77, 0xb9, 0x91, 0x7c, 0x17, 0x4b, 0x28, 0xaf, 0x4a, 0x48, 0x07, 0x85, 0x1d, 0x1c, 0x10, 0x36, + 0x4d, 0x81, 0x8f, 0x47, 0x5d, 0x78, 0x15, 0x00, 0x46, 0x8c, 0x43, 0xa8, 0x53, 0x67, 0x51, 0xc2, + 0xc8, 
0x98, 0x34, 0x45, 0x12, 0xb4, 0x31, 0xe9, 0xb8, 0xd4, 0x82, 0x92, 0x05, 0xc5, 0xd0, 0x56, + 0xda, 0xf0, 0xbe, 0x06, 0x0a, 0x35, 0x8c, 0x1a, 0x38, 0x20, 0x7a, 0xb1, 0x9c, 0x59, 0x18, 0x5f, + 0x7c, 0xdd, 0x54, 0x33, 0xde, 0xad, 0xc0, 0x6b, 0x61, 0xba, 0x89, 0x3b, 0x24, 0x72, 0x90, 0xb0, + 0xb6, 0xb6, 0xba, 0xa1, 0xb1, 0x3e, 0x8a, 0x1e, 0x46, 0xca, 0xb2, 0x07, 0xbe, 0x67, 0x3f, 0x34, + 0xb4, 0xb7, 0xed, 0x68, 0x89, 0x95, 0xdf, 0x34, 0xf0, 0x7f, 0xe6, 0xe1, 0x55, 0x36, 0x37, 0x51, + 0x02, 0xb2, 0x85, 0x68, 0x7d, 0x53, 0xd7, 0x98, 0xbc, 0x6d, 0xd1, 0x51, 0x53, 0x60, 0xfa, 0x1f, + 0xa5, 0xc0, 0xcc, 0xe1, 0x53, 0x60, 0x14, 0x85, 0xd9, 0xa1, 0x51, 0x98, 0x3b, 0x28, 0x0a, 0x2b, + 0x5f, 0x67, 0x44, 0xc6, 0x89, 0xbe, 0xef, 0x10, 0x31, 0x71, 0x3d, 0x8e, 0x89, 0x0c, 0x5f, 0x6d, + 0x2c, 0x35, 0x31, 0xd7, 0x4a, 0x03, 0xb7, 0xa9, 0xb3, 0xe1, 0xe0, 0xe0, 0x05, 0x91, 0xa1, 0xc8, + 0x2d, 0x93, 0x94, 0x9b, 0xaa, 0x95, 0xec, 0x2b, 0xaf, 0x95, 0xbe, 0xe8, 0xc8, 0xbd, 0x44, 0x74, + 0x54, 0x9e, 0xa5, 0xc1, 0x1c, 0x73, 0xc7, 0x0d, 0xb4, 0x8e, 0xdd, 0x4f, 0x50, 0xeb, 0x90, 0x2e, + 0x39, 0xa5, 0xb8, 0xa4, 0x68, 0xc1, 0xff, 0x28, 0x1f, 0x81, 0xf2, 0x6f, 0x35, 0x30, 0x16, 0xe5, + 0x70, 0x68, 0x02, 0x20, 0x60, 0x3c, 0x4d, 0x0b, 0xa2, 0xa7, 0x18, 0x38, 0x88, 0x47, 0x6d, 0xc5, + 0x02, 0x7e, 0x01, 0xf2, 0xa2, 0x27, 0xa3, 0xe0, 0x98, 0x12, 0x05, 0x34, 0xc0, 0xa8, 0x75, 0xb5, + 0x81, 0x7c, 0x8a, 0x03, 0xeb, 0x22, 0x5b, 0x45, 0x37, 0x34, 0x4e, 0x3f, 0x8f, 0x22, 0x5e, 0x37, + 0x0a, 0x1c, 0x73, 0xae, 0x78, 0xa7, 0x2d, 0xdf, 0x50, 0xf9, 0x46, 0x03, 0xd3, 0x6c, 0xa1, 0x8c, + 0x9a, 0x58, 0x15, 0xcb, 0x60, 0x2c, 0x90, 0x6d, 0xbe, 0xdc, 0xf1, 0xc5, 0x8a, 0x99, 0xa4, 0x75, + 0x08, 0x95, 0x56, 0xf6, 0x61, 0x68, 0x68, 0x76, 0x8c, 0x84, 0x4b, 0x09, 0x1a, 0xd3, 0xc3, 0x68, + 0x64, 0x90, 0x54, 0x82, 0xb8, 0x1f, 0xd3, 0x00, 0xae, 0xb0, 0x02, 0x9b, 0x89, 0xaf, 0xa7, 0xd3, + 0xce, 0xc0, 0x8a, 0x4e, 0xf4, 0x48, 0x19, 0xb4, 0xb7, 0x2e, 0x77, 0x43, 0xe3, 0xfc, 0xf3, 0x58, + 0x79, 0x0e, 0x58, 0xf9, 0x04, 0x55, 0xb8, 0xe9, 0x57, 0x7f, 0x5f, 0x79, 0x90, 0x06, 0x53, 0x9f, + 0x7a, 0x6e, 0xa7, 0x85, 0x63, 0xe2, 0x5a, 0x03, 0xc4, 0xe9, 0x3d, 0xe2, 0x92, 0xb6, 0xd6, 0xf9, + 0x6e, 0x68, 0x2c, 0x8d, 0x44, 0x5a, 0x12, 0x78, 0x74, 0x09, 0xbb, 0x9f, 0x06, 0xb3, 0x6b, 0x9e, + 0xff, 0xf1, 0x2a, 0x3f, 0x94, 0x29, 0x79, 0x11, 0x0f, 0xd0, 0x36, 0xdb, 0xa3, 0x8d, 0x21, 0x6e, + 0x22, 0x1a, 0x38, 0x77, 0xad, 0xa5, 0x6e, 0x68, 0x54, 0x47, 0xa2, 0xac, 0x07, 0x3a, 0xba, 0x74, + 0xfd, 0x94, 0x06, 0x73, 0xb7, 0x3b, 0xa8, 0x4d, 0x1d, 0x17, 0x0b, 0xca, 0x62, 0xc2, 0x76, 0x07, + 0x08, 0x2b, 0xf5, 0x08, 0x4b, 0x62, 0x24, 0x75, 0xef, 0x75, 0x43, 0xe3, 0xe2, 0x48, 0xd4, 0x0d, + 0x83, 0x1f, 0x5d, 0x12, 0x7f, 0x48, 0x83, 0xa9, 0x55, 0x51, 0x2f, 0x45, 0x5f, 0x40, 0x86, 0x90, + 0xa7, 0xde, 0x32, 0xf8, 0xeb, 0x66, 0x12, 0x71, 0x88, 0x50, 0x4d, 0x02, 0x8f, 0x2e, 0x6d, 0xbf, + 0xa4, 0xc1, 0xdc, 0x32, 0xa6, 0xb8, 0x4e, 0x71, 0xe3, 0xba, 0x83, 0x5d, 0x85, 0xbe, 0x2f, 0x07, + 0xe8, 0x2b, 0x2b, 0x47, 0x94, 0xa1, 0x18, 0xeb, 0x83, 0x6e, 0x68, 0x5c, 0x1e, 0x89, 0xc0, 0xe1, + 0x13, 0x1c, 0x5d, 0x22, 0x7f, 0xce, 0x81, 0x49, 0x7e, 0xb0, 0x8d, 0xf9, 0x7b, 0x13, 0xc8, 0x92, + 0x4f, 0xb2, 0x07, 0xa3, 0x33, 0x42, 0xe0, 0xd7, 0xcd, 0x55, 0x59, 0x0c, 0x0a, 0x0b, 0x78, 0x01, + 0xe4, 0x09, 0xaf, 0xc4, 0xe5, 0x86, 0x5e, 0xea, 0x3f, 0xb5, 0x26, 0x6b, 0xfe, 0x5a, 0xca, 0x96, + 0xf6, 0xf0, 0x0a, 0xc8, 0xbb, 0xac, 0x00, 0x8d, 0x4e, 0x22, 0x95, 0x7e, 0xe4, 0x60, 0x79, 0xca, + 0xd0, 0x02, 0x03, 0xcf, 0x81, 0x1c, 0xaf, 0x1c, 0xe4, 0x3d, 0x50, 0xe2, 0xb5, 0x83, 0x5b, 0x78, + 0x2d, 0x65, 0x0b, 0x73, 0xb8, 
0x08, 0xb2, 0x7e, 0xe0, 0xb5, 0x64, 0x15, 0x77, 0xa2, 0xff, 0x9d, + 0x6a, 0xd9, 0x53, 0x4b, 0xd9, 0xdc, 0x16, 0x9e, 0x65, 0x47, 0x2e, 0x56, 0x2f, 0x11, 0x7e, 0x84, + 0x65, 0x5b, 0x66, 0x1f, 0x4c, 0x81, 0x44, 0xa6, 0xf0, 0x2c, 0xc8, 0xef, 0xf0, 0x6d, 0x51, 0x5e, + 0x3e, 0xcc, 0xab, 0xa0, 0xe4, 0x86, 0xc9, 0xbe, 0x4b, 0xd8, 0xc2, 0xeb, 0x60, 0x82, 0x7a, 0xfe, + 0x56, 0xb4, 0x01, 0xc9, 0xe3, 0x6f, 0x59, 0xc5, 0x0e, 0xdb, 0xa0, 0x6a, 0x29, 0x3b, 0x81, 0x83, + 0xb7, 0xc0, 0xf4, 0x76, 0x22, 0x4d, 0x62, 0xc2, 0x6f, 0xd3, 0xfa, 0x78, 0x1e, 0x9e, 0xbd, 0x6b, + 0x29, 0x7b, 0x00, 0x0d, 0x97, 0xc1, 0x14, 0x49, 0xe4, 0x0e, 0x79, 0x3d, 0x95, 0xf8, 0xae, 0x64, + 0x76, 0xa9, 0xa5, 0xec, 0x3e, 0x0c, 0xbc, 0x01, 0xa6, 0x1a, 0x89, 0x00, 0xd2, 0xc7, 0x07, 0x57, + 0x35, 0x3c, 0xc4, 0xd8, 0x6c, 0x49, 0xac, 0x05, 0x7a, 0x91, 0x5e, 0xf9, 0x2b, 0x0b, 0x26, 0xa4, + 0x8e, 0xc5, 0xf9, 0xf9, 0x7c, 0x2c, 0x4d, 0x21, 0xe3, 0x93, 0x07, 0x49, 0x93, 0x9b, 0x2b, 0xca, + 0x7c, 0x27, 0x56, 0xa6, 0xd0, 0xf4, 0x5c, 0x2f, 0x7b, 0x70, 0x4d, 0x2a, 0x08, 0xa9, 0xc6, 0xa5, + 0x48, 0x8d, 0x42, 0xca, 0xc7, 0x87, 0xd7, 0xa2, 0x11, 0x4a, 0x4a, 0xf1, 0x12, 0x28, 0x38, 0xe2, + 0x0a, 0x6e, 0x98, 0x88, 0x07, 0x6f, 0xe8, 0x98, 0xb8, 0x24, 0x00, 0x2e, 0xf5, 0x24, 0x29, 0x94, + 0x7c, 0x6c, 0x50, 0x92, 0x31, 0x28, 0x52, 0xe4, 0x99, 0x58, 0x91, 0x79, 0x89, 0x19, 0x28, 0xe0, + 0xe2, 0x0f, 0x93, 0x72, 0xbc, 0x06, 0x26, 0x23, 0x07, 0xf2, 0x47, 0x52, 0x8f, 0x27, 0x0f, 0xda, + 0x8e, 0x22, 0x7c, 0x12, 0x05, 0x57, 0x06, 0xbc, 0x2e, 0xb4, 0x68, 0x1c, 0x9c, 0x97, 0xa3, 0x99, + 0xfa, 0x80, 0xb0, 0x06, 0xc6, 0x5a, 0x98, 0x22, 0x76, 0xe2, 0xd4, 0x0b, 0x3c, 0xbb, 0x9e, 0x4a, + 0x0a, 0xba, 0xa7, 0x00, 0xf3, 0xa6, 0x34, 0xbc, 0xd6, 0xa6, 0xc1, 0xae, 0x3c, 0x5c, 0xc4, 0xe8, + 0xf9, 0xcb, 0x60, 0x32, 0x61, 0x00, 0xa7, 0x41, 0x66, 0x0b, 0x47, 0xf7, 0x9f, 0xac, 0x09, 0x67, + 0x41, 0x6e, 0x07, 0xb9, 0x1d, 0xcc, 0x85, 0x50, 0xb4, 0x45, 0xe7, 0x52, 0xfa, 0x82, 0x66, 0x15, + 0x41, 0x21, 0x10, 0x6f, 0xb1, 0x1a, 0x8f, 0x9e, 0x94, 0x52, 0x8f, 0x9f, 0x94, 0x52, 0xcf, 0x9e, + 0x94, 0xb4, 0xaf, 0xf6, 0x4a, 0xda, 0x77, 0x7b, 0x25, 0xed, 0xe1, 0x5e, 0x49, 0x7b, 0xb4, 0x57, + 0xd2, 0xfe, 0xd8, 0x2b, 0x69, 0x7f, 0xee, 0x95, 0x52, 0xcf, 0xf6, 0x4a, 0xda, 0xbd, 0xa7, 0xa5, + 0xd4, 0xa3, 0xa7, 0xa5, 0xd4, 0xe3, 0xa7, 0xa5, 0xd4, 0x67, 0xe6, 0xe1, 0x12, 0xfd, 0x7a, 0x9e, + 0xd3, 0xb4, 0xf4, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x87, 0x26, 0x7a, 0x65, 0x1c, 0x19, 0x00, + 0x00, } func (this *LokiRequest) Equal(that interface{}) bool { @@ -1903,6 +1972,42 @@ func (this *ShardsResponse) Equal(that interface{}) bool { } return true } +func (this *DetectedFieldsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedFieldsResponse) + if !ok { + that2, ok := that.(DetectedFieldsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Response == nil { + if this.Response != nil { + return false + } + } else if !this.Response.Equal(*that1.Response) { + return false + } + if len(this.Headers) != len(that1.Headers) { + return false + } + for i := range this.Headers { + if !this.Headers[i].Equal(that1.Headers[i]) { + return false + } + } + return true +} func (this *QueryResponse) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2152,6 +2257,30 @@ func (this *QueryResponse_ShardsResponse) Equal(that interface{}) bool { } return true } +func (this *QueryResponse_DetectedFields) Equal(that interface{}) 
bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryResponse_DetectedFields) + if !ok { + that2, ok := that.(QueryResponse_DetectedFields) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedFields.Equal(that1.DetectedFields) { + return false + } + return true +} func (this *QueryRequest) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2358,6 +2487,30 @@ func (this *QueryRequest_ShardsRequest) Equal(that interface{}) bool { } return true } +func (this *QueryRequest_DetectedFields) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryRequest_DetectedFields) + if !ok { + that2, ok := that.(QueryRequest_DetectedFields) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedFields.Equal(that1.DetectedFields) { + return false + } + return true +} func (this *LokiRequest) GoString() string { if this == nil { return "nil" @@ -2548,11 +2701,22 @@ func (this *ShardsResponse) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *DetectedFieldsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&queryrange.DetectedFieldsResponse{") + s = append(s, "Response: "+fmt.Sprintf("%#v", this.Response)+",\n") + s = append(s, "Headers: "+fmt.Sprintf("%#v", this.Headers)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func (this *QueryResponse) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 14) + s := make([]string, 0, 15) s = append(s, "&queryrange.QueryResponse{") if this.Status != nil { s = append(s, "Status: "+fmt.Sprintf("%#v", this.Status)+",\n") @@ -2635,11 +2799,19 @@ func (this *QueryResponse_ShardsResponse) GoString() string { `ShardsResponse:` + fmt.Sprintf("%#v", this.ShardsResponse) + `}`}, ", ") return s } +func (this *QueryResponse_DetectedFields) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryResponse_DetectedFields{` + + `DetectedFields:` + fmt.Sprintf("%#v", this.DetectedFields) + `}`}, ", ") + return s +} func (this *QueryRequest) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 12) + s := make([]string, 0, 13) s = append(s, "&queryrange.QueryRequest{") if this.Request != nil { s = append(s, "Request: "+fmt.Sprintf("%#v", this.Request)+",\n") @@ -2716,6 +2888,14 @@ func (this *QueryRequest_ShardsRequest) GoString() string { `ShardsRequest:` + fmt.Sprintf("%#v", this.ShardsRequest) + `}`}, ", ") return s } +func (this *QueryRequest_DetectedFields) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryRequest_DetectedFields{` + + `DetectedFields:` + fmt.Sprintf("%#v", this.DetectedFields) + `}`}, ", ") + return s +} func valueToGoStringQueryrange(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -3556,6 +3736,55 @@ func (m *ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *DetectedFieldsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedFieldsResponse) 
MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedFieldsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Headers) > 0 { + for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { + { + size := m.Headers[iNdEx].Size() + i -= size + if _, err := m.Headers[iNdEx].MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.Response != nil { + { + size := m.Response.Size() + i -= size + if _, err := m.Response.MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *QueryResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3780,6 +4009,26 @@ func (m *QueryResponse_ShardsResponse) MarshalToSizedBuffer(dAtA []byte) (int, e } return len(dAtA) - i, nil } +func (m *QueryResponse_DetectedFields) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_DetectedFields) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedFields != nil { + { + size, err := m.DetectedFields.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x5a + } + return len(dAtA) - i, nil +} func (m *QueryRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3971,6 +4220,26 @@ func (m *QueryRequest_ShardsRequest) MarshalToSizedBuffer(dAtA []byte) (int, err } return len(dAtA) - i, nil } +func (m *QueryRequest_DetectedFields) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryRequest_DetectedFields) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedFields != nil { + { + size, err := m.DetectedFields.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x4a + } + return len(dAtA) - i, nil +} func encodeVarintQueryrange(dAtA []byte, offset int, v uint64) int { offset -= sovQueryrange(v) base := offset @@ -4331,6 +4600,25 @@ func (m *ShardsResponse) Size() (n int) { return n } +func (m *DetectedFieldsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Response != nil { + l = m.Response.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + if len(m.Headers) > 0 { + for _, e := range m.Headers { + l = e.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + } + return n +} + func (m *QueryResponse) Size() (n int) { if m == nil { return 0 @@ -4455,6 +4743,18 @@ func (m *QueryResponse_ShardsResponse) Size() (n int) { } return n } +func (m *QueryResponse_DetectedFields) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedFields != nil { + l = m.DetectedFields.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func (m *QueryRequest) Size() (n int) { if m == nil { return 0 @@ -4559,6 +4859,18 @@ func (m *QueryRequest_ShardsRequest) Size() (n int) { } return n } +func (m *QueryRequest_DetectedFields) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedFields != nil { + l = m.DetectedFields.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return 
n +} func sovQueryrange(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 @@ -4753,6 +5065,17 @@ func (this *ShardsResponse) String() string { }, "") return s } +func (this *DetectedFieldsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedFieldsResponse{`, + `Response:` + fmt.Sprintf("%v", this.Response) + `,`, + `Headers:` + fmt.Sprintf("%v", this.Headers) + `,`, + `}`, + }, "") + return s +} func (this *QueryResponse) String() string { if this == nil { return "nil" @@ -4854,6 +5177,16 @@ func (this *QueryResponse_ShardsResponse) String() string { }, "") return s } +func (this *QueryResponse_DetectedFields) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryResponse_DetectedFields{`, + `DetectedFields:` + strings.Replace(fmt.Sprintf("%v", this.DetectedFields), "DetectedFieldsResponse", "DetectedFieldsResponse", 1) + `,`, + `}`, + }, "") + return s +} func (this *QueryRequest) String() string { if this == nil { return "nil" @@ -4945,6 +5278,16 @@ func (this *QueryRequest_ShardsRequest) String() string { }, "") return s } +func (this *QueryRequest_DetectedFields) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryRequest_DetectedFields{`, + `DetectedFields:` + strings.Replace(fmt.Sprintf("%v", this.DetectedFields), "DetectedFieldsRequest", "logproto.DetectedFieldsRequest", 1) + `,`, + `}`, + }, "") + return s +} func valueToStringQueryrange(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -7408,6 +7751,129 @@ func (m *ShardsResponse) Unmarshal(dAtA []byte) error { } return nil } +func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedFieldsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedFieldsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Response", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Response == nil { + m.Response = &github_com_grafana_loki_pkg_logproto.DetectedFieldsResponse{} + } + if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { 
+ return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQueryrange(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *QueryResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -7788,6 +8254,41 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { } m.Response = &QueryResponse_ShardsResponse{v} iNdEx = postIndex + case 11: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedFields", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &DetectedFieldsResponse{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Response = &QueryResponse_DetectedFields{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) @@ -8213,6 +8714,41 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { } m.Request = &QueryRequest_ShardsRequest{v} iNdEx = postIndex + case 9: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedFields", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &logproto.DetectedFieldsRequest{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Request = &QueryRequest_DetectedFields{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) diff --git a/pkg/querier/queryrange/queryrange.proto b/pkg/querier/queryrange/queryrange.proto index 4d9fb84853757..33ae8e90357bc 100644 --- a/pkg/querier/queryrange/queryrange.proto +++ b/pkg/querier/queryrange/queryrange.proto @@ -175,6 +175,14 @@ message ShardsResponse { ]; } +message DetectedFieldsResponse { + logproto.DetectedFieldsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.DetectedFieldsResponse"]; + repeated definitions.PrometheusResponseHeader Headers = 2 [ + (gogoproto.jsontag) = "-", + (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + ]; +} + message 
QueryResponse { google.rpc.Status status = 1; oneof response { @@ -187,6 +195,7 @@ message QueryResponse { TopKSketchesResponse topkSketches = 8; QuantileSketchResponse quantileSketches = 9; ShardsResponse shardsResponse = 10; + DetectedFieldsResponse detectedFields = 11; } } @@ -199,6 +208,7 @@ message QueryRequest { LokiRequest streams = 5; logproto.VolumeRequest volume = 6; indexgatewaypb.ShardsRequest shardsRequest = 8; + logproto.DetectedFieldsRequest detectedFields = 9; } map metadata = 7 [(gogoproto.nullable) = false]; } diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 28d71f8fa880a..d5ad9ce705dea 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -237,42 +237,44 @@ func NewMiddleware( return base.MiddlewareFunc(func(next base.Handler) base.Handler { var ( - metricRT = metricsTripperware.Wrap(next) - limitedRT = limitedTripperware.Wrap(next) - logFilterRT = logFilterTripperware.Wrap(next) - seriesRT = seriesTripperware.Wrap(next) - labelsRT = labelsTripperware.Wrap(next) - instantRT = instantMetricTripperware.Wrap(next) - statsRT = indexStatsTripperware.Wrap(next) - seriesVolumeRT = seriesVolumeTripperware.Wrap(next) + metricRT = metricsTripperware.Wrap(next) + limitedRT = limitedTripperware.Wrap(next) + logFilterRT = logFilterTripperware.Wrap(next) + seriesRT = seriesTripperware.Wrap(next) + labelsRT = labelsTripperware.Wrap(next) + instantRT = instantMetricTripperware.Wrap(next) + statsRT = indexStatsTripperware.Wrap(next) + seriesVolumeRT = seriesVolumeTripperware.Wrap(next) + detectedFieldsRT = next //TODO(twhitney): add middlewares for detected fields ) - return newRoundTripper(log, next, limitedRT, logFilterRT, metricRT, seriesRT, labelsRT, instantRT, statsRT, seriesVolumeRT, limits) + return newRoundTripper(log, next, limitedRT, logFilterRT, metricRT, seriesRT, labelsRT, instantRT, statsRT, seriesVolumeRT, detectedFieldsRT, limits) }), StopperWrapper{resultsCache, statsCache, volumeCache}, nil } type roundTripper struct { logger log.Logger - next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume base.Handler + next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields base.Handler limits Limits } // newRoundTripper creates a new queryrange roundtripper -func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume base.Handler, limits Limits) roundTripper { +func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields base.Handler, limits Limits) roundTripper { return roundTripper{ - logger: logger, - limited: limited, - log: log, - limits: limits, - metric: metric, - series: series, - labels: labels, - instantMetric: instantMetric, - indexStats: indexStats, - seriesVolume: seriesVolume, - next: next, + logger: logger, + limited: limited, + log: log, + limits: limits, + metric: metric, + series: series, + labels: labels, + instantMetric: instantMetric, + indexStats: indexStats, + seriesVolume: seriesVolume, + detectedFields: detectedFields, + next: next, } } @@ -365,6 +367,17 @@ func (r roundTripper) Do(ctx context.Context, req base.Request) (base.Response, ) return r.seriesVolume.Do(ctx, req) + case *DetectedFieldsRequest: + level.Info(logger).Log( + "msg", "executing query", + "type", "detected fields", + "query", op.Query, + "length", op.End.Sub(*op.Start), + "start", op.Start, + 
"end", op.End, + ) + + return r.detectedFields.Do(ctx, req) default: return r.next.Do(ctx, req) } @@ -390,14 +403,15 @@ func transformRegexQuery(req *http.Request, expr syntax.LogSelectorExpr) (syntax } const ( - InstantQueryOp = "instant_query" - QueryRangeOp = "query_range" - SeriesOp = "series" - LabelNamesOp = "labels" - IndexStatsOp = "index_stats" - VolumeOp = "volume" - VolumeRangeOp = "volume_range" - IndexShardsOp = "index_shards" + InstantQueryOp = "instant_query" + QueryRangeOp = "query_range" + SeriesOp = "series" + LabelNamesOp = "labels" + IndexStatsOp = "index_stats" + VolumeOp = "volume" + VolumeRangeOp = "volume_range" + IndexShardsOp = "index_shards" + DetectedFieldsOp = "detected_fields" ) func getOperation(path string) string { @@ -418,6 +432,8 @@ func getOperation(path string) string { return VolumeRangeOp case path == "/loki/api/v1/index/shards": return IndexShardsOp + case path == "/loki/api/experimental/detected_fields": + return DetectedFieldsOp default: return "" } diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index f7ce311eee3f7..ff03a8339cd6a 100644 --- a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -865,6 +865,7 @@ func TestPostQueries(t *testing.T) { handler, handler, handler, + handler, fakeLimits{}, ).Do(ctx, lreq) require.NoError(t, err) diff --git a/pkg/querier/queryrange/stats.go b/pkg/querier/queryrange/stats.go index 4cc9de2f3d2c9..029c6df720fa7 100644 --- a/pkg/querier/queryrange/stats.go +++ b/pkg/querier/queryrange/stats.go @@ -29,13 +29,14 @@ type ctxKeyType string const ctxKey ctxKeyType = "stats" const ( - queryTypeLog = "log" - queryTypeMetric = "metric" - queryTypeSeries = "series" - queryTypeLabel = "label" - queryTypeStats = "stats" - queryTypeVolume = "volume" - queryTypeShards = "shards" + queryTypeLog = "log" + queryTypeMetric = "metric" + queryTypeSeries = "series" + queryTypeLabel = "label" + queryTypeStats = "stats" + queryTypeVolume = "volume" + queryTypeShards = "shards" + queryTypeDetectedFields = "detected_fields" ) var ( @@ -61,6 +62,8 @@ func recordQueryMetrics(data *queryData) { logql.RecordStatsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) case queryTypeVolume: logql.RecordVolumeQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.params.Limit(), data.params.Step(), data.status, *data.statistics) + case queryTypeDetectedFields: + logql.RecordDetectedFieldsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) default: level.Error(logger).Log("msg", "failed to record query metrics", "err", fmt.Errorf("expected one of the *LokiRequest, *LokiInstantRequest, *LokiSeriesRequest, *LokiLabelNamesRequest, got %s", data.queryType)) } @@ -164,6 +167,10 @@ func StatsCollectorMiddleware() queryrangebase.Middleware { case *ShardsResponse: responseStats = &r.Response.Statistics queryType = queryTypeShards + case *DetectedFieldsResponse: + responseStats = &stats.Result{} // TODO: support stats in detected fields + totalEntries = 1 + queryType = queryTypeDetectedFields default: level.Warn(logger).Log("msg", fmt.Sprintf("cannot compute stats, unexpected type: %T", resp)) } diff --git a/pkg/util/marshal/marshal.go b/pkg/util/marshal/marshal.go index 8b9f71ecc5782..09a9d8ea8af6f 100644 --- a/pkg/util/marshal/marshal.go +++ 
b/pkg/util/marshal/marshal.go @@ -174,3 +174,13 @@ func WriteVolumeResponseJSON(r *logproto.VolumeResponse, w io.Writer) error { s.WriteRaw("\n") return s.Flush() } + +// WriteDetectedFieldsResponseJSON marshals a logproto.DetectedFieldsResponse to JSON and then +// writes it to the provided io.Writer. +func WriteDetectedFieldsResponseJSON(r *logproto.DetectedFieldsResponse, w io.Writer) error { + s := jsoniter.ConfigFastest.BorrowStream(w) + defer jsoniter.ConfigFastest.ReturnStream(s) + s.WriteVal(r) + s.WriteRaw("\n") + return s.Flush() +} From cc941fe42a93fb6f6233f5e67d9cc524d12737e1 Mon Sep 17 00:00:00 2001 From: Anton Kolesnikov Date: Thu, 28 Mar 2024 17:47:00 +0800 Subject: [PATCH 26/54] chore: refactor line filter MatchType (#12388) --- pkg/loghttp/params.go | 4 +- pkg/logql/log/filter.go | 36 +++- pkg/logql/log/ip.go | 11 +- pkg/logql/log/ip_test.go | 10 +- pkg/logql/log/metrics_extraction_test.go | 4 +- pkg/logql/log/pipeline_test.go | 10 +- pkg/logql/shardmapper_test.go | 5 +- pkg/logql/syntax/ast.go | 19 +- pkg/logql/syntax/ast_test.go | 12 +- pkg/logql/syntax/expr.y | 16 +- pkg/logql/syntax/expr.y.go | 19 +- pkg/logql/syntax/linefilter.go | 5 +- pkg/logql/syntax/linefilter_test.go | 13 +- pkg/logql/syntax/parser_test.go | 242 +++++++++++------------ pkg/querier/queryrange/roundtrip.go | 4 +- pkg/storage/bloom/v1/bloom_tester.go | 7 +- 16 files changed, 215 insertions(+), 202 deletions(-) diff --git a/pkg/loghttp/params.go b/pkg/loghttp/params.go index 654c52e7725df..74597a1970d4f 100644 --- a/pkg/loghttp/params.go +++ b/pkg/loghttp/params.go @@ -11,9 +11,9 @@ import ( "github.com/c2h5oh/datasize" "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logql/syntax" ) @@ -193,7 +193,7 @@ func parseRegexQuery(httpRequest *http.Request) (string, error) { if err != nil { return "", err } - newExpr, err := syntax.AddFilterExpr(expr, labels.MatchRegexp, "", regexp) + newExpr, err := syntax.AddFilterExpr(expr, log.LineMatchRegexp, "", regexp) if err != nil { return "", err } diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index 7b613947c8b8b..164741f4c8c96 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -14,6 +14,32 @@ import ( "github.com/grafana/loki/pkg/util" ) +// LineMatchType is an enum for line matching types. +type LineMatchType int + +// Possible LineMatchTypes. +const ( + LineMatchEqual LineMatchType = iota + LineMatchNotEqual + LineMatchRegexp + LineMatchNotRegexp +) + +func (t LineMatchType) String() string { + switch t { + case LineMatchEqual: + return "|=" + case LineMatchNotEqual: + return "!=" + case LineMatchRegexp: + return "|~" + case LineMatchNotRegexp: + return "!~" + default: + return "" + } +} + // Checker is an interface that matches against the input line or regexp. type Checker interface { Test(line []byte, caseInsensitive bool, equal bool) bool @@ -517,15 +543,15 @@ func (f containsAllFilter) Matches(test Checker) bool { } // NewFilter creates a new line filter from a match string and type. 
-func NewFilter(match string, mt labels.MatchType) (Filterer, error) { +func NewFilter(match string, mt LineMatchType) (Filterer, error) { switch mt { - case labels.MatchRegexp: + case LineMatchRegexp: return parseRegexpFilter(match, true, false) - case labels.MatchNotRegexp: + case LineMatchNotRegexp: return parseRegexpFilter(match, false, false) - case labels.MatchEqual: + case LineMatchEqual: return newContainsFilter([]byte(match), false), nil - case labels.MatchNotEqual: + case LineMatchNotEqual: return NewNotFilter(newContainsFilter([]byte(match), false)), nil default: return nil, fmt.Errorf("unknown matcher: %v", match) diff --git a/pkg/logql/log/ip.go b/pkg/logql/log/ip.go index 1508432d245c5..851cc1a9fa6c7 100644 --- a/pkg/logql/log/ip.go +++ b/pkg/logql/log/ip.go @@ -6,7 +6,6 @@ import ( "net/netip" "unicode" - "github.com/prometheus/prometheus/model/labels" "go4.org/netipx" ) @@ -27,14 +26,14 @@ type IPMatcher interface{} type IPLineFilter struct { ip *ipFilter - ty labels.MatchType + ty LineMatchType } // NewIPLineFilter is used to construct ip filter as a `LineFilter` -func NewIPLineFilter(pattern string, ty labels.MatchType) (*IPLineFilter, error) { +func NewIPLineFilter(pattern string, ty LineMatchType) (*IPLineFilter, error) { // check if `ty` supported in ip matcher. switch ty { - case labels.MatchEqual, labels.MatchNotEqual: + case LineMatchEqual, LineMatchNotEqual: default: return nil, ErrIPFilterInvalidOperation } @@ -69,8 +68,8 @@ func (f *IPLineFilter) RequiredLabelNames() []string { return []string{} // empty for line filter } -func (f *IPLineFilter) filterTy(line []byte, ty labels.MatchType) bool { - if ty == labels.MatchNotEqual { +func (f *IPLineFilter) filterTy(line []byte, ty LineMatchType) bool { + if ty == LineMatchNotEqual { return !f.ip.filter(line) } return f.ip.filter(line) diff --git a/pkg/logql/log/ip_test.go b/pkg/logql/log/ip_test.go index 105b3badd58f0..32b98169f7a60 100644 --- a/pkg/logql/log/ip_test.go +++ b/pkg/logql/log/ip_test.go @@ -189,7 +189,7 @@ func Test_IPLineFilterTy(t *testing.T) { cases := []struct { name string pat string - ty labels.MatchType + ty LineMatchType line []byte expectedMatch bool @@ -199,21 +199,21 @@ func Test_IPLineFilterTy(t *testing.T) { { name: "equal operator", pat: "192.168.0.1", - ty: labels.MatchEqual, + ty: LineMatchEqual, line: []byte("192.168.0.1"), expectedMatch: true, }, { name: "not equal operator", pat: "192.168.0.2", - ty: labels.MatchNotEqual, + ty: LineMatchNotEqual, line: []byte("192.168.0.1"), // match because !=ip("192.168.0.2") expectedMatch: true, }, { name: "regex not equal", pat: "192.168.0.2", - ty: labels.MatchNotRegexp, // not supported + ty: LineMatchNotRegexp, // not supported line: []byte("192.168.0.1"), fail: true, err: ErrIPFilterInvalidOperation, @@ -221,7 +221,7 @@ func Test_IPLineFilterTy(t *testing.T) { { name: "regex equal", pat: "192.168.0.2", - ty: labels.MatchRegexp, // not supported + ty: LineMatchRegexp, // not supported line: []byte("192.168.0.1"), fail: true, err: ErrIPFilterInvalidOperation, diff --git a/pkg/logql/log/metrics_extraction_test.go b/pkg/logql/log/metrics_extraction_test.go index f059271cb8c65..9cc5ff4411f51 100644 --- a/pkg/logql/log/metrics_extraction_test.go +++ b/pkg/logql/log/metrics_extraction_test.go @@ -346,7 +346,7 @@ func TestNewLineSampleExtractor(t *testing.T) { require.Equal(t, 1., f) assertLabelResult(t, lbs, l) - stage := mustFilter(NewFilter("foo", labels.MatchEqual)).ToStage() + stage := mustFilter(NewFilter("foo", LineMatchEqual)).ToStage() se, err 
= NewLineSampleExtractor(BytesExtractor, []Stage{stage}, []string{"namespace"}, false, false) require.NoError(t, err) @@ -404,7 +404,7 @@ func TestNewLineSampleExtractorWithStructuredMetadata(t *testing.T) { se, err = NewLineSampleExtractor(BytesExtractor, []Stage{ NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")), NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "user", "bob")), - mustFilter(NewFilter("foo", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("foo", LineMatchEqual)).ToStage(), }, []string{"foo"}, false, false) require.NoError(t, err) diff --git a/pkg/logql/log/pipeline_test.go b/pkg/logql/log/pipeline_test.go index 12a1a61fcc2cf..9b2aff1332d47 100644 --- a/pkg/logql/log/pipeline_test.go +++ b/pkg/logql/log/pipeline_test.go @@ -240,7 +240,7 @@ func newPipelineFilter(start, end int64, lbls, structuredMetadata labels.Labels, stages = append(stages, s) }) - stages = append(stages, mustFilter(NewFilter(filter, labels.MatchEqual)).ToStage()) + stages = append(stages, mustFilter(NewFilter(filter, LineMatchEqual)).ToStage()) return PipelineFilter{start, end, matchers, NewPipeline(stages)} } @@ -527,7 +527,7 @@ func Benchmark_Pipeline(b *testing.B) { b.ReportAllocs() stages := []Stage{ - mustFilter(NewFilter("metrics.go", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("metrics.go", LineMatchEqual)).ToStage(), NewLogfmtParser(false, false), NewAndLabelFilter( NewDurationLabelFilter(LabelFilterGreaterThan, "duration", 10*time.Millisecond), @@ -611,7 +611,7 @@ func jsonBenchmark(b *testing.B, parser Stage) { b.ReportAllocs() p := NewPipeline([]Stage{ - mustFilter(NewFilter("metrics.go", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("metrics.go", LineMatchEqual)).ToStage(), parser, }) line := []byte(`{"ts":"2020-12-27T09:15:54.333026285Z","error":"action could not be completed", "context":{"file": "metrics.go"}}`) @@ -643,7 +643,7 @@ func invalidJSONBenchmark(b *testing.B, parser Stage) { b.ReportAllocs() p := NewPipeline([]Stage{ - mustFilter(NewFilter("invalid json", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("invalid json", LineMatchEqual)).ToStage(), parser, }) line := []byte(`invalid json`) @@ -696,7 +696,7 @@ func logfmtBenchmark(b *testing.B, parser Stage) { b.ReportAllocs() p := NewPipeline([]Stage{ - mustFilter(NewFilter("ts", labels.MatchEqual)).ToStage(), + mustFilter(NewFilter("ts", LineMatchEqual)).ToStage(), parser, }) diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index 472bc51806041..c6b8e9c4b34cc 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -7,6 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" + "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/logqlmodel" "github.com/grafana/loki/pkg/querier/astmapper" @@ -529,7 +530,7 @@ func TestMapping(t *testing.T) { MultiStages: syntax.MultiStageExpr{ &syntax.LineFilterExpr{ LineFilter: syntax.LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "error", Op: "", }, @@ -550,7 +551,7 @@ func TestMapping(t *testing.T) { MultiStages: syntax.MultiStageExpr{ &syntax.LineFilterExpr{ LineFilter: syntax.LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "error", Op: "", }, diff --git a/pkg/logql/syntax/ast.go b/pkg/logql/syntax/ast.go index 060fc3cd11711..e1b796c4fbedd 100644 --- a/pkg/logql/syntax/ast.go +++ b/pkg/logql/syntax/ast.go @@ -329,7 +329,7 @@ 
func (e *PipelineExpr) HasFilter() bool { } type LineFilter struct { - Ty labels.MatchType + Ty log.LineMatchType Match string Op string } @@ -342,7 +342,7 @@ type LineFilterExpr struct { implicit } -func newLineFilterExpr(ty labels.MatchType, op, match string) *LineFilterExpr { +func newLineFilterExpr(ty log.LineMatchType, op, match string) *LineFilterExpr { return &LineFilterExpr{ LineFilter: LineFilter{ Ty: ty, @@ -355,7 +355,7 @@ func newLineFilterExpr(ty labels.MatchType, op, match string) *LineFilterExpr { func newOrLineFilter(left, right *LineFilterExpr) *LineFilterExpr { right.Ty = left.Ty - if left.Ty == labels.MatchEqual || left.Ty == labels.MatchRegexp { + if left.Ty == log.LineMatchEqual || left.Ty == log.LineMatchRegexp { left.Or = right right.IsOrChild = true return left @@ -389,7 +389,7 @@ func (e *LineFilterExpr) Accept(v RootVisitor) { } // AddFilterExpr adds a filter expression to a logselector expression. -func AddFilterExpr(expr LogSelectorExpr, ty labels.MatchType, op, match string) (LogSelectorExpr, error) { +func AddFilterExpr(expr LogSelectorExpr, ty log.LineMatchType, op, match string) (LogSelectorExpr, error) { filter := newLineFilterExpr(ty, op, match) switch e := expr.(type) { case *MatchersExpr: @@ -412,16 +412,7 @@ func (e *LineFilterExpr) String() string { } if !e.IsOrChild { // Only write the type when we're not chaining "or" filters - switch e.Ty { - case labels.MatchRegexp: - sb.WriteString("|~") - case labels.MatchNotRegexp: - sb.WriteString("!~") - case labels.MatchEqual: - sb.WriteString("|=") - case labels.MatchNotEqual: - sb.WriteString("!=") - } + sb.WriteString(e.Ty.String()) sb.WriteString(" ") } diff --git a/pkg/logql/syntax/ast_test.go b/pkg/logql/syntax/ast_test.go index ece470516eb45..95f654d2c647f 100644 --- a/pkg/logql/syntax/ast_test.go +++ b/pkg/logql/syntax/ast_test.go @@ -449,16 +449,16 @@ func Test_FilterMatcher(t *testing.T) { func TestOrLineFilterTypes(t *testing.T) { for _, tt := range []struct { - ty labels.MatchType + ty log.LineMatchType }{ - {labels.MatchEqual}, - {labels.MatchNotEqual}, - {labels.MatchRegexp}, - {labels.MatchNotRegexp}, + {log.LineMatchEqual}, + {log.LineMatchNotEqual}, + {log.LineMatchRegexp}, + {log.LineMatchNotRegexp}, } { t.Run("right inherits left's type", func(t *testing.T) { left := &LineFilterExpr{LineFilter: LineFilter{Ty: tt.ty, Match: "something"}} - right := &LineFilterExpr{LineFilter: LineFilter{Ty: labels.MatchEqual, Match: "something"}} + right := &LineFilterExpr{LineFilter: LineFilter{Ty: log.LineMatchEqual, Match: "something"}} _ = newOrLineFilter(left, right) require.Equal(t, tt.ty, right.Ty) diff --git a/pkg/logql/syntax/expr.y b/pkg/logql/syntax/expr.y index 7e801480f4808..043642d526ad1 100644 --- a/pkg/logql/syntax/expr.y +++ b/pkg/logql/syntax/expr.y @@ -11,7 +11,7 @@ import ( %union{ Expr Expr - Filter labels.MatchType + Filter log.LineMatchType Grouping *Grouping Labels []string LogExpr LogSelectorExpr @@ -239,10 +239,10 @@ labelReplaceExpr: ; filter: - PIPE_MATCH { $$ = labels.MatchRegexp } - | PIPE_EXACT { $$ = labels.MatchEqual } - | NRE { $$ = labels.MatchNotRegexp } - | NEQ { $$ = labels.MatchNotEqual } + PIPE_MATCH { $$ = log.LineMatchRegexp } + | PIPE_EXACT { $$ = log.LineMatchEqual } + | NRE { $$ = log.LineMatchNotRegexp } + | NEQ { $$ = log.LineMatchNotEqual } ; selector: @@ -287,9 +287,9 @@ filterOp: ; orFilter: - STRING { $$ = newLineFilterExpr(labels.MatchEqual, "", $1) } - | filterOp OPEN_PARENTHESIS STRING CLOSE_PARENTHESIS { $$ = newLineFilterExpr(labels.MatchEqual, $1, $3) 
} - | STRING OR orFilter { $$ = newOrLineFilter(newLineFilterExpr(labels.MatchEqual, "", $1), $3) } + STRING { $$ = newLineFilterExpr(log.LineMatchEqual, "", $1) } + | filterOp OPEN_PARENTHESIS STRING CLOSE_PARENTHESIS { $$ = newLineFilterExpr(log.LineMatchEqual, $1, $3) } + | STRING OR orFilter { $$ = newOrLineFilter(newLineFilterExpr(log.LineMatchEqual, "", $1), $3) } ; lineFilter: diff --git a/pkg/logql/syntax/expr.y.go b/pkg/logql/syntax/expr.y.go index 1f38ab579f10b..41da7466fbc20 100644 --- a/pkg/logql/syntax/expr.y.go +++ b/pkg/logql/syntax/expr.y.go @@ -4,7 +4,6 @@ package syntax import __yyfmt__ "fmt" - import ( "github.com/grafana/loki/pkg/logql/log" "github.com/prometheus/prometheus/model/labels" @@ -14,7 +13,7 @@ import ( type exprSymType struct { yys int Expr Expr - Filter labels.MatchType + Filter log.LineMatchType Grouping *Grouping Labels []string LogExpr LogSelectorExpr @@ -266,7 +265,6 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 - var exprExca = [...]int{ -1, 1, 1, -1, @@ -554,7 +552,6 @@ var exprErrorMessages = [...]struct { msg string }{} - /* parser for yacc output */ var ( @@ -1162,22 +1159,22 @@ exprdefault: case 57: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchRegexp + exprVAL.Filter = log.LineMatchRegexp } case 58: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchEqual + exprVAL.Filter = log.LineMatchEqual } case 59: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchNotRegexp + exprVAL.Filter = log.LineMatchNotRegexp } case 60: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = labels.MatchNotEqual + exprVAL.Filter = log.LineMatchNotEqual } case 61: exprDollar = exprS[exprpt-3 : exprpt+1] @@ -1296,17 +1293,17 @@ exprdefault: case 84: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.OrFilter = newLineFilterExpr(labels.MatchEqual, "", exprDollar[1].str) + exprVAL.OrFilter = newLineFilterExpr(log.LineMatchEqual, "", exprDollar[1].str) } case 85: exprDollar = exprS[exprpt-4 : exprpt+1] { - exprVAL.OrFilter = newLineFilterExpr(labels.MatchEqual, exprDollar[1].FilterOp, exprDollar[3].str) + exprVAL.OrFilter = newLineFilterExpr(log.LineMatchEqual, exprDollar[1].FilterOp, exprDollar[3].str) } case 86: exprDollar = exprS[exprpt-3 : exprpt+1] { - exprVAL.OrFilter = newOrLineFilter(newLineFilterExpr(labels.MatchEqual, "", exprDollar[1].str), exprDollar[3].OrFilter) + exprVAL.OrFilter = newOrLineFilter(newLineFilterExpr(log.LineMatchEqual, "", exprDollar[1].str), exprDollar[3].OrFilter) } case 87: exprDollar = exprS[exprpt-2 : exprpt+1] diff --git a/pkg/logql/syntax/linefilter.go b/pkg/logql/syntax/linefilter.go index f85b210234139..9b07e95deb12d 100644 --- a/pkg/logql/syntax/linefilter.go +++ b/pkg/logql/syntax/linefilter.go @@ -1,8 +1,7 @@ package syntax import ( - "github.com/prometheus/prometheus/model/labels" - + "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/util/encoding" ) @@ -40,7 +39,7 @@ func (lf LineFilter) MarshalTo(b []byte) (int, error) { func (lf *LineFilter) Unmarshal(b []byte) error { buf := encoding.DecWith(b) - lf.Ty = labels.MatchType(buf.Uvarint()) + lf.Ty = log.LineMatchType(buf.Uvarint()) lf.Match = buf.UvarintStr() lf.Op = buf.UvarintStr() return nil diff --git a/pkg/logql/syntax/linefilter_test.go b/pkg/logql/syntax/linefilter_test.go index 6ce5a601c2815..d0cc700ce4601 100644 --- a/pkg/logql/syntax/linefilter_test.go +++ b/pkg/logql/syntax/linefilter_test.go @@ -4,18 +4,19 @@ import ( "fmt" "testing" - 
"github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/logql/log" ) func TestLineFilterSerialization(t *testing.T) { for i, orig := range []LineFilter{ {}, - {Ty: labels.MatchEqual, Match: "match"}, - {Ty: labels.MatchEqual, Match: "match", Op: "OR"}, - {Ty: labels.MatchNotEqual, Match: "not match"}, - {Ty: labels.MatchNotEqual, Match: "not match", Op: "OR"}, - {Ty: labels.MatchRegexp, Op: "OR"}, + {Ty: log.LineMatchEqual, Match: "match"}, + {Ty: log.LineMatchEqual, Match: "match", Op: "OR"}, + {Ty: log.LineMatchNotEqual, Match: "not match"}, + {Ty: log.LineMatchNotEqual, Match: "not match", Op: "OR"}, + {Ty: log.LineMatchRegexp, Op: "OR"}, } { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { b := make([]byte, orig.Size()) diff --git a/pkg/logql/syntax/parser_test.go b/pkg/logql/syntax/parser_test.go index 7152d78adac12..faa55015e5838 100644 --- a/pkg/logql/syntax/parser_test.go +++ b/pkg/logql/syntax/parser_test.go @@ -30,7 +30,7 @@ var ParseTestCases = []struct { Left: &LogRange{ Left: &PipelineExpr{ MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchRegexp, "", "error\\"), + newLineFilterExpr(log.LineMatchRegexp, "", "error\\"), }, Left: &MatchersExpr{ Mts: []*labels.Matcher{ @@ -60,7 +60,7 @@ var ParseTestCases = []struct { Left: newPipelineExpr( newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "error"), + newLineFilterExpr(log.LineMatchEqual, "", "error"), }, ), Interval: 12 * time.Hour, @@ -75,7 +75,7 @@ var ParseTestCases = []struct { Left: &LogRange{ Left: newPipelineExpr( newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), - MultiStageExpr{newLineFilterExpr(labels.MatchEqual, "", "error")}, + MultiStageExpr{newLineFilterExpr(log.LineMatchEqual, "", "error")}, ), Interval: 12 * time.Hour, }, @@ -392,8 +392,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -404,7 +404,7 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar"), mustNewMatcher(labels.MatchEqual, "ip", "foo")}), MultiStageExpr{ newLogfmtParserExpr(nil), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "127.0.0.1"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "127.0.0.1"), newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "ip", "2.3.4.5"))), newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "ip", "abc"))), newLabelFilterExpr(log.NewIPLabelFilter("4.5.6.7", "ipaddr", log.LabelFilterEqual)), @@ -417,7 +417,7 @@ var ParseTestCases = []struct { exp: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), }, ), }, @@ -427,8 +427,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, 
OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), }, ), @@ -440,10 +440,10 @@ var ParseTestCases = []struct { MultiStageExpr{ newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -454,8 +454,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -465,7 +465,7 @@ var ParseTestCases = []struct { exp: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), }, ), }, @@ -475,8 +475,8 @@ var ParseTestCases = []struct { newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), MultiStageExpr{ newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), }, ), @@ -488,10 +488,10 @@ var ParseTestCases = []struct { MultiStageExpr{ newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), - newLineFilterExpr(labels.MatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), ), - newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"), + newLineFilterExpr(log.LineMatchNotEqual, OpFilterIP, "123.123.123.123"), ), }, ), @@ -662,7 +662,7 @@ var ParseTestCases = []struct { in: `{foo="bar"} |= "baz"`, exp: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiStageExpr{newLineFilterExpr(labels.MatchEqual, "", "baz")}, + MultiStageExpr{newLineFilterExpr(log.LineMatchEqual, "", "baz")}, ), }, { @@ -673,12 +673,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -693,12 +693,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", 
"baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -715,12 +715,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -737,12 +737,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), newLabelParserExpr(OpParserTypeUnpack, ""), }, @@ -769,12 +769,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -796,12 +796,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -824,12 +824,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -852,12 +852,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - 
newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -882,12 +882,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -913,12 +913,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -935,12 +935,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -963,12 +963,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -993,12 +993,12 @@ var ParseTestCases = []struct { newNestedLineFilterExpr( newNestedLineFilterExpr( newNestedLineFilterExpr( - newLineFilterExpr(labels.MatchEqual, "", "baz"), - newLineFilterExpr(labels.MatchRegexp, "", "blip"), + newLineFilterExpr(log.LineMatchEqual, "", "baz"), + newLineFilterExpr(log.LineMatchRegexp, "", "blip"), ), - newLineFilterExpr(labels.MatchNotEqual, "", "flip"), + newLineFilterExpr(log.LineMatchNotEqual, "", "flip"), ), - newLineFilterExpr(labels.MatchNotRegexp, "", "flap"), + newLineFilterExpr(log.LineMatchNotRegexp, "", "flap"), ), }, ), @@ -1257,7 +1257,7 @@ var ParseTestCases = []struct { mustNewMatcher(labels.MatchEqual, "namespace", "tns"), }), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), }), Interval: 5 * 
time.Minute, }, OpRangeTypeCount, nil, nil), @@ -1291,7 +1291,7 @@ var ParseTestCases = []struct { mustNewMatcher(labels.MatchEqual, "namespace", "tns"), }), MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), }), Interval: 5 * time.Minute, }, OpRangeTypeCount, nil, nil), @@ -1368,7 +1368,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1387,7 +1387,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeUnpack, ""), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ @@ -1407,7 +1407,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1426,7 +1426,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypePattern, " bar "), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1445,7 +1445,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1464,7 +1464,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1483,7 +1483,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1503,7 +1503,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: 
log.NewOrLabelFilter( @@ -1534,7 +1534,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLineFmtExpr("blip{{ .foo }}blop"), }, }, @@ -1545,7 +1545,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1566,7 +1566,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1592,7 +1592,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1638,7 +1638,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1659,7 +1659,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1690,7 +1690,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1720,7 +1720,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1742,7 +1742,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), 
newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1764,7 +1764,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1786,7 +1786,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "level=error"), + newLineFilterExpr(log.LineMatchEqual, "", "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewAndLabelFilter( @@ -1808,7 +1808,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), }, }, 5*time.Minute, @@ -1890,7 +1890,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1921,7 +1921,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1952,7 +1952,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -1983,7 +1983,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2018,7 +2018,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2057,7 +2057,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, 
"", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2096,7 +2096,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2135,7 +2135,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2174,7 +2174,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2213,7 +2213,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2263,7 +2263,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2295,7 +2295,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2344,7 +2344,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2376,7 +2376,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2425,7 +2425,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: 
"app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2457,7 +2457,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2506,7 +2506,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2538,7 +2538,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2655,7 +2655,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2687,7 +2687,7 @@ var ParseTestCases = []struct { newLogRange(&PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &LabelFilterExpr{ LabelFilterer: log.NewOrLabelFilter( @@ -2932,7 +2932,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "bar"), + newLineFilterExpr(log.LineMatchEqual, "", "bar"), newLabelParserExpr(OpParserTypeJSON, ""), }, }, @@ -2963,7 +2963,7 @@ var ParseTestCases = []struct { exp: &PipelineExpr{ Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), MultiStages: MultiStageExpr{ - newLineFilterExpr(labels.MatchEqual, "", "#"), + newLineFilterExpr(log.LineMatchEqual, "", "#"), }, }, }, @@ -3147,23 +3147,23 @@ var ParseTestCases = []struct { Left: newOrLineFilter( &LineFilterExpr{ LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "foo", }, }, &LineFilterExpr{ LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "bar", }, }), LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "buzz", }, Or: &LineFilterExpr{ LineFilter: LineFilter{ - Ty: labels.MatchEqual, + Ty: log.LineMatchEqual, Match: "fizz", }, IsOrChild: true, diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 
d5ad9ce705dea..ee896639f8a8c 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -15,10 +15,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" + logqllog "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/logqlmodel/stats" base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" @@ -387,7 +387,7 @@ func (r roundTripper) Do(ctx context.Context, req base.Request) (base.Response, func transformRegexQuery(req *http.Request, expr syntax.LogSelectorExpr) (syntax.LogSelectorExpr, error) { regexp := req.Form.Get("regexp") if regexp != "" { - filterExpr, err := syntax.AddFilterExpr(expr, labels.MatchRegexp, "", regexp) + filterExpr, err := syntax.AddFilterExpr(expr, logqllog.LineMatchRegexp, "", regexp) if err != nil { return nil, err } diff --git a/pkg/storage/bloom/v1/bloom_tester.go b/pkg/storage/bloom/v1/bloom_tester.go index 99b76c3a1a0d6..5aa688bfc2657 100644 --- a/pkg/storage/bloom/v1/bloom_tester.go +++ b/pkg/storage/bloom/v1/bloom_tester.go @@ -3,7 +3,6 @@ package v1 import ( "github.com/grafana/regexp" regexpsyntax "github.com/grafana/regexp/syntax" - "github.com/prometheus/prometheus/model/labels" "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logql/syntax" @@ -90,16 +89,16 @@ func FiltersToBloomTest(b NGramBuilder, filters ...syntax.LineFilterExpr) BloomT func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest { switch filter.Ty { - case labels.MatchNotEqual, labels.MatchNotRegexp: + case log.LineMatchNotEqual, log.LineMatchNotRegexp: // We cannot test _negated_ filters with a bloom filter since blooms are probabilistic // filters that can only tell us if a string _might_ exist. // For example, for `!= "foo"`, the bloom filter might tell us that the string "foo" might exist // but because we are not sure, we cannot discard that chunk because it might actually not be there. // Therefore, we return a test that always returns true. 
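		// Returning MatchAll only means the chunk cannot be pruned using blooms;
		// the negated line filter is still evaluated against the chunk's log lines at query time.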
return MatchAll - case labels.MatchEqual: + case log.LineMatchEqual: return newStringTest(b, filter.Match) - case labels.MatchRegexp: + case log.LineMatchRegexp: reg, err := regexpsyntax.Parse(filter.Match, regexpsyntax.Perl) if err != nil { // TODO: log error From 86c768c3ccf4cdf5c346f1b31ff60d115a1e637b Mon Sep 17 00:00:00 2001 From: Salva Corts Date: Thu, 28 Mar 2024 11:44:23 +0100 Subject: [PATCH 27/54] feat: Blooms retention (#12258) --- docs/sources/configure/_index.md | 9 + pkg/bloomcompactor/bloomcompactor.go | 28 +- pkg/bloomcompactor/bloomcompactor_test.go | 8 + pkg/bloomcompactor/config.go | 10 +- pkg/bloomcompactor/metrics.go | 48 + pkg/bloomcompactor/retention.go | 320 +++++++ pkg/bloomcompactor/retention_test.go | 882 ++++++++++++++++++ pkg/bloomgateway/processor_test.go | 5 + pkg/loki/modules.go | 6 +- pkg/storage/config/schema_config.go | 3 +- .../stores/shipper/bloomshipper/client.go | 5 + .../stores/shipper/bloomshipper/resolver.go | 23 + .../stores/shipper/bloomshipper/store.go | 112 +++ .../stores/shipper/bloomshipper/store_test.go | 133 ++- 14 files changed, 1573 insertions(+), 19 deletions(-) create mode 100644 pkg/bloomcompactor/retention.go create mode 100644 pkg/bloomcompactor/retention_test.go diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 2d5e667b13683..6a4fd280c0a59 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2734,6 +2734,15 @@ ring: # and compact as many tables. # CLI flag: -bloom-compactor.max-compaction-parallelism [max_compaction_parallelism: | default = 1] + +retention: + # Enable bloom retention. + # CLI flag: -bloom-compactor.retention.enabled + [enabled: | default = false] + + # Max lookback days for retention. + # CLI flag: -bloom-compactor.retention.max-lookback-days + [max_lookback_days: | default = 365] ``` ### limits_config diff --git a/pkg/bloomcompactor/bloomcompactor.go b/pkg/bloomcompactor/bloomcompactor.go index 25994c08b6a35..7b91eca0565b0 100644 --- a/pkg/bloomcompactor/bloomcompactor.go +++ b/pkg/bloomcompactor/bloomcompactor.go @@ -48,7 +48,8 @@ type Compactor struct { tsdbStore TSDBStore // TODO(owen-d): ShardingStrategy - controller *SimpleBloomController + controller *SimpleBloomController + retentionManager *RetentionManager // temporary workaround until bloomStore has implemented read/write shipper interface bloomStore bloomshipper.Store @@ -64,7 +65,8 @@ func New( storeCfg storage.Config, clientMetrics storage.ClientMetrics, fetcherProvider stores.ChunkFetcherProvider, - sharding util_ring.TenantSharding, + ring ring.ReadRing, + ringLifeCycler *ring.BasicLifecycler, limits Limits, store bloomshipper.StoreWithMetrics, logger log.Logger, @@ -74,7 +76,7 @@ func New( cfg: cfg, schemaCfg: schemaCfg, logger: logger, - sharding: sharding, + sharding: util_ring.NewTenantShuffleSharding(ring, ringLifeCycler, limits.BloomCompactorShardSize), limits: limits, bloomStore: store, metrics: NewMetrics(r, store.BloomMetrics()), @@ -100,6 +102,15 @@ func New( c.logger, ) + c.retentionManager = NewRetentionManager( + c.cfg.RetentionConfig, + c.limits, + c.bloomStore, + newFirstTokenRetentionSharding(ring, ringLifeCycler), + c.metrics, + c.logger, + ) + c.Service = services.NewBasicService(c.starting, c.running, c.stopping) return c, nil } @@ -214,10 +225,17 @@ func (c *Compactor) runOne(ctx context.Context) error { c.metrics.compactionsStarted.Inc() start := time.Now() level.Info(c.logger).Log("msg", "running bloom compaction", "workers", c.cfg.WorkerParallelism) - var 
workersErr error + var workersErr, retentionErr error var wg sync.WaitGroup input := make(chan *tenantTableRange) + // Launch retention (will return instantly if retention is disabled or not owned by this compactor) + wg.Add(1) + go func() { + retentionErr = c.retentionManager.Apply(ctx) + wg.Done() + }() + tables := c.tables(time.Now()) level.Debug(c.logger).Log("msg", "loaded tables", "tables", tables.TotalDays()) @@ -236,7 +254,7 @@ func (c *Compactor) runOne(ctx context.Context) error { wg.Wait() duration := time.Since(start) - err = multierror.New(workersErr, err, ctx.Err()).Err() + err = multierror.New(retentionErr, workersErr, err, ctx.Err()).Err() if err != nil { level.Error(c.logger).Log("msg", "compaction iteration failed", "err", err, "duration", duration) diff --git a/pkg/bloomcompactor/bloomcompactor_test.go b/pkg/bloomcompactor/bloomcompactor_test.go index db1221fe58d2f..9ffa1a88ba65b 100644 --- a/pkg/bloomcompactor/bloomcompactor_test.go +++ b/pkg/bloomcompactor/bloomcompactor_test.go @@ -149,6 +149,14 @@ type mockLimits struct { shardSize int } +func (m mockLimits) RetentionPeriod(_ string) time.Duration { + panic("implement me") +} + +func (m mockLimits) StreamRetention(_ string) []validation.StreamRetention { + panic("implement me") +} + func (m mockLimits) AllByUserID() map[string]*validation.Limits { panic("implement me") } diff --git a/pkg/bloomcompactor/config.go b/pkg/bloomcompactor/config.go index 72ff9c5cc2f19..e0b2afd924f4a 100644 --- a/pkg/bloomcompactor/config.go +++ b/pkg/bloomcompactor/config.go @@ -7,7 +7,6 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" "github.com/grafana/loki/pkg/util/ring" ) @@ -32,6 +31,8 @@ type Config struct { CompactionRetries int `yaml:"compaction_retries"` MaxCompactionParallelism int `yaml:"max_compaction_parallelism"` + + RetentionConfig RetentionConfig `yaml:"retention"` } // RegisterFlags registers flags for the Bloom-Compactor configuration. @@ -52,6 +53,7 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.DurationVar(&cfg.RetryMaxBackoff, "bloom-compactor.compaction-retries-max-backoff", time.Minute, "Maximum backoff time between retries.") f.IntVar(&cfg.CompactionRetries, "bloom-compactor.compaction-retries", 3, "Number of retries to perform when compaction fails.") f.IntVar(&cfg.MaxCompactionParallelism, "bloom-compactor.max-compaction-parallelism", 1, "Maximum number of tables to compact in parallel. 
While increasing this value, please make sure compactor has enough disk space allocated to be able to store and compact as many tables.") + cfg.RetentionConfig.RegisterFlags(f) // Ring skipFlags := []string{ @@ -66,6 +68,10 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { } func (cfg *Config) Validate() error { + if err := cfg.RetentionConfig.Validate(); err != nil { + return err + } + if cfg.MinTableOffset > cfg.MaxTableOffset { return fmt.Errorf("min-table-offset (%d) must be less than or equal to max-table-offset (%d)", cfg.MinTableOffset, cfg.MaxTableOffset) } @@ -76,7 +82,7 @@ func (cfg *Config) Validate() error { } type Limits interface { - downloads.Limits + RetentionLimits BloomCompactorShardSize(tenantID string) int BloomCompactorEnabled(tenantID string) bool BloomNGramLength(tenantID string) int diff --git a/pkg/bloomcompactor/metrics.go b/pkg/bloomcompactor/metrics.go index 30bb6d4022e58..d4b4b0e53d50c 100644 --- a/pkg/bloomcompactor/metrics.go +++ b/pkg/bloomcompactor/metrics.go @@ -43,6 +43,13 @@ type Metrics struct { progress prometheus.Gauge timePerTenant *prometheus.CounterVec + + // Retention metrics + retentionRunning prometheus.Gauge + retentionTime *prometheus.HistogramVec + retentionDaysPerIteration *prometheus.HistogramVec + retentionTenantsPerIteration *prometheus.HistogramVec + retentionTenantsExceedingLookback prometheus.Gauge } func NewMetrics(r prometheus.Registerer, bloomMetrics *v1.Metrics) *Metrics { @@ -175,6 +182,47 @@ func NewMetrics(r prometheus.Registerer, bloomMetrics *v1.Metrics) *Metrics { Name: "tenant_compaction_seconds_total", Help: "Time spent processing a tenant.", }, []string{tenantLabel}), + + // Retention + retentionRunning: promauto.With(r).NewGauge(prometheus.GaugeOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_running", + Help: "1 if retention is running in this compactor.", + }), + + retentionTime: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_time_seconds", + Help: "Time this retention process took to complete.", + Buckets: prometheus.DefBuckets, + }, []string{"status"}), + + retentionDaysPerIteration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_days_processed", + Help: "Number of days iterated over during the retention process.", + // 1day -> 5 years, 10 buckets + Buckets: prometheus.ExponentialBucketsRange(1, 365*5, 10), + }, []string{"status"}), + + retentionTenantsPerIteration: promauto.With(r).NewHistogramVec(prometheus.HistogramOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_tenants_processed", + Help: "Number of tenants on which retention was applied during the retention process.", + // 1 tenant -> 10k tenants, 10 buckets + Buckets: prometheus.ExponentialBucketsRange(1, 10000, 10), + }, []string{"status"}), + + retentionTenantsExceedingLookback: promauto.With(r).NewGauge(prometheus.GaugeOpts{ + Namespace: metricsNamespace, + Subsystem: metricsSubsystem, + Name: "retention_tenants_exceeding_lookback", + Help: "Number of tenants with a retention exceeding the configured retention lookback.", + }), } return &m diff --git a/pkg/bloomcompactor/retention.go b/pkg/bloomcompactor/retention.go new file mode 100644 index 0000000000000..4c7fc39c1ce64 --- /dev/null +++ b/pkg/bloomcompactor/retention.go @@ -0,0 +1,320 @@ +package bloomcompactor + +import ( + "context" + "flag" 
+ "math" + "slices" + "time" + + "github.com/go-kit/log" + "github.com/go-kit/log/level" + "github.com/grafana/dskit/ring" + "github.com/pkg/errors" + "github.com/prometheus/common/model" + + "github.com/grafana/loki/pkg/storage/chunk/client" + storageconfig "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/pkg/validation" +) + +type retentionSharding interface { + OwnsRetention() (bool, error) +} + +type firstTokenRetentionSharding struct { + ring ring.ReadRing + ringLifeCycler *ring.BasicLifecycler +} + +func newFirstTokenRetentionSharding(ring ring.ReadRing, ringLifeCycler *ring.BasicLifecycler) *firstTokenRetentionSharding { + return &firstTokenRetentionSharding{ + ring: ring, + ringLifeCycler: ringLifeCycler, + } +} + +// OwnsRetention returns true if the compactor should apply retention. +// This is determined by checking if the compactor owns the smaller token in the ring. +// Note that during a ring topology change, more than one compactor may attempt to apply retention. +// This is fine since retention consists on deleting old data which should be idempotent. +func (s *firstTokenRetentionSharding) OwnsRetention() (bool, error) { + rs, err := s.ring.GetAllHealthy(RingOp) + if err != nil { + return false, errors.Wrap(err, "getting ring healthy instances") + } + if len(rs.Instances) == 0 { + return false, errors.New("no healthy instances in ring") + } + + // Lookup the instance with smaller token + instance := slices.MinFunc(rs.Instances, func(a, b ring.InstanceDesc) int { + smallerA := slices.Min(a.GetTokens()) + smallerB := slices.Min(b.GetTokens()) + if smallerA < smallerB { + return -1 + } + if smallerA > smallerB { + return 1 + } + return 0 + }) + + return instance.GetId() == s.ringLifeCycler.GetInstanceID(), nil +} + +type RetentionConfig struct { + Enabled bool `yaml:"enabled"` + MaxLookbackDays int `yaml:"max_lookback_days"` +} + +func (cfg *RetentionConfig) RegisterFlags(f *flag.FlagSet) { + f.BoolVar(&cfg.Enabled, "bloom-compactor.retention.enabled", false, "Enable bloom retention.") + f.IntVar(&cfg.MaxLookbackDays, "bloom-compactor.retention.max-lookback-days", 365, "Max lookback days for retention.") +} + +func (cfg *RetentionConfig) Validate() error { + if !cfg.Enabled { + return nil + } + + if cfg.MaxLookbackDays < 1 { + return errors.New("max lookback days must be a positive number") + } + return nil +} + +type RetentionLimits interface { + RetentionPeriod(userID string) time.Duration + StreamRetention(userID string) []validation.StreamRetention + AllByUserID() map[string]*validation.Limits + DefaultLimits() *validation.Limits +} + +type RetentionManager struct { + cfg RetentionConfig + limits RetentionLimits + bloomStore bloomshipper.Store + sharding retentionSharding + metrics *Metrics + logger log.Logger + lastDayRun storageconfig.DayTime + + // For testing + now func() model.Time +} + +func NewRetentionManager( + cfg RetentionConfig, + limits RetentionLimits, + bloomStore bloomshipper.Store, + sharding retentionSharding, + metrics *Metrics, + logger log.Logger, +) *RetentionManager { + return &RetentionManager{ + cfg: cfg, + limits: limits, + bloomStore: bloomStore, + sharding: sharding, + metrics: metrics, + logger: log.With(logger, "subcomponent", "retention-manager"), + now: model.Now, + lastDayRun: storageconfig.NewDayTime(0), + } +} + +func (r *RetentionManager) Apply(ctx context.Context) error { + if !r.cfg.Enabled { + level.Debug(r.logger).Log("msg", "retention is 
disabled") + return nil + } + + start := r.now() + today := storageconfig.NewDayTime(start) + if !today.After(r.lastDayRun) { + // We've already run retention for today + return nil + } + + ownsRetention, err := r.sharding.OwnsRetention() + if err != nil { + return errors.Wrap(err, "checking if compactor owns retention") + } + if !ownsRetention { + level.Debug(r.logger).Log("msg", "this compactor doesn't own retention") + return nil + } + + level.Info(r.logger).Log("msg", "Applying retention", "today", today.String(), "lastDayRun", r.lastDayRun.String()) + r.metrics.retentionRunning.Set(1) + defer r.metrics.retentionRunning.Set(0) + + tenantsRetention := retentionByTenant(r.limits) + r.reportTenantsExceedingLookback(tenantsRetention) + + defaultLimits := r.limits.DefaultLimits() + defaultRetention := findLongestRetention(time.Duration(defaultLimits.RetentionPeriod), defaultLimits.StreamRetention) + + smallestRetention := smallestEnabledRetention(defaultRetention, tenantsRetention) + if smallestRetention == 0 { + level.Debug(r.logger).Log("msg", "no retention period set for any tenant, skipping retention") + return nil + } + + // Start day is today minus the smallest retention period. + // Note that the last retention day is exclusive. E.g. 30 days retention means we keep 30 days of data, + // thus we start deleting data from the 31st day onwards. + startDay := storageconfig.NewDayTime(today.Add(-smallestRetention)).Dec() + // End day is today minus the max lookback days + endDay := storageconfig.NewDayTime(today.Add(-time.Duration(r.cfg.MaxLookbackDays) * 24 * time.Hour)) + + var daysProcessed int + tenantsRetentionApplied := make(map[string]struct{}, 100) + for day := startDay; day.After(endDay); day = day.Dec() { + dayLogger := log.With(r.logger, "day", day.String()) + bloomClient, err := r.bloomStore.Client(day.ModelTime()) + if err != nil { + level.Error(dayLogger).Log("msg", "failed to get bloom store client", "err", err) + break + } + objectClient := bloomClient.ObjectClient() + + tenants, err := r.bloomStore.TenantFilesForInterval( + ctx, bloomshipper.NewInterval(day.Bounds()), + func(tenant string, _ client.StorageObject) bool { + // Filter out tenants whose retention hasn't expired yet + globalRetention := r.limits.RetentionPeriod(tenant) + streamRetention := r.limits.StreamRetention(tenant) + tenantRetention := findLongestRetention(globalRetention, streamRetention) + expirationDay := storageconfig.NewDayTime(today.Add(-tenantRetention)) + return day.Before(expirationDay) + }, + ) + if err != nil { + r.metrics.retentionTime.WithLabelValues(statusFailure).Observe(time.Since(start.Time()).Seconds()) + r.metrics.retentionDaysPerIteration.WithLabelValues(statusFailure).Observe(float64(daysProcessed)) + r.metrics.retentionTenantsPerIteration.WithLabelValues(statusFailure).Observe(float64(len(tenantsRetentionApplied))) + return errors.Wrap(err, "getting users for period") + } + + if len(tenants) == 0 { + // No tenants for this day means we can break here since previous + // retention iterations have already deleted all tenants + break + } + + for tenant, objects := range tenants { + if len(objects) == 0 { + continue + } + + tenantLogger := log.With(dayLogger, "tenant", tenant) + level.Info(tenantLogger).Log("msg", "applying retention to tenant", "keys", len(objects)) + + // Note: we cannot delete the tenant directory directly because it is not an + // actual key in the object store. Instead, we need to delete all keys one by one. 
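+			// If any delete fails, Apply returns before lastDayRun is updated, so
+			// retention is retried on a subsequent compaction run.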
+ for _, object := range objects { + if err := objectClient.DeleteObject(ctx, object.Key); err != nil { + r.metrics.retentionTime.WithLabelValues(statusFailure).Observe(time.Since(start.Time()).Seconds()) + r.metrics.retentionDaysPerIteration.WithLabelValues(statusFailure).Observe(float64(daysProcessed)) + r.metrics.retentionTenantsPerIteration.WithLabelValues(statusFailure).Observe(float64(len(tenantsRetentionApplied))) + return errors.Wrapf(err, "deleting key %s", object.Key) + } + } + + tenantsRetentionApplied[tenant] = struct{}{} + } + + daysProcessed++ + } + + r.lastDayRun = today + r.metrics.retentionTime.WithLabelValues(statusSuccess).Observe(time.Since(start.Time()).Seconds()) + r.metrics.retentionDaysPerIteration.WithLabelValues(statusSuccess).Observe(float64(daysProcessed)) + r.metrics.retentionTenantsPerIteration.WithLabelValues(statusSuccess).Observe(float64(len(tenantsRetentionApplied))) + level.Info(r.logger).Log("msg", "finished applying retention", "daysProcessed", daysProcessed, "tenants", len(tenantsRetentionApplied)) + + return nil +} + +func (r *RetentionManager) reportTenantsExceedingLookback(retentionByTenant map[string]time.Duration) { + if len(retentionByTenant) == 0 { + r.metrics.retentionTenantsExceedingLookback.Set(0) + return + } + + var tenantsExceedingLookback int + for tenant, retention := range retentionByTenant { + if retention > time.Duration(r.cfg.MaxLookbackDays)*24*time.Hour { + level.Warn(r.logger).Log("msg", "tenant retention exceeds max lookback days", "tenant", tenant, "retention", retention.String()) + } + tenantsExceedingLookback++ + } + + r.metrics.retentionTenantsExceedingLookback.Set(float64(tenantsExceedingLookback)) +} + +func findLongestRetention(globalRetention time.Duration, streamRetention []validation.StreamRetention) time.Duration { + if len(streamRetention) == 0 { + return globalRetention + } + + maxStreamRetention := slices.MaxFunc(streamRetention, func(a, b validation.StreamRetention) int { + return int(a.Period - b.Period) + }) + + if time.Duration(maxStreamRetention.Period) > globalRetention { + return time.Duration(maxStreamRetention.Period) + } + return globalRetention +} + +func retentionByTenant(limits RetentionLimits) map[string]time.Duration { + all := limits.AllByUserID() + if len(all) == 0 { + return nil + } + + retentions := make(map[string]time.Duration, len(all)) + for tenant, lim := range all { + retention := findLongestRetention(time.Duration(lim.RetentionPeriod), lim.StreamRetention) + if retention == 0 { + continue + } + retentions[tenant] = retention + } + + return retentions +} + +// smallestEnabledRetention returns the smallest retention period across all tenants and the default. 
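+// A result of 0 means neither the default limits nor any tenant enables retention,
+// in which case the caller skips the retention pass entirely.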
+func smallestEnabledRetention(defaultRetention time.Duration, perTenantRetention map[string]time.Duration) time.Duration { + if len(perTenantRetention) == 0 { + return defaultRetention + } + + smallest := time.Duration(math.MaxInt64) + if defaultRetention != 0 { + smallest = defaultRetention + } + + for _, retention := range perTenantRetention { + // Skip unlimited retention + if retention == 0 { + continue + } + + if retention < smallest { + smallest = retention + } + } + + if smallest == time.Duration(math.MaxInt64) { + // No tenant nor defaults configures a retention + return 0 + } + + return smallest +} diff --git a/pkg/bloomcompactor/retention_test.go b/pkg/bloomcompactor/retention_test.go new file mode 100644 index 0000000000000..6c3c82c426c3e --- /dev/null +++ b/pkg/bloomcompactor/retention_test.go @@ -0,0 +1,882 @@ +package bloomcompactor + +import ( + "context" + "flag" + "fmt" + "math" + "os" + "testing" + "time" + + "github.com/go-kit/log" + "github.com/grafana/dskit/services" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/storage" + v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/pkg/storage/chunk/client/local" + storageconfig "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + util_log "github.com/grafana/loki/pkg/util/log" + lokiring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/pkg/validation" +) + +var testTime = parseDayTime("2024-12-31").ModelTime() + +func TestRetention(t *testing.T) { + for _, tc := range []struct { + name string + ownsRetention bool + cfg RetentionConfig + lim mockRetentionLimits + prePopulate func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) + expectErr bool + check func(t *testing.T, bloomStore *bloomshipper.BloomStore) + }{ + { + name: "retention disabled", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: false, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 200 * 24 * time.Hour, + "3": 500 * 24 * time.Hour, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 200, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 50, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) + }, + }, + { + name: "compactor does not own retention", + ownsRetention: false, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 200 * 24 * time.Hour, + "3": 500 * 24 * time.Hour, + }, + }, + prePopulate: 
func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 200, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 50, len(metas[0])) + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) + }, + }, + { + name: "unlimited retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 0, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 200, len(metas[0])) + }, + }, + { + name: "default retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + defaultRetention: 30 * 24 * time.Hour, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 31, len(metas[0])) + }, + }, + { + name: "retention lookback smaller than max retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 100, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 20 * 24 * time.Hour, + "3": 200 * 24 * time.Hour, + "4": 400 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + "2": { + { + Period: model.Duration(10 * 24 * time.Hour), + }, + }, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + putMetasForLastNDays(t, schemaCfg, bloomStore, "4", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + // Tenant 1 has 40 days of retention, and we wrote 200 days of metas + // We should get two groups: 0th-40th and 101th-200th + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 2, len(metas)) + require.Equal(t, 41, len(metas[0])) // 0-40th day + require.Equal(t, 100, len(metas[1])) // 100th-200th day + + // Tenant 2 has 20 days of retention, and we wrote 50 days of metas + // We 
should get one group: 0th-20th + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 21, len(metas[0])) // 0th-20th + + // Tenant 3 has 200 days of retention, and we wrote 500 days of metas + // Since the manager looks up to 100 days, we shouldn't have deleted any metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) // 0th-500th + + // Tenant 4 has 400 days of retention, and we wrote 500 days of metas + // Since the manager looks up to 100 days, we shouldn't have deleted any metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "4", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 500, len(metas[0])) // 0th-500th + }, + }, + { + name: "retention lookback bigger than max retention", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 20 * 24 * time.Hour, + "3": 200 * 24 * time.Hour, + "4": 400 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + "2": { + { + Period: model.Duration(10 * 24 * time.Hour), + }, + }, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 200) + putMetasForLastNDays(t, schemaCfg, bloomStore, "2", testTime, 50) + putMetasForLastNDays(t, schemaCfg, bloomStore, "3", testTime, 500) + putMetasForLastNDays(t, schemaCfg, bloomStore, "4", testTime, 500) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + // Tenant 1 has 40 days of retention, and we wrote 200 days of metas + // We should get one groups: 0th-40th + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 41, len(metas[0])) // 0-40th day + + // Tenant 2 has 20 days of retention, and we wrote 50 days of metas + // We should get one group: 0th-20th + metas = getGroupedMetasForLastNDays(t, bloomStore, "2", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 21, len(metas[0])) // 0th-20th + + // Tenant 3 has 200 days of retention, and we wrote 500 days of metas + // We should get one group: 0th-200th + metas = getGroupedMetasForLastNDays(t, bloomStore, "3", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 201, len(metas[0])) // 0th-200th + + // Tenant 4 has 400 days of retention, and we wrote 500 days of metas + // Since the manager looks up to 100 days, we shouldn't have deleted any metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "4", testTime, 500) + require.Equal(t, 1, len(metas)) + require.Equal(t, 401, len(metas[0])) // 0th-400th + }, + }, + { + name: "hit no tenants in table", + ownsRetention: true, + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + lim: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + }, + prePopulate: func(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore) { + // Place metas with a gap of 50 days. 
[0th-100th], [151th-200th] + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime, 100) + putMetasForLastNDays(t, schemaCfg, bloomStore, "1", testTime.Add(-150*24*time.Hour), 50) + }, + check: func(t *testing.T, bloomStore *bloomshipper.BloomStore) { + // We should get two groups: 0th-30th and 151th-200th + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 500) + require.Equal(t, 2, len(metas)) + require.Equal(t, 31, len(metas[0])) // 0th-30th day + require.Equal(t, 50, len(metas[1])) // 151th-200th day + }, + }, + } { + t.Run(tc.name, func(t *testing.T) { + bloomStore, schema, _, err := NewMockBloomStore(t) + require.NoError(t, err) + + rm := NewRetentionManager( + tc.cfg, + tc.lim, + bloomStore, + mockSharding{ + ownsRetention: tc.ownsRetention, + }, + NewMetrics(nil, v1.NewMetrics(nil)), + util_log.Logger, + ) + rm.now = func() model.Time { + return testTime + } + + tc.prePopulate(t, schema, bloomStore) + + err = rm.Apply(context.Background()) + if tc.expectErr { + require.Error(t, err) + return + } + require.NoError(t, err) + + tc.check(t, bloomStore) + }) + } +} + +func TestRetentionRunsOncePerDay(t *testing.T) { + bloomStore, schema, _, err := NewMockBloomStore(t) + require.NoError(t, err) + + rm := NewRetentionManager( + RetentionConfig{ + Enabled: true, + MaxLookbackDays: 365, + }, + mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + }, + bloomStore, + mockSharding{ + ownsRetention: true, + }, + NewMetrics(nil, v1.NewMetrics(nil)), + util_log.Logger, + ) + rm.now = func() model.Time { + return testTime + } + + // Write metas for the last 100 days and run retention + putMetasForLastNDays(t, schema, bloomStore, "1", testTime, 100) + err = rm.Apply(context.Background()) + require.NoError(t, err) + + // We should get only the first 30 days of metas + metas := getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 100) + require.Equal(t, 1, len(metas)) + require.Equal(t, 31, len(metas[0])) // 0th-30th day + + // We now change the now() time to be a bit later in the day + rm.now = func() model.Time { + return testTime.Add(1 * time.Hour) + } + + // Write metas again and run retention. 
Since we already ran retention at now()'s day, + // Apply should be a noop, and therefore we should be able to get all the 100 days of metas + putMetasForLastNDays(t, schema, bloomStore, "1", testTime, 100) + err = rm.Apply(context.Background()) + require.NoError(t, err) + + metas = getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 100) + require.Equal(t, 1, len(metas)) + require.Equal(t, 100, len(metas[0])) + + // We now change the now() time to be the next day, retention should run again + rm.now = func() model.Time { + return testTime.Add(24 * time.Hour) + } + err = rm.Apply(context.Background()) + require.NoError(t, err) + + // We should only see the first 30 days of metas + metas = getGroupedMetasForLastNDays(t, bloomStore, "1", testTime, 100) + require.Equal(t, 1, len(metas)) + require.Equal(t, 30, len(metas[0])) // 0th-30th day +} + +func TestOwnsRetention(t *testing.T) { + for _, tc := range []struct { + name string + numCompactors int + }{ + { + name: "single compactor", + numCompactors: 1, + }, + { + name: "multiple compactors", + numCompactors: 100, + }, + } { + t.Run(tc.name, func(t *testing.T) { + var ringManagers []*lokiring.RingManager + for i := 0; i < tc.numCompactors; i++ { + var cfg Config + cfg.RegisterFlags(flag.NewFlagSet("ring", flag.PanicOnError)) + cfg.Ring.KVStore.Store = "inmemory" + cfg.Ring.InstanceID = fmt.Sprintf("bloom-compactor-%d", i) + cfg.Ring.InstanceAddr = fmt.Sprintf("localhost-%d", i) + + ringManager, err := lokiring.NewRingManager("bloom-compactor", lokiring.ServerMode, cfg.Ring, 1, cfg.Ring.NumTokens, util_log.Logger, prometheus.NewRegistry()) + require.NoError(t, err) + require.NoError(t, ringManager.StartAsync(context.Background())) + + ringManagers = append(ringManagers, ringManager) + } + t.Cleanup(func() { + // Stop all rings and wait for them to stop. + for _, ringManager := range ringManagers { + ringManager.StopAsync() + require.Eventually(t, func() bool { + return ringManager.State() == services.Terminated + }, 1*time.Minute, 100*time.Millisecond) + } + }) + + // Wait for all rings to see each other. 
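+			// Each ring manager must report Running and observe every compactor instance in the ring before ownership is checked below.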
+ for _, ringManager := range ringManagers { + require.Eventually(t, func() bool { + running := ringManager.State() == services.Running + discovered := ringManager.Ring.InstancesCount() == tc.numCompactors + return running && discovered + }, 1*time.Minute, 100*time.Millisecond) + } + + var shardings []retentionSharding + for _, ringManager := range ringManagers { + shardings = append(shardings, newFirstTokenRetentionSharding(ringManager.Ring, ringManager.RingLifecycler)) + } + + var ownsRetention int + for _, sharding := range shardings { + owns, err := sharding.OwnsRetention() + require.NoError(t, err) + if owns { + ownsRetention++ + } + } + + require.Equal(t, 1, ownsRetention) + }) + } +} + +func TestFindLongestRetention(t *testing.T) { + for _, tc := range []struct { + name string + globalRetention time.Duration + streamRetention []validation.StreamRetention + expectedRetention time.Duration + }{ + { + name: "no retention", + expectedRetention: 0, + }, + { + name: "global retention", + globalRetention: 30 * 24 * time.Hour, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "stream retention", + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "two stream retention", + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + { + name: "stream retention bigger than global", + globalRetention: 20 * 24 * time.Hour, + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + { + name: "global retention bigger than stream", + globalRetention: 40 * 24 * time.Hour, + streamRetention: []validation.StreamRetention{ + { + Period: model.Duration(20 * 24 * time.Hour), + }, + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + } { + t.Run(tc.name, func(t *testing.T) { + retention := findLongestRetention(tc.globalRetention, tc.streamRetention) + require.Equal(t, tc.expectedRetention, retention) + }) + } +} + +func TestSmallestRetention(t *testing.T) { + for _, tc := range []struct { + name string + limits RetentionLimits + expectedRetention time.Duration + expectedHasRetention bool + }{ + { + name: "no retention", + limits: mockRetentionLimits{}, + expectedRetention: 0, + }, + { + name: "default global retention", + limits: mockRetentionLimits{ + defaultRetention: 30 * 24 * time.Hour, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "default stream retention", + limits: mockRetentionLimits{ + defaultStreamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "tenant configured unlimited", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 0, + }, + defaultRetention: 30 * 24 * time.Hour, + }, + expectedRetention: 30 * 24 * time.Hour, + }, + { + name: "no default one tenant", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + }, + }, + expectedRetention: 40 * 24 * time.Hour, + }, + { + 
name: "no default two tenants", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + "2": 20 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + "2": { + { + Period: model.Duration(10 * 24 * time.Hour), + }, + }, + }, + }, + expectedRetention: 20 * 24 * time.Hour, + }, + { + name: "default bigger than tenant", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 10 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(20 * 24 * time.Hour), + }, + }, + }, + defaultRetention: 40 * 24 * time.Hour, + defaultStreamRetention: []validation.StreamRetention{ + { + Period: model.Duration(30 * 24 * time.Hour), + }, + }, + }, + expectedRetention: 20 * 24 * time.Hour, + }, + { + name: "tenant bigger than default", + limits: mockRetentionLimits{ + retention: map[string]time.Duration{ + "1": 30 * 24 * time.Hour, + }, + streamRetention: map[string][]validation.StreamRetention{ + "1": { + { + Period: model.Duration(40 * 24 * time.Hour), + }, + }, + }, + defaultRetention: 10 * 24 * time.Hour, + defaultStreamRetention: []validation.StreamRetention{ + { + Period: model.Duration(20 * 24 * time.Hour), + }, + }, + }, + expectedRetention: 20 * 24 * time.Hour, + }, + } { + t.Run(tc.name, func(t *testing.T) { + defaultLim := tc.limits.DefaultLimits() + defaultRetention := findLongestRetention(time.Duration(defaultLim.RetentionPeriod), defaultLim.StreamRetention) + tenantsRetention := retentionByTenant(tc.limits) + + retention := smallestEnabledRetention(defaultRetention, tenantsRetention) + require.Equal(t, tc.expectedRetention, retention) + }) + } +} + +func TestRetentionConfigValidate(t *testing.T) { + for _, tc := range []struct { + name string + cfg RetentionConfig + expectErr bool + }{ + { + name: "enabled and valid", + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 2 * 365, + }, + expectErr: false, + }, + { + name: "invalid max lookback days", + cfg: RetentionConfig{ + Enabled: true, + MaxLookbackDays: 0, + }, + expectErr: true, + }, + { + name: "disabled and invalid", + cfg: RetentionConfig{ + Enabled: false, + MaxLookbackDays: 0, + }, + expectErr: false, + }, + } { + t.Run(tc.name, func(t *testing.T) { + err := tc.cfg.Validate() + if tc.expectErr { + require.Error(t, err) + return + } + require.NoError(t, err) + }) + } +} + +func putMetasForLastNDays(t *testing.T, schemaCfg storageconfig.SchemaConfig, bloomStore *bloomshipper.BloomStore, tenant string, start model.Time, days int) { + const metasPerDay = 2 + + startDay := storageconfig.NewDayTime(start) + endDay := storageconfig.NewDayTime(startDay.Add(-time.Duration(days) * 24 * time.Hour)) + for day := startDay; day.After(endDay); day = day.Dec() { + period, err := schemaCfg.SchemaForTime(day.ModelTime()) + require.NoError(t, err) + + dayTable := storageconfig.NewDayTable(day, period.IndexTables.Prefix) + bloomClient, err := bloomStore.Client(dayTable.ModelTime()) + require.NoErrorf(t, err, "failed to get bloom client for day %d: %s", day, err) + + for i := 0; i < metasPerDay; i++ { + err = bloomClient.PutMeta(context.Background(), bloomshipper.Meta{ + MetaRef: bloomshipper.MetaRef{ + Ref: bloomshipper.Ref{ + TenantID: tenant, + TableName: dayTable.String(), + Bounds: v1.NewBounds(model.Fingerprint(i*100), model.Fingerprint(i*100+100)), + }, + }, + Blocks: []bloomshipper.BlockRef{}, + }) + require.NoError(t, 
err) + } + } +} + +// getMetasForLastNDays returns groups of continuous metas for the last N days. +func getGroupedMetasForLastNDays(t *testing.T, bloomStore *bloomshipper.BloomStore, tenant string, start model.Time, days int) [][][]bloomshipper.Meta { + metasGrouped := make([][][]bloomshipper.Meta, 0) + currentGroup := make([][]bloomshipper.Meta, 0) + + startDay := storageconfig.NewDayTime(start) + endDay := storageconfig.NewDayTime(startDay.Add(-time.Duration(days) * 24 * time.Hour)) + + for day := startDay; day.After(endDay); day = day.Dec() { + metas, err := bloomStore.FetchMetas(context.Background(), bloomshipper.MetaSearchParams{ + TenantID: tenant, + Interval: bloomshipper.NewInterval(day.Bounds()), + Keyspace: v1.NewBounds(0, math.MaxUint64), + }) + require.NoError(t, err) + if len(metas) == 0 { + // We have reached the end of the metas group: cut a new group + if len(currentGroup) > 0 { + metasGrouped = append(metasGrouped, currentGroup) + currentGroup = make([][]bloomshipper.Meta, 0) + } + continue + } + currentGroup = append(currentGroup, metas) + } + + // Append the last group if it's not empty + if len(currentGroup) > 0 { + metasGrouped = append(metasGrouped, currentGroup) + } + + return metasGrouped +} + +func NewMockBloomStore(t *testing.T) (*bloomshipper.BloomStore, storageconfig.SchemaConfig, string, error) { + workDir := t.TempDir() + return NewMockBloomStoreWithWorkDir(t, workDir) +} + +func NewMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*bloomshipper.BloomStore, storageconfig.SchemaConfig, string, error) { + schemaCfg := storageconfig.SchemaConfig{ + Configs: []storageconfig.PeriodConfig{ + { + ObjectType: storageconfig.StorageTypeFileSystem, + From: storageconfig.DayTime{ + Time: testTime.Add(-2 * 365 * 24 * time.Hour), // -2 year + }, + IndexTables: storageconfig.IndexPeriodicTableConfig{ + PeriodicTableConfig: storageconfig.PeriodicTableConfig{ + Period: 24 * time.Hour, + Prefix: "schema_a_table_", + }}, + }, + { + ObjectType: storageconfig.StorageTypeFileSystem, + From: storageconfig.DayTime{ + Time: testTime.Add(-365 * 24 * time.Hour), // -1 year + }, + IndexTables: storageconfig.IndexPeriodicTableConfig{ + PeriodicTableConfig: storageconfig.PeriodicTableConfig{ + Period: 24 * time.Hour, + Prefix: "schema_b_table_", + }}, + }, + }, + } + + storageConfig := storage.Config{ + FSConfig: local.FSConfig{ + Directory: workDir, + }, + BloomShipperConfig: config.Config{ + WorkingDirectory: []string{workDir}, + BlocksDownloadingQueue: config.DownloadingQueueConfig{ + WorkersCount: 1, + }, + BlocksCache: config.BlocksCacheConfig{ + SoftLimit: 1 << 20, + HardLimit: 2 << 20, + TTL: time.Hour, + PurgeInterval: time.Hour, + }, + }, + } + + reg := prometheus.NewPedanticRegistry() + metrics := storage.NewClientMetrics() + t.Cleanup(metrics.Unregister) + logger := log.NewLogfmtLogger(os.Stderr) + + metasCache := cache.NewMockCache() + blocksCache := bloomshipper.NewFsBlocksCache(storageConfig.BloomShipperConfig.BlocksCache, prometheus.NewPedanticRegistry(), logger) + + store, err := bloomshipper.NewBloomStore(schemaCfg.Configs, storageConfig, metrics, metasCache, blocksCache, reg, logger) + if err == nil { + t.Cleanup(store.Stop) + } + + return store, schemaCfg, workDir, err +} + +type mockRetentionLimits struct { + retention map[string]time.Duration + streamRetention map[string][]validation.StreamRetention + defaultRetention time.Duration + defaultStreamRetention []validation.StreamRetention +} + +func (m mockRetentionLimits) RetentionPeriod(tenant string) 
time.Duration { + return m.retention[tenant] +} + +func (m mockRetentionLimits) StreamRetention(tenant string) []validation.StreamRetention { + return m.streamRetention[tenant] +} + +func (m mockRetentionLimits) AllByUserID() map[string]*validation.Limits { + tenants := make(map[string]*validation.Limits, len(m.retention)) + + for tenant, retention := range m.retention { + if _, ok := tenants[tenant]; !ok { + tenants[tenant] = &validation.Limits{} + } + tenants[tenant].RetentionPeriod = model.Duration(retention) + } + + for tenant, streamRetention := range m.streamRetention { + if _, ok := tenants[tenant]; !ok { + tenants[tenant] = &validation.Limits{} + } + tenants[tenant].StreamRetention = streamRetention + } + + return tenants +} + +func (m mockRetentionLimits) DefaultLimits() *validation.Limits { + return &validation.Limits{ + RetentionPeriod: model.Duration(m.defaultRetention), + StreamRetention: m.defaultStreamRetention, + } +} + +type mockSharding struct { + ownsRetention bool +} + +func (m mockSharding) OwnsRetention() (bool, error) { + return m.ownsRetention, nil +} diff --git a/pkg/bloomgateway/processor_test.go b/pkg/bloomgateway/processor_test.go index d70451a127867..d0ecaf769944f 100644 --- a/pkg/bloomgateway/processor_test.go +++ b/pkg/bloomgateway/processor_test.go @@ -15,6 +15,7 @@ import ( "go.uber.org/atomic" "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/pkg/storage/chunk/client" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" "github.com/grafana/loki/pkg/util/constants" @@ -55,6 +56,10 @@ func (s *dummyStore) FetchMetas(_ context.Context, _ bloomshipper.MetaSearchPara return s.metas, nil } +func (s *dummyStore) TenantFilesForInterval(_ context.Context, _ bloomshipper.Interval, _ func(tenant string, object client.StorageObject) bool) (map[string][]client.StorageObject, error) { + return nil, nil +} + func (s *dummyStore) Fetcher(_ model.Time) (*bloomshipper.Fetcher, error) { return nil, nil } diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index b62a122fe5d8d..e9fc10bfc09e8 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -77,7 +77,6 @@ import ( util_log "github.com/grafana/loki/pkg/util/log" "github.com/grafana/loki/pkg/util/querylimits" lokiring "github.com/grafana/loki/pkg/util/ring" - util_ring "github.com/grafana/loki/pkg/util/ring" serverutil "github.com/grafana/loki/pkg/util/server" "github.com/grafana/loki/pkg/validation" ) @@ -1483,15 +1482,14 @@ func (t *Loki) initBloomCompactor() (services.Service, error) { } logger := log.With(util_log.Logger, "component", "bloom-compactor") - shuffleSharding := util_ring.NewTenantShuffleSharding(t.bloomCompactorRingManager.Ring, t.bloomCompactorRingManager.RingLifecycler, t.Overrides.BloomCompactorShardSize) - return bloomcompactor.New( t.Cfg.BloomCompactor, t.Cfg.SchemaConfig, t.Cfg.StorageConfig, t.ClientMetrics, t.Store, - shuffleSharding, + t.bloomCompactorRingManager.Ring, + t.bloomCompactorRingManager.RingLifecycler, t.Overrides, t.BloomStore, logger, diff --git a/pkg/storage/config/schema_config.go b/pkg/storage/config/schema_config.go index 968ca87e609b7..4b96a722cadb8 100644 --- a/pkg/storage/config/schema_config.go +++ b/pkg/storage/config/schema_config.go @@ -201,7 +201,8 @@ func (cfg *PeriodConfig) GetIndexTableNumberRange(schemaEndDate DayTime) TableRa } func NewDayTime(d model.Time) DayTime { - return DayTime{d} + beginningOfDay := model.TimeFromUnix(d.Time().Truncate(24 * time.Hour).Unix()) + return 
DayTime{beginningOfDay} } // DayTime is a model.Time what holds day-aligned values, and marshals to/from diff --git a/pkg/storage/stores/shipper/bloomshipper/client.go b/pkg/storage/stores/shipper/bloomshipper/client.go index 7e9128a9971a3..eef3b667d253e 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client.go +++ b/pkg/storage/stores/shipper/bloomshipper/client.go @@ -241,6 +241,7 @@ type Client interface { MetaClient BlockClient IsObjectNotFoundErr(err error) bool + ObjectClient() client.ObjectClient Stop() } @@ -270,6 +271,10 @@ func NewBloomClient(cfg bloomStoreConfig, client client.ObjectClient, logger log }, nil } +func (b *BloomClient) ObjectClient() client.ObjectClient { + return b.client +} + func (b *BloomClient) IsObjectNotFoundErr(err error) bool { return b.client.IsObjectNotFoundErr(err) } diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver.go b/pkg/storage/stores/shipper/bloomshipper/resolver.go index 7fb6652ebd174..b93f84f827048 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver.go @@ -32,6 +32,8 @@ type KeyResolver interface { ParseMetaKey(Location) (MetaRef, error) Block(BlockRef) Location ParseBlockKey(Location) (BlockRef, error) + Tenant(tenant, table string) Location + TenantPrefix(loc Location) (string, error) } type defaultKeyResolver struct{} @@ -126,6 +128,27 @@ func (defaultKeyResolver) ParseBlockKey(loc Location) (BlockRef, error) { }, nil } +func (defaultKeyResolver) Tenant(tenant, table string) Location { + return simpleLocation{ + BloomPrefix, + table, + tenant, + } +} + +func (defaultKeyResolver) TenantPrefix(loc Location) (string, error) { + dir, fn := path.Split(loc.Addr()) + + dirParts := strings.Split(path.Clean(dir), "/") + dirParts = append(dirParts, path.Clean(fn)) + if len(dirParts) < 3 { + return "", fmt.Errorf("directory parts count must be 3 or greater, but was %d : [%s]", len(dirParts), loc) + } + + // The tenant is the third part of the directory. E.g. bloom/schema_b_table_20088/1/metas where 1 is the tenant + return dirParts[2], nil +} + type PrefixedResolver struct { prefix string KeyResolver diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index aed16cd8c2532..ce15d4cc2663a 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -30,6 +30,10 @@ type Store interface { ResolveMetas(ctx context.Context, params MetaSearchParams) ([][]MetaRef, []*Fetcher, error) FetchMetas(ctx context.Context, params MetaSearchParams) ([]Meta, error) FetchBlocks(ctx context.Context, refs []BlockRef, opts ...FetchOption) ([]*CloseableBlockQuerier, error) + TenantFilesForInterval( + ctx context.Context, interval Interval, + filter func(tenant string, object client.StorageObject) bool, + ) (map[string][]client.StorageObject, error) Fetcher(ts model.Time) (*Fetcher, error) Client(ts model.Time) (Client, error) Stop() @@ -133,6 +137,86 @@ func (b *bloomStoreEntry) FetchBlocks(ctx context.Context, refs []BlockRef, opts return b.fetcher.FetchBlocks(ctx, refs, opts...) 
} +func (b *bloomStoreEntry) TenantFilesForInterval( + ctx context.Context, + interval Interval, + filter func(tenant string, object client.StorageObject) bool, +) (map[string][]client.StorageObject, error) { + tables := tablesForRange(b.cfg, interval) + if len(tables) == 0 { + return nil, nil + } + + tenants := make(map[string][]client.StorageObject, 100) + for _, table := range tables { + prefix := path.Join(rootFolder, table) + level.Debug(b.fetcher.logger).Log( + "msg", "listing tenants", + "store", b.cfg.From, + "table", table, + "prefix", prefix, + ) + objects, _, err := b.objectClient.List(ctx, prefix, "") + if err != nil { + if b.objectClient.IsObjectNotFoundErr(err) { + continue + } + + return nil, fmt.Errorf("error listing tenants under prefix [%s]: %w", prefix, err) + } + if len(objects) == 0 { + continue + } + + // Sort objects by the key to ensure keys are sorted by tenant. + cmpObj := func(a, b client.StorageObject) int { + if a.Key < b.Key { + return -1 + } + if a.Key > b.Key { + return 1 + } + return 0 + } + if !slices.IsSortedFunc(objects, cmpObj) { + slices.SortFunc(objects, cmpObj) + } + + for i := 0; i < len(objects); i++ { + tenant, err := b.TenantPrefix(key(objects[i].Key)) + if err != nil { + return nil, fmt.Errorf("error parsing tenant key [%s]: %w", objects[i].Key, err) + } + + // Search next object with different tenant + var j int + for j = i + 1; j < len(objects); j++ { + nextTenant, err := b.TenantPrefix(key(objects[j].Key)) + if err != nil { + return nil, fmt.Errorf("error parsing tenant key [%s]: %w", objects[i].Key, err) + } + if nextTenant != tenant { + break + } + } + + if _, ok := tenants[tenant]; !ok { + tenants[tenant] = nil // Initialize tenant with empty slice + } + + if filter != nil && !filter(tenant, objects[i]) { + continue + } + + // Add all objects for this tenant + tenants[tenant] = append(tenants[tenant], objects[i:j]...) + i = j - 1 // -1 because the loop will increment i by 1 + } + } + + return tenants, nil +} + // Fetcher implements Store. func (b *bloomStoreEntry) Fetcher(_ model.Time) (*Fetcher, error) { return b.fetcher, nil @@ -269,6 +353,34 @@ func (b *BloomStore) Block(ref BlockRef) (loc Location) { return } +func (b *BloomStore) TenantFilesForInterval( + ctx context.Context, + interval Interval, + filter func(tenant string, object client.StorageObject) bool, +) (map[string][]client.StorageObject, error) { + var allTenants map[string][]client.StorageObject + + err := b.forStores(ctx, interval, func(innerCtx context.Context, interval Interval, store Store) error { + tenants, err := store.TenantFilesForInterval(innerCtx, interval, filter) + if err != nil { + return err + } + + if allTenants == nil { + allTenants = tenants + return nil + } + + for tenant, files := range tenants { + allTenants[tenant] = append(allTenants[tenant], files...) + } + + return nil + }) + + return allTenants, err +} + // Fetcher implements Store. 
func (b *BloomStore) Fetcher(ts model.Time) (*Fetcher, error) { if store := b.getStore(ts); store != nil { diff --git a/pkg/storage/stores/shipper/bloomshipper/store_test.go b/pkg/storage/stores/shipper/bloomshipper/store_test.go index 074a965ddb5b4..6c206161839a6 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/store_test.go @@ -18,19 +18,23 @@ import ( "github.com/grafana/loki/pkg/storage" v1 "github.com/grafana/loki/pkg/storage/bloom/v1" "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/pkg/storage/chunk/client/local" storageconfig "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" ) func newMockBloomStore(t *testing.T) (*BloomStore, string, error) { - workDir := t.TempDir() - return newMockBloomStoreWithWorkDir(t, workDir) + dir := t.TempDir() + workDir := filepath.Join(dir, "bloomshipper") + storeDir := filepath.Join(dir, "fs-storage") + return newMockBloomStoreWithWorkDir(t, workDir, storeDir) } -func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, string, error) { +func newMockBloomStoreWithWorkDir(t *testing.T, workDir, storeDir string) (*BloomStore, string, error) { periodicConfigs := []storageconfig.PeriodConfig{ { - ObjectType: storageconfig.StorageTypeInMemory, + ObjectType: storageconfig.StorageTypeFileSystem, From: parseDayTime("2024-01-01"), IndexTables: storageconfig.IndexPeriodicTableConfig{ PeriodicTableConfig: storageconfig.PeriodicTableConfig{ @@ -39,7 +43,7 @@ func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, st }}, }, { - ObjectType: storageconfig.StorageTypeInMemory, + ObjectType: storageconfig.StorageTypeFileSystem, From: parseDayTime("2024-02-01"), IndexTables: storageconfig.IndexPeriodicTableConfig{ PeriodicTableConfig: storageconfig.PeriodicTableConfig{ @@ -50,6 +54,9 @@ func newMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*BloomStore, st } storageConfig := storage.Config{ + FSConfig: local.FSConfig{ + Directory: storeDir, + }, BloomShipperConfig: config.Config{ WorkingDirectory: []string{workDir}, BlocksDownloadingQueue: config.DownloadingQueueConfig{ @@ -272,6 +279,118 @@ func TestBloomStore_FetchBlocks(t *testing.T) { ) } +func TestBloomStore_TenantFilesForInterval(t *testing.T) { + ctx := context.Background() + var keyResolver defaultKeyResolver + + store, _, err := newMockBloomStore(t) + require.NoError(t, err) + + // schema 1 + // day 1 - 1 tenant + s1d1t1m1, _ := createMetaInStorage(store, "1", parseTime("2024-01-19 00:00"), 0x00010000, 0x0001ffff) + s1d1t1m2, _ := createMetaInStorage(store, "1", parseTime("2024-01-19 00:00"), 0x00000000, 0x0000ffff) + // day 2 - 2 tenants + s1d2t1m1, _ := createMetaInStorage(store, "1", parseTime("2024-01-20 00:00"), 0x00010000, 0x0001ffff) + s1d2t1m2, _ := createMetaInStorage(store, "1", parseTime("2024-01-20 00:00"), 0x00000000, 0x0000ffff) + s1d2t2m1, _ := createMetaInStorage(store, "2", parseTime("2024-01-20 00:00"), 0x00010000, 0x0001ffff) + s1d2t2m2, _ := createMetaInStorage(store, "2", parseTime("2024-01-20 00:00"), 0x00000000, 0x0000ffff) + + // schema 2 + // day 1 - 2 tenants + s2d1t1m1, _ := createMetaInStorage(store, "1", parseTime("2024-02-07 00:00"), 0x00010000, 0x0001ffff) + s2d1t1m2, _ := createMetaInStorage(store, "1", parseTime("2024-02-07 00:00"), 0x00000000, 0x0000ffff) + s2d1t2m1, _ := createMetaInStorage(store, "2", 
parseTime("2024-02-07 00:00"), 0x00010000, 0x0001ffff) + s2d1t2m2, _ := createMetaInStorage(store, "2", parseTime("2024-02-07 00:00"), 0x00000000, 0x0000ffff) + // day 2 - 1 tenant + s2d2t2m1, _ := createMetaInStorage(store, "2", parseTime("2024-02-10 00:00"), 0x00010000, 0x0001ffff) + s2d2t2m2, _ := createMetaInStorage(store, "2", parseTime("2024-02-10 00:00"), 0x00000000, 0x0000ffff) + + t.Run("no filter", func(t *testing.T) { + tenantFiles, err := store.TenantFilesForInterval( + ctx, + NewInterval(parseTime("2024-01-18 00:00"), parseTime("2024-02-12 00:00")), + nil, + ) + require.NoError(t, err) + + var tenants []string + for tenant := range tenantFiles { + tenants = append(tenants, tenant) + } + require.ElementsMatch(t, []string{"1", "2"}, tenants) + + tenant1Keys := keysFromStorageObjects(tenantFiles["1"]) + expectedTenant1Keys := []string{ + // schema 1 - day 1 + keyResolver.Meta(s1d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d1t1m2.MetaRef).Addr(), + // schema 1 - day 2 + keyResolver.Meta(s1d2t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d2t1m2.MetaRef).Addr(), + // schema 2 - day 1 + keyResolver.Meta(s2d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s2d1t1m2.MetaRef).Addr(), + } + require.ElementsMatch(t, expectedTenant1Keys, tenant1Keys) + + tenant2Keys := keysFromStorageObjects(tenantFiles["2"]) + expectedTenant2Keys := []string{ + // schema 1 - day 2 + keyResolver.Meta(s1d2t2m1.MetaRef).Addr(), + keyResolver.Meta(s1d2t2m2.MetaRef).Addr(), + // schema 2 - day 1 + keyResolver.Meta(s2d1t2m1.MetaRef).Addr(), + keyResolver.Meta(s2d1t2m2.MetaRef).Addr(), + // schema 2 - day 2 + keyResolver.Meta(s2d2t2m1.MetaRef).Addr(), + keyResolver.Meta(s2d2t2m2.MetaRef).Addr(), + } + require.ElementsMatch(t, expectedTenant2Keys, tenant2Keys) + }) + + t.Run("filter tenant 1", func(t *testing.T) { + tenantFiles, err := store.TenantFilesForInterval( + ctx, + NewInterval(parseTime("2024-01-18 00:00"), parseTime("2024-02-12 00:00")), + func(tenant string, object client.StorageObject) bool { + return tenant == "1" + }, + ) + require.NoError(t, err) + + var tenants []string + for tenant := range tenantFiles { + tenants = append(tenants, tenant) + } + require.ElementsMatch(t, []string{"1", "2"}, tenants) + + tenant1Keys := keysFromStorageObjects(tenantFiles["1"]) + expectedTenant1Keys := []string{ + // schema 1 - day 1 + keyResolver.Meta(s1d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d1t1m2.MetaRef).Addr(), + // schema 1 - day 2 + keyResolver.Meta(s1d2t1m1.MetaRef).Addr(), + keyResolver.Meta(s1d2t1m2.MetaRef).Addr(), + // schema 2 - day 1 + keyResolver.Meta(s2d1t1m1.MetaRef).Addr(), + keyResolver.Meta(s2d1t1m2.MetaRef).Addr(), + } + require.ElementsMatch(t, expectedTenant1Keys, tenant1Keys) + + tenant2Keys := keysFromStorageObjects(tenantFiles["2"]) + require.Empty(t, tenant2Keys) + }) +} + +func keysFromStorageObjects(objects []client.StorageObject) (keys []string) { + for _, object := range objects { + keys = append(keys, object.Key) + } + return keys +} + func TestBloomShipper_WorkingDir(t *testing.T) { t.Run("insufficient permissions on directory yields error", func(t *testing.T) { base := t.TempDir() @@ -281,7 +400,7 @@ func TestBloomShipper_WorkingDir(t *testing.T) { fi, _ := os.Stat(wd) t.Log("working directory", wd, fi.Mode()) - _, _, err = newMockBloomStoreWithWorkDir(t, wd) + _, _, err = newMockBloomStoreWithWorkDir(t, wd, base) require.ErrorContains(t, err, "insufficient permissions") }) @@ -291,7 +410,7 @@ func TestBloomShipper_WorkingDir(t *testing.T) { wd := filepath.Join(base, "doesnotexist") 
t.Log("working directory", wd) - store, _, err := newMockBloomStoreWithWorkDir(t, wd) + store, _, err := newMockBloomStoreWithWorkDir(t, wd, base) require.NoError(t, err) b, err := createBlockInStorage(t, store, "tenant", parseTime("2024-01-20 00:00"), 0x00000000, 0x0000ffff) require.NoError(t, err) From 111c19b38147837041c6cc7c7483abe00c0c1599 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Thu, 28 Mar 2024 13:02:24 +0200 Subject: [PATCH 28/54] feat(helm): add support for additional `PodLogs` pipeline stages in `selfMonitoring` (#12389) Signed-off-by: Danny Kopping --- docs/sources/setup/install/helm/reference.md | 9 +++++++++ production/helm/loki/CHANGELOG.md | 4 ++++ production/helm/loki/Chart.yaml | 2 +- production/helm/loki/README.md | 2 +- production/helm/loki/templates/monitoring/pod-logs.yaml | 3 +++ production/helm/loki/values.yaml | 3 +++ 6 files changed, 21 insertions(+), 2 deletions(-) diff --git a/docs/sources/setup/install/helm/reference.md b/docs/sources/setup/install/helm/reference.md index 62f474672f446..d44ee934d0c92 100644 --- a/docs/sources/setup/install/helm/reference.md +++ b/docs/sources/setup/install/helm/reference.md @@ -2851,6 +2851,15 @@ null
 {}
 
+		<tr>
+			<td>monitoring.selfMonitoring.podLogs.additionalPipelineStages</td>
+			<td>list</td>
+			<td>Additional pipeline stages to process logs after scraping https://grafana.com/docs/agent/latest/operator/api/#pipelinestagespec-a-namemonitoringgrafanacomv1alpha1pipelinestagespeca</td>
+			<td><pre lang="json">
+[]
+</pre>
+</td>
+		</tr>
diff --git a/production/helm/loki/CHANGELOG.md b/production/helm/loki/CHANGELOG.md index 7f8a5043c1505..60ff91c766e1e 100644 --- a/production/helm/loki/CHANGELOG.md +++ b/production/helm/loki/CHANGELOG.md @@ -13,6 +13,10 @@ Entries should include a reference to the pull request that introduced the chang [//]: # ( : do not remove this line. This locator is used by the CI pipeline to automatically create a changelog entry for each new Loki release. Add other chart versions and respective changelog entries bellow this line.) +## 5.47.2 + +- [ENHANCEMENT] Allow for additional pipeline stages to be configured on the `selfMonitoring` `Podlogs` resource. + ## 5.47.1 - [BUGFIX] Increase default value of backend minReplicas to 3 diff --git a/production/helm/loki/Chart.yaml b/production/helm/loki/Chart.yaml index 6b3423e2cca15..21972fe007099 100644 --- a/production/helm/loki/Chart.yaml +++ b/production/helm/loki/Chart.yaml @@ -3,7 +3,7 @@ name: loki description: Helm chart for Grafana Loki in simple, scalable mode type: application appVersion: 2.9.6 -version: 5.47.1 +version: 5.47.2 home: https://grafana.github.io/helm-charts sources: - https://github.com/grafana/loki diff --git a/production/helm/loki/README.md b/production/helm/loki/README.md index cdafd290f3685..942498d471476 100644 --- a/production/helm/loki/README.md +++ b/production/helm/loki/README.md @@ -1,6 +1,6 @@ # loki -![Version: 5.47.1](https://img.shields.io/badge/Version-5.47.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 2.9.6](https://img.shields.io/badge/AppVersion-2.9.6-informational?style=flat-square) +![Version: 5.47.2](https://img.shields.io/badge/Version-5.47.2-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 2.9.6](https://img.shields.io/badge/AppVersion-2.9.6-informational?style=flat-square) Helm chart for Grafana Loki in simple, scalable mode diff --git a/production/helm/loki/templates/monitoring/pod-logs.yaml b/production/helm/loki/templates/monitoring/pod-logs.yaml index 3fc95c44e22b6..317339d7668f9 100644 --- a/production/helm/loki/templates/monitoring/pod-logs.yaml +++ b/production/helm/loki/templates/monitoring/pod-logs.yaml @@ -18,6 +18,9 @@ metadata: spec: pipelineStages: - cri: { } + {{- with .additionalPipelineStages }} + {{- toYaml . 
| nindent 4 }} + {{- end }} relabelings: - action: replace sourceLabels: diff --git a/production/helm/loki/values.yaml b/production/helm/loki/values.yaml index 04d2660917299..70d853bca2ce8 100644 --- a/production/helm/loki/values.yaml +++ b/production/helm/loki/values.yaml @@ -691,6 +691,9 @@ monitoring: # -- PodLogs relabel configs to apply to samples before scraping # https://github.com/prometheus-operator/prometheus-operator/blob/master/Documentation/api.md#relabelconfig relabelings: [] + # -- Additional pipeline stages to process logs after scraping + # https://grafana.com/docs/agent/latest/operator/api/#pipelinestagespec-a-namemonitoringgrafanacomv1alpha1pipelinestagespeca + additionalPipelineStages: [] # LogsInstance configuration logsInstance: # -- LogsInstance annotations From ca190a3a7dbe58877b6afa903ce068b41a53f874 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 28 Mar 2024 15:20:57 +0100 Subject: [PATCH 29/54] feat: Allows to wrap distributor Tee (#12391) --- pkg/distributor/tee.go | 21 ++++++++++++++++ pkg/distributor/tee_test.go | 48 +++++++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+) create mode 100644 pkg/distributor/tee_test.go diff --git a/pkg/distributor/tee.go b/pkg/distributor/tee.go index 460f9622b2ea2..04acb1e22c0df 100644 --- a/pkg/distributor/tee.go +++ b/pkg/distributor/tee.go @@ -4,3 +4,24 @@ package distributor type Tee interface { Duplicate(tenant string, streams []KeyedStream) } + +// WrapTee wraps a new Tee around an existing Tee. +func WrapTee(existing, new Tee) Tee { + if existing == nil { + return new + } + if multi, ok := existing.(*multiTee); ok { + return &multiTee{append(multi.tees, new)} + } + return &multiTee{tees: []Tee{existing, new}} +} + +type multiTee struct { + tees []Tee +} + +func (m *multiTee) Duplicate(tenant string, streams []KeyedStream) { + for _, tee := range m.tees { + tee.Duplicate(tenant, streams) + } +} diff --git a/pkg/distributor/tee_test.go b/pkg/distributor/tee_test.go new file mode 100644 index 0000000000000..f953e09b75111 --- /dev/null +++ b/pkg/distributor/tee_test.go @@ -0,0 +1,48 @@ +package distributor + +import ( + "testing" + + "github.com/stretchr/testify/mock" + + "github.com/grafana/loki/pkg/push" +) + +type mockedTee struct { + mock.Mock +} + +func (m *mockedTee) Duplicate(tenant string, streams []KeyedStream) { + m.Called(tenant, streams) +} + +func TestWrapTee(t *testing.T) { + tee1 := new(mockedTee) + tee2 := new(mockedTee) + tee3 := new(mockedTee) + streams := []KeyedStream{ + { + HashKey: 1, + Stream: push.Stream{}, + }, + } + tee1.On("Duplicate", "1", streams).Once() + tee1.On("Duplicate", "2", streams).Once() + tee2.On("Duplicate", "2", streams).Once() + tee1.On("Duplicate", "3", streams).Once() + tee2.On("Duplicate", "3", streams).Once() + tee3.On("Duplicate", "3", streams).Once() + + wrappedTee := WrapTee(nil, tee1) + wrappedTee.Duplicate("1", streams) + + wrappedTee = WrapTee(wrappedTee, tee2) + wrappedTee.Duplicate("2", streams) + + wrappedTee = WrapTee(wrappedTee, tee3) + wrappedTee.Duplicate("3", streams) + + tee1.AssertExpectations(t) + tee2.AssertExpectations(t) + tee3.AssertExpectations(t) +} From 3202e347296b049666506a5a5c4d1036b720584f Mon Sep 17 00:00:00 2001 From: crazeteam <164632007+crazeteam@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:08:54 +0800 Subject: [PATCH 30/54] chore: fix misorganized annotations (#12351) Signed-off-by: crazeteam --- pkg/util/validation/limits.go | 2 +- pkg/validation/limits.go | 4 ++-- tools/querytee/response_comparator.go | 2 +- 3 
files changed, 4 insertions(+), 4 deletions(-) diff --git a/pkg/util/validation/limits.go b/pkg/util/validation/limits.go index fdbd31fa8bbe9..e5e80177c64d0 100644 --- a/pkg/util/validation/limits.go +++ b/pkg/util/validation/limits.go @@ -69,7 +69,7 @@ func MaxDurationPerTenant(tenantIDs []string, f func(string) time.Duration) time return result } -// MaxDurationOrDisabledPerTenant is returning the maximum duration per tenant or zero if one tenant has time.Duration(0). +// MaxDurationOrZeroPerTenant is returning the maximum duration per tenant or zero if one tenant has time.Duration(0). func MaxDurationOrZeroPerTenant(tenantIDs []string, f func(string) time.Duration) time.Duration { var result *time.Duration for _, tenantID := range tenantIDs { diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index e8f2ab2d994b3..757f88d25c608 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -575,7 +575,7 @@ func (o *Overrides) MaxQueryLength(_ context.Context, userID string) time.Durati // so nooping in Loki until then. func (o *Overrides) MaxChunksPerQueryFromStore(_ string) int { return 0 } -// MaxQueryLength returns the limit of the series of metric queries. +// MaxQuerySeries returns the limit of the series of metric queries. func (o *Overrides) MaxQuerySeries(_ context.Context, userID string) int { return o.getOverridesForUser(userID).MaxQuerySeries } @@ -688,7 +688,7 @@ func (o *Overrides) MaxLineSize(userID string) int { return o.getOverridesForUser(userID).MaxLineSize.Val() } -// MaxLineSizeShouldTruncate returns whether lines longer than max should be truncated. +// MaxLineSizeTruncate returns whether lines longer than max should be truncated. func (o *Overrides) MaxLineSizeTruncate(userID string) bool { return o.getOverridesForUser(userID).MaxLineSizeTruncate } diff --git a/tools/querytee/response_comparator.go b/tools/querytee/response_comparator.go index ae4db40185aee..a6cb8544fa081 100644 --- a/tools/querytee/response_comparator.go +++ b/tools/querytee/response_comparator.go @@ -50,7 +50,7 @@ type SamplesComparator struct { sampleTypesComparator map[string]SamplesComparatorFunc } -// RegisterSamplesComparator helps with registering custom sample types +// RegisterSamplesType helps with registering custom sample types func (s *SamplesComparator) RegisterSamplesType(samplesType string, comparator SamplesComparatorFunc) { s.sampleTypesComparator[samplesType] = comparator } From 523b5dea5b50d332a6a6f9782f2105eb965436fd Mon Sep 17 00:00:00 2001 From: Benoit Arnaud Date: Thu, 28 Mar 2024 20:32:11 +0100 Subject: [PATCH 31/54] docs: Update _index.md (#10773) Co-authored-by: Michel Hollands <42814411+MichelHollands@users.noreply.github.com> Co-authored-by: J Stickler --- .../setup/install/helm/install-monolithic/_index.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/sources/setup/install/helm/install-monolithic/_index.md b/docs/sources/setup/install/helm/install-monolithic/_index.md index 01a3d6d357e10..e85d6a52159b5 100644 --- a/docs/sources/setup/install/helm/install-monolithic/_index.md +++ b/docs/sources/setup/install/helm/install-monolithic/_index.md @@ -60,10 +60,17 @@ If you set the `singleBinary.replicas` value to 2 or more, this chart configures ruler: loki-ruler admin: loki-admin type: 's3' + bucketNames: + chunks: loki-chunks + ruler: loki-ruler + admin: loki-admin s3: endpoint: foo.aws.com + region: secretAccessKey: supersecret accessKeyId: secret + s3ForcePathStyle: false + insecure: false singleBinary: replicas: 3 ``` From 
3922d3864a8a2af09df236bad34e6bbb053181ea Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Thu, 28 Mar 2024 17:16:42 -0700 Subject: [PATCH 32/54] fix(tsdb): correctly use bit prefix calculation in tsdb shard matching (#12394) --- pkg/logql/shards.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/logql/shards.go b/pkg/logql/shards.go index 7ca7f67cb367d..7d35cea26d761 100644 --- a/pkg/logql/shards.go +++ b/pkg/logql/shards.go @@ -159,7 +159,7 @@ func (s *Shard) Match(fp model.Fingerprint) bool { return v1.BoundsFromProto(s.Bounded.Bounds).Match(fp) } - return s.PowerOfTwo.Match(fp) + return s.PowerOfTwo.TSDB().Match(fp) } func (s *Shard) GetFromThrough() (model.Fingerprint, model.Fingerprint) { From 6e1547fe62403132d1453519a4750ce6746c86e8 Mon Sep 17 00:00:00 2001 From: Owen Diehl Date: Thu, 28 Mar 2024 17:59:15 -0700 Subject: [PATCH 33/54] fix(blooms): logql shard embeds tsdb (#12396) --- pkg/ingester/index/bitprefix.go | 4 +- pkg/ingester/index/bitprefix_test.go | 39 +++++----- pkg/ingester/index/index.go | 12 +-- pkg/ingester/index/index_test.go | 30 +++---- pkg/ingester/index/multi_test.go | 5 +- pkg/logql/downstream_test.go | 8 +- pkg/logql/shardmapper.go | 8 +- pkg/logql/shardmapper_test.go | 86 ++++++++++----------- pkg/logql/shards.go | 14 ++-- pkg/logql/shards_test.go | 12 +-- pkg/logql/test_utils.go | 8 +- pkg/querier/queryrange/downstreamer_test.go | 8 +- 12 files changed, 117 insertions(+), 117 deletions(-) diff --git a/pkg/ingester/index/bitprefix.go b/pkg/ingester/index/bitprefix.go index 524bd56a69858..fe24a885917bc 100644 --- a/pkg/ingester/index/bitprefix.go +++ b/pkg/ingester/index/bitprefix.go @@ -69,7 +69,7 @@ func (ii *BitPrefixInvertedIndex) getShards(shard *logql.Shard) ([]*indexShard, switch shard.Variant() { case logql.PowerOfTwoVersion: - if shard.PowerOfTwo.Of <= len(ii.shards) { + if int(shard.PowerOfTwo.Of) <= len(ii.shards) { filter = false } } @@ -114,7 +114,7 @@ func (ii *BitPrefixInvertedIndex) validateShard(shard *logql.Shard) error { switch shard.Variant() { case logql.PowerOfTwoVersion: - return shard.PowerOfTwo.TSDB().Validate() + return shard.PowerOfTwo.Validate() } return nil diff --git a/pkg/ingester/index/bitprefix_test.go b/pkg/ingester/index/bitprefix_test.go index 4c67ac4c5e32c..fbb297bd9c265 100644 --- a/pkg/ingester/index/bitprefix_test.go +++ b/pkg/ingester/index/bitprefix_test.go @@ -12,7 +12,6 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) @@ -24,20 +23,20 @@ func Test_BitPrefixGetShards(t *testing.T) { expected []uint32 }{ // equal factors - {16, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, - {16, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{4}}, - {16, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{15}}, + {16, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, + {16, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{4}}, + {16, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{15}}, // idx factor a larger factor of 2 - {32, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0, 1}}, - {32, false, 
logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{8, 9}}, - {32, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{30, 31}}, - {64, false, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{60, 61, 62, 63}}, + {32, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0, 1}}, + {32, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{8, 9}}, + {32, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{30, 31}}, + {64, false, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{60, 61, 62, 63}}, // // idx factor a smaller factor of 2 - {8, true, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, - {8, true, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{2}}, - {8, true, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{7}}, + {8, true, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 16}).Ptr(), []uint32{0}}, + {8, true, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 4, Of: 16}).Ptr(), []uint32{2}}, + {8, true, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 15, Of: 16}).Ptr(), []uint32{7}}, } { tt := tt t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), func(t *testing.T) { @@ -151,8 +150,8 @@ func Test_BitPrefixGetShards_Bounded(t *testing.T) { func Test_BitPrefixValidateShards(t *testing.T) { ii, err := NewBitPrefixWithShards(32) require.Nil(t, err) - require.NoError(t, ii.validateShard(logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 16}).Ptr())) - require.Error(t, ii.validateShard(logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 15}).Ptr())) + require.NoError(t, ii.validateShard(logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 16}).Ptr())) + require.Error(t, ii.validateShard(logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 15}).Ptr())) } func Test_BitPrefixCreation(t *testing.T) { @@ -212,9 +211,9 @@ func Test_BitPrefix_hash_mapping(t *testing.T) { []*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, - logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{ - Shard: int(expShard), - Of: requestedFactor, + logql.NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: expShard, + Of: uint32(requestedFactor), }).Ptr(), ) require.NoError(t, err) @@ -243,7 +242,7 @@ func Test_BitPrefixNoMatcherLookup(t *testing.T) { require.Nil(t, err) expShard := uint32(fp >> (64 - index.NewShard(0, 16).RequiredBits())) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), fp) - ids, err = ii.Lookup(nil, logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(expShard), Of: 16}).Ptr()) + ids, err = ii.Lookup(nil, logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: expShard, Of: 16}).Ptr()) require.Nil(t, err) require.Equal(t, fp, ids[0]) } @@ -265,9 +264,9 @@ func Test_BitPrefixConsistentMapping(t *testing.T) { b.Add(logproto.FromLabelsToLabelAdapters(lbs), fp) } - shardMax := 8 - for i := 0; i < shardMax; i++ { - shard := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shardMax := uint32(8) + for i := uint32(0); i < shardMax; i++ { + shard := logql.NewPowerOfTwoShard(index.ShardAnnotation{ Shard: i, Of: shardMax, }).Ptr() diff --git a/pkg/ingester/index/index.go b/pkg/ingester/index/index.go index 
064c0ddc45ba7..6536cc7f7c449 100644 --- a/pkg/ingester/index/index.go +++ b/pkg/ingester/index/index.go @@ -19,8 +19,8 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/storage/stores/series" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const DefaultIndexShards = 32 @@ -56,15 +56,15 @@ func NewWithShards(totalShards uint32) *InvertedIndex { } } -func (ii *InvertedIndex) getShards(shard *astmapper.ShardAnnotation) []*indexShard { +func (ii *InvertedIndex) getShards(shard *index.ShardAnnotation) []*indexShard { if shard == nil { return ii.shards } - totalRequested := int(ii.totalShards) / shard.Of + totalRequested := ii.totalShards / shard.Of result := make([]*indexShard, totalRequested) var j int - for i := 0; i < totalRequested; i++ { + for i := uint32(0); i < totalRequested; i++ { subShard := ((shard.Shard) + (i * shard.Of)) result[j] = ii.shards[subShard] j++ @@ -72,7 +72,7 @@ func (ii *InvertedIndex) getShards(shard *astmapper.ShardAnnotation) []*indexSha return result } -func (ii *InvertedIndex) validateShard(shard *logql.Shard) (*astmapper.ShardAnnotation, error) { +func (ii *InvertedIndex) validateShard(shard *logql.Shard) (*index.ShardAnnotation, error) { if shard == nil { return nil, nil } @@ -82,7 +82,7 @@ func (ii *InvertedIndex) validateShard(shard *logql.Shard) (*astmapper.ShardAnno return nil, errors.New("inverted index only supports shard annotations with `PowerOfTwo`") } - if int(ii.totalShards)%s.Of != 0 || uint32(s.Of) > ii.totalShards { + if ii.totalShards%s.Of != 0 || s.Of > ii.totalShards { return nil, fmt.Errorf("%w index_shard:%d query_shard:%v", ErrInvalidShardQuery, ii.totalShards, s) } return s, nil diff --git a/pkg/ingester/index/index_test.go b/pkg/ingester/index/index_test.go index 06625a357970d..f34633c0c6b54 100644 --- a/pkg/ingester/index/index_test.go +++ b/pkg/ingester/index/index_test.go @@ -11,26 +11,26 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" "github.com/grafana/loki/pkg/util" ) func Test_GetShards(t *testing.T) { for _, tt := range []struct { total uint32 - shard *astmapper.ShardAnnotation + shard *index.ShardAnnotation expected []uint32 }{ // equal factors - {16, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, - {16, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4}}, - {16, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15}}, + {16, &index.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0}}, + {16, &index.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4}}, + {16, &index.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15}}, // idx factor a larger multiple of schema factor - {32, &astmapper.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0, 16}}, - {32, &astmapper.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4, 20}}, - {32, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31}}, - {64, &astmapper.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31, 47, 63}}, + {32, &index.ShardAnnotation{Shard: 0, Of: 16}, []uint32{0, 16}}, + {32, &index.ShardAnnotation{Shard: 4, Of: 16}, []uint32{4, 20}}, + {32, &index.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31}}, + {64, &index.ShardAnnotation{Shard: 15, Of: 16}, []uint32{15, 31, 47, 63}}, } { tt := tt t.Run(tt.shard.String()+fmt.Sprintf("_total_%d", tt.total), 
func(t *testing.T) { @@ -48,7 +48,7 @@ func Test_GetShards(t *testing.T) { func Test_ValidateShards(t *testing.T) { ii := NewWithShards(32) _, err := ii.validateShard( - logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 16}).Ptr(), + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 16}).Ptr(), ) require.NoError(t, err) } @@ -112,7 +112,7 @@ func Test_hash_mapping(t *testing.T) { ii := NewWithShards(shard) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), 1) - x := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + x := logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: labelsSeriesIDHash(lbs) % 16, Of: 16}) res, err := ii.Lookup([]*labels.Matcher{{Type: labels.MatchEqual, Name: "compose_project", Value: "loki-tsdb-storage-s3"}}, &x) require.NoError(t, err) require.Len(t, res, 1) @@ -136,7 +136,7 @@ func Test_NoMatcherLookup(t *testing.T) { // with shard param ii = NewWithShards(16) ii.Add(logproto.FromLabelsToLabelAdapters(lbs), 1) - x := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(labelsSeriesIDHash(lbs) % 16), Of: 16}) + x := logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: labelsSeriesIDHash(lbs) % 16, Of: 16}) ids, err = ii.Lookup(nil, &x) require.Nil(t, err) require.Equal(t, model.Fingerprint(1), ids[0]) @@ -157,9 +157,9 @@ func Test_ConsistentMapping(t *testing.T) { shardMax := 8 for i := 0; i < shardMax; i++ { - shard := logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{ - Shard: i, - Of: shardMax, + shard := logql.NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: uint32(i), + Of: uint32(shardMax), }).Ptr() aIDs, err := a.Lookup([]*labels.Matcher{ diff --git a/pkg/ingester/index/multi_test.go b/pkg/ingester/index/multi_test.go index 50d5db945edb8..6be07effea8d0 100644 --- a/pkg/ingester/index/multi_test.go +++ b/pkg/ingester/index/multi_test.go @@ -11,7 +11,6 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/storage/config" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) @@ -131,7 +130,7 @@ func TestMultiIndex(t *testing.T) { labels.MustNewMatcher(labels.MatchEqual, "foo", "foo"), }, logql.NewPowerOfTwoShard( - astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}, + index.ShardAnnotation{Shard: expShard, Of: factor}, ).Ptr(), ) @@ -147,7 +146,7 @@ func TestMultiIndex(t *testing.T) { []*labels.Matcher{ labels.MustNewMatcher(labels.MatchEqual, "foo", "foo"), }, - logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: int(expShard), Of: int(factor)}).Ptr(), + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: expShard, Of: factor}).Ptr(), ) require.Nil(t, err) diff --git a/pkg/logql/downstream_test.go b/pkg/logql/downstream_test.go index 68afe83cceead..9dbf261668a40 100644 --- a/pkg/logql/downstream_test.go +++ b/pkg/logql/downstream_test.go @@ -15,7 +15,7 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var nilShardMetrics = NewShardMapperMetrics(nil) @@ -624,7 +624,7 @@ func TestFormat_ShardedExpr(t *testing.T) { name: "ConcatSampleExpr", in: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 3, }).Ptr(), @@ -640,7 
+640,7 @@ func TestFormat_ShardedExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 3, }).Ptr(), @@ -656,7 +656,7 @@ func TestFormat_ShardedExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 3, }).Ptr(), diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go index a1c17c86da036..3095fc0a1aafd 100644 --- a/pkg/logql/shardmapper.go +++ b/pkg/logql/shardmapper.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" util_log "github.com/grafana/loki/pkg/util/log" ) @@ -464,9 +464,9 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *syntax.RangeAggregationExpr, downstreams := make([]DownstreamSampleExpr, 0, shards) expr.Operation = syntax.OpRangeTypeQuantileSketch for shard := shards - 1; shard >= 0; shard-- { - s := NewPowerOfTwoShard(astmapper.ShardAnnotation{ - Shard: shard, - Of: shards, + s := NewPowerOfTwoShard(index.ShardAnnotation{ + Shard: uint32(shard), + Of: uint32(shards), }) downstreams = append(downstreams, DownstreamSampleExpr{ shard: &s, diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index c6b8e9c4b34cc..9f5757b7d8eed 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestShardedStringer(t *testing.T) { @@ -21,7 +21,7 @@ func TestShardedStringer(t *testing.T) { { in: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -31,7 +31,7 @@ func TestShardedStringer(t *testing.T) { }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -72,7 +72,7 @@ func TestMapSampleExpr(t *testing.T) { }, out: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -88,7 +88,7 @@ func TestMapSampleExpr(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -493,7 +493,7 @@ func TestMapping(t *testing.T) { in: `{foo="bar"}`, expr: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -503,7 +503,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: 
NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -519,7 +519,7 @@ func TestMapping(t *testing.T) { in: `{foo="bar"} |= "error"`, expr: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -540,7 +540,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatLogSelectorExpr{ DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -567,7 +567,7 @@ func TestMapping(t *testing.T) { in: `rate({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -583,7 +583,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -605,7 +605,7 @@ func TestMapping(t *testing.T) { in: `count_over_time({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -621,7 +621,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -646,7 +646,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -666,7 +666,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -697,7 +697,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeTopK, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -713,7 +713,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -739,7 +739,7 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -759,7 +759,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -791,7 +791,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: 
DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -811,7 +811,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -838,7 +838,7 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -858,7 +858,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -898,7 +898,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -920,7 +920,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -960,7 +960,7 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -980,7 +980,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1015,7 +1015,7 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1035,7 +1035,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1077,7 +1077,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1099,7 +1099,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1129,7 +1129,7 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1149,7 +1149,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - 
shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1191,7 +1191,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1213,7 +1213,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1242,7 +1242,7 @@ func TestMapping(t *testing.T) { Grouping: &syntax.Grouping{}, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1262,7 +1262,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1297,7 +1297,7 @@ func TestMapping(t *testing.T) { Operation: syntax.OpTypeSum, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1322,7 +1322,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1356,7 +1356,7 @@ func TestMapping(t *testing.T) { }, Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1378,7 +1378,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), @@ -1467,7 +1467,7 @@ func TestMapping(t *testing.T) { RHS: &syntax.VectorAggregationExpr{ Left: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 0, Of: 2, }).Ptr(), @@ -1490,7 +1490,7 @@ func TestMapping(t *testing.T) { }, next: &ConcatSampleExpr{ DownstreamSampleExpr: DownstreamSampleExpr{ - shard: NewPowerOfTwoShard(astmapper.ShardAnnotation{ + shard: NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }).Ptr(), diff --git a/pkg/logql/shards.go b/pkg/logql/shards.go index 7d35cea26d761..9265dac5f0e8a 100644 --- a/pkg/logql/shards.go +++ b/pkg/logql/shards.go @@ -12,6 +12,7 @@ import ( "github.com/grafana/loki/pkg/querier/astmapper" v1 "github.com/grafana/loki/pkg/storage/bloom/v1" "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) @@ -133,7 +134,7 @@ func (s PowerOfTwoStrategy) Shards(expr syntax.Expr) (Shards, uint64, error) { res := make(Shards, 0, factor) for i := 0; i < factor; i++ { - res = append(res, 
NewPowerOfTwoShard(astmapper.ShardAnnotation{Of: factor, Shard: i})) + res = append(res, NewPowerOfTwoShard(index.ShardAnnotation{Of: uint32(factor), Shard: uint32(i)})) } return res, bytesPerShard, nil } @@ -141,7 +142,7 @@ func (s PowerOfTwoStrategy) Shards(expr syntax.Expr) (Shards, uint64, error) { // Shard represents a shard annotation // It holds either a power of two shard (legacy) or a bounded shard type Shard struct { - PowerOfTwo *astmapper.ShardAnnotation + PowerOfTwo *index.ShardAnnotation Bounded *logproto.Shard } @@ -159,7 +160,7 @@ func (s *Shard) Match(fp model.Fingerprint) bool { return v1.BoundsFromProto(s.Bounded.Bounds).Match(fp) } - return s.PowerOfTwo.TSDB().Match(fp) + return s.PowerOfTwo.Match(fp) } func (s *Shard) GetFromThrough() (model.Fingerprint, model.Fingerprint) { @@ -167,7 +168,7 @@ func (s *Shard) GetFromThrough() (model.Fingerprint, model.Fingerprint) { return v1.BoundsFromProto(s.Bounded.Bounds).GetFromThrough() } - return s.PowerOfTwo.TSDB().GetFromThrough() + return s.PowerOfTwo.GetFromThrough() } // convenience method for unaddressability concerns using constructors in literals (tests) @@ -179,7 +180,7 @@ func NewBoundedShard(shard logproto.Shard) Shard { return Shard{Bounded: &shard} } -func NewPowerOfTwoShard(shard astmapper.ShardAnnotation) Shard { +func NewPowerOfTwoShard(shard index.ShardAnnotation) Shard { return Shard{PowerOfTwo: &shard} } @@ -236,8 +237,9 @@ func ParseShard(s string) (Shard, ShardVersion, error) { } old, v1Err := astmapper.ParseShard(s) + casted := old.TSDB() if v1Err == nil { - return Shard{PowerOfTwo: &old}, PowerOfTwoVersion, nil + return Shard{PowerOfTwo: &casted}, PowerOfTwoVersion, nil } err := errors.Wrap( diff --git a/pkg/logql/shards_test.go b/pkg/logql/shards_test.go index fd0adb35f881f..1a2d78889cc5a 100644 --- a/pkg/logql/shards_test.go +++ b/pkg/logql/shards_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestShardString(t *testing.T) { @@ -17,7 +17,7 @@ func TestShardString(t *testing.T) { }{ { shard: Shard{ - PowerOfTwo: &astmapper.ShardAnnotation{ + PowerOfTwo: &index.ShardAnnotation{ Shard: 1, Of: 2, }, @@ -62,7 +62,7 @@ func TestShardString(t *testing.T) { Max: 2, }, }, - PowerOfTwo: &astmapper.ShardAnnotation{ + PowerOfTwo: &index.ShardAnnotation{ Shard: 1, Of: 2, }, @@ -86,7 +86,7 @@ func TestParseShard(t *testing.T) { str: "1_of_2", version: PowerOfTwoVersion, exp: Shard{ - PowerOfTwo: &astmapper.ShardAnnotation{ + PowerOfTwo: &index.ShardAnnotation{ Shard: 1, Of: 2, }, @@ -140,11 +140,11 @@ func TestParseShards(t *testing.T) { strs: []string{"1_of_2", "1_of_2"}, version: PowerOfTwoVersion, exp: Shards{ - NewPowerOfTwoShard(astmapper.ShardAnnotation{ + NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }), - NewPowerOfTwoShard(astmapper.ShardAnnotation{ + NewPowerOfTwoShard(index.ShardAnnotation{ Shard: 1, Of: 2, }), diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 7f41e45be60d2..8154b18fb691a 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -18,7 +18,7 @@ import ( "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql/log" "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func NewMockQuerier(shards int, streams 
[]logproto.Stream) MockQuerier { @@ -34,7 +34,7 @@ type MockQuerier struct { streams []logproto.Stream } -func (q MockQuerier) extractOldShard(xs []string) (*astmapper.ShardAnnotation, error) { +func (q MockQuerier) extractOldShard(xs []string) (*index.ShardAnnotation, error) { parsed, version, err := ParseShards(xs) if err != nil { return nil, err @@ -60,7 +60,7 @@ func (q MockQuerier) SelectLogs(_ context.Context, req SelectLogParams) (iter.En matchers := expr.Matchers() - var shard *astmapper.ShardAnnotation + var shard *index.ShardAnnotation if len(req.Shards) > 0 { shard, err = q.extractOldShard(req.Shards) if err != nil { @@ -185,7 +185,7 @@ func (q MockQuerier) SelectSamples(_ context.Context, req SelectSampleParams) (i matchers := selector.Matchers() - var shard *astmapper.ShardAnnotation + var shard *index.ShardAnnotation if len(req.Shards) > 0 { shard, err = q.extractOldShard(req.Shards) if err != nil { diff --git a/pkg/querier/queryrange/downstreamer_test.go b/pkg/querier/queryrange/downstreamer_test.go index 8a305176b6870..95b79d72d30af 100644 --- a/pkg/querier/queryrange/downstreamer_test.go +++ b/pkg/querier/queryrange/downstreamer_test.go @@ -22,8 +22,8 @@ import ( "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/logqlmodel" "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func testSampleStreams() []queryrangebase.SampleStream { @@ -291,7 +291,7 @@ func TestInstanceFor(t *testing.T) { Params: logql.ParamsWithShardsOverride{ Params: newParams(), ShardsOverride: logql.Shards{ - logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 2}), + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 2}), }.Encode(), }, }, @@ -299,7 +299,7 @@ func TestInstanceFor(t *testing.T) { Params: logql.ParamsWithShardsOverride{ Params: newParams(), ShardsOverride: logql.Shards{ - logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 1, Of: 2}), + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 1, Of: 2}), }.Encode(), }, }, @@ -366,7 +366,7 @@ func TestInstanceDownstream(t *testing.T) { Params: logql.ParamsWithShardsOverride{ Params: logql.ParamsWithExpressionOverride{Params: params, ExpressionOverride: expr}, ShardsOverride: logql.Shards{ - logql.NewPowerOfTwoShard(astmapper.ShardAnnotation{Shard: 0, Of: 2}), + logql.NewPowerOfTwoShard(index.ShardAnnotation{Shard: 0, Of: 2}), }.Encode(), }, }, From 288c0061bf76da4ae753797fcf7ac3093ad39459 Mon Sep 17 00:00:00 2001 From: Sandeep Sukhani Date: Fri, 29 Mar 2024 11:10:13 +0530 Subject: [PATCH 34/54] feat: discovery of name of services emitting the logs (#12392) --- CHANGELOG.md | 1 + docs/sources/configure/_index.md | 6 + integration/cluster/cluster.go | 1 + pkg/distributor/distributor.go | 25 ++++- pkg/distributor/distributor_test.go | 163 +++++++++++++++++++++++++++- pkg/distributor/limits.go | 1 + pkg/distributor/validator.go | 2 + pkg/validation/limits.go | 18 +++ pkg/validation/limits_test.go | 9 +- 9 files changed, 219 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ca0216ae345e5..fdc5c9c29ed06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -62,6 +62,7 @@ * [11897](https://github.com/grafana/loki/pull/11897) **ashwanthgoli** Metadata: Introduces a separate split interval of `split_recent_metadata_queries_by_interval` for `recent_metadata_query_window` to help with caching 
recent metadata query results. * [11970](https://github.com/grafana/loki/pull/11897) **masslessparticle** Ksonnet: Introduces memory limits to the compactor configuration to avoid unbounded memory usage. * [12318](https://github.com/grafana/loki/pull/12318) **DylanGuedes** Memcached: Add mTLS support. +* [12392](https://github.com/grafana/loki/pull/12392) **sandeepsukhani** Detect name of service emitting logs and add it as a label. ##### Fixes * [11074](https://github.com/grafana/loki/pull/11074) **hainenber** Fix panic in lambda-promtail due to mishandling of empty DROP_LABELS env var. diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 6a4fd280c0a59..dc0716a34fa23 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2824,6 +2824,12 @@ The `limits_config` block configures global and per-tenant limits in Loki. # CLI flag: -validation.increment-duplicate-timestamps [increment_duplicate_timestamp: | default = false] +# If no service_name label exists, Loki maps a single label from the configured +# list to service_name. If none of the configured labels exist in the stream, +# label is set to unknown_service. Empty list disables setting the label. +# CLI flag: -validation.discover-service-name +[discover_service_name: | default = [service app application name app_kubernetes_io_name container container_name component workload job]] + # Maximum number of active streams per user, per ingester. 0 to disable. # CLI flag: -ingester.max-streams-per-user [max_streams_per_user: | default = 0] diff --git a/integration/cluster/cluster.go b/integration/cluster/cluster.go index c7a0ba2d17dd7..5e29413a68c62 100644 --- a/integration/cluster/cluster.go +++ b/integration/cluster/cluster.go @@ -62,6 +62,7 @@ limits_config: ingestion_burst_size_mb: 50 reject_old_samples: false allow_structured_metadata: true + discover_service_name: otlp_config: resource_attributes: attributes_config: diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index 818c0fe735ae8..9b34913d42a19 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -54,6 +54,9 @@ const ( ringKey = "distributor" ringAutoForgetUnhealthyPeriods = 2 + + labelServiceName = "service_name" + serviceUnknown = "unknown_service" ) var ( @@ -348,7 +351,7 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log d.truncateLines(validationContext, &stream) var lbs labels.Labels - lbs, stream.Labels, stream.Hash, err = d.parseStreamLabels(validationContext, stream.Labels, &stream) + lbs, stream.Labels, stream.Hash, err = d.parseStreamLabels(validationContext, stream.Labels, stream) if err != nil { d.writeFailuresManager.Log(tenantID, err) validationErrors.Add(err) @@ -425,7 +428,7 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log if d.usageTracker != nil { for _, stream := range req.Streams { - lbs, _, _, err := d.parseStreamLabels(validationContext, stream.Labels, &stream) + lbs, _, _, err := d.parseStreamLabels(validationContext, stream.Labels, stream) if err != nil { continue } @@ -717,7 +720,7 @@ type labelData struct { hash uint64 } -func (d *Distributor) parseStreamLabels(vContext validationContext, key string, stream *logproto.Stream) (labels.Labels, string, uint64, error) { +func (d *Distributor) parseStreamLabels(vContext validationContext, key string, stream logproto.Stream) (labels.Labels, string, uint64, error) { if val, ok := d.labelCache.Get(key); ok { labelVal := 
val.(labelData) return labelVal.ls, labelVal.ls.String(), labelVal.hash, nil @@ -728,10 +731,24 @@ func (d *Distributor) parseStreamLabels(vContext validationContext, key string, return nil, "", 0, fmt.Errorf(validation.InvalidLabelsErrorMsg, key, err) } - if err := d.validator.ValidateLabels(vContext, ls, *stream); err != nil { + if err := d.validator.ValidateLabels(vContext, ls, stream); err != nil { return nil, "", 0, err } + // We do not want to count service_name added by us in the stream limit so adding it after validating original labels. + if !ls.Has(labelServiceName) && len(vContext.discoverServiceName) > 0 { + serviceName := serviceUnknown + for _, labelName := range vContext.discoverServiceName { + if labelVal := ls.Get(labelName); labelVal != "" { + serviceName = labelVal + break + } + } + + ls = labels.NewBuilder(ls).Set(labelServiceName, serviceName).Labels() + stream.Labels = ls.String() + } + lsHash := ls.Hash() d.labelCache.Add(key, labelData{ls, lsHash}) diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go index 04747ffb72334..81a7fb09b94a5 100644 --- a/pkg/distributor/distributor_test.go +++ b/pkg/distributor/distributor_test.go @@ -23,6 +23,7 @@ import ( "github.com/grafana/dskit/user" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/grpc" @@ -98,6 +99,7 @@ func TestDistributor(t *testing.T) { t.Run(fmt.Sprintf("[%d](lines=%v)", i, tc.lines), func(t *testing.T) { limits := &validation.Limits{} flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil limits.IngestionRateMB = ingestionRateLimit limits.IngestionBurstSizeMB = ingestionRateLimit limits.MaxLineSize = fe.ByteSize(tc.maxLineSize) @@ -134,13 +136,19 @@ func TestDistributor(t *testing.T) { func Test_IncrementTimestamp(t *testing.T) { incrementingDisabled := &validation.Limits{} flagext.DefaultValues(incrementingDisabled) + incrementingDisabled.DiscoverServiceName = nil incrementingDisabled.RejectOldSamples = false incrementingEnabled := &validation.Limits{} flagext.DefaultValues(incrementingEnabled) + incrementingEnabled.DiscoverServiceName = nil incrementingEnabled.RejectOldSamples = false incrementingEnabled.IncrementDuplicateTimestamp = true + defaultLimits := &validation.Limits{} + flagext.DefaultValues(defaultLimits) + now := time.Now() + tests := map[string]struct { limits *validation.Limits push *logproto.PushRequest @@ -386,6 +394,34 @@ func Test_IncrementTimestamp(t *testing.T) { }, }, }, + "default limit adding service_name label": { + limits: defaultLimits, + push: &logproto.PushRequest{ + Streams: []logproto.Stream{ + { + Labels: "{job=\"foo\"}", + Entries: []logproto.Entry{ + {Timestamp: now.Add(-2 * time.Second), Line: "hey1"}, + {Timestamp: now.Add(-time.Second), Line: "hey2"}, + {Timestamp: now, Line: "hey3"}, + }, + }, + }, + }, + expectedPush: &logproto.PushRequest{ + Streams: []logproto.Stream{ + { + Labels: "{job=\"foo\", service_name=\"foo\"}", + Hash: 0x86ca305b6d86e8b0, + Entries: []logproto.Entry{ + {Timestamp: now.Add(-2 * time.Second), Line: "hey1"}, + {Timestamp: now.Add(-time.Second), Line: "hey2"}, + {Timestamp: now, Line: "hey3"}, + }, + }, + }, + }, + }, } for testName, testData := range tests { @@ -405,6 +441,7 @@ func Test_IncrementTimestamp(t *testing.T) { func TestDistributorPushConcurrently(t *testing.T) { limits := &validation.Limits{} 
flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil distributors, ingesters := prepare(t, 1, 5, limits, nil) @@ -497,6 +534,7 @@ func TestDistributorPushErrors(t *testing.T) { func Test_SortLabelsOnPush(t *testing.T) { limits := &validation.Limits{} flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil ingester := &mockIngester{} distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) @@ -788,13 +826,136 @@ func Benchmark_SortLabelsOnPush(b *testing.B) { for n := 0; n < b.N; n++ { stream := request.Streams[0] stream.Labels = `{buzz="f", a="b"}` - _, _, _, err := d.parseStreamLabels(vCtx, stream.Labels, &stream) + _, _, _, err := d.parseStreamLabels(vCtx, stream.Labels, stream) if err != nil { panic("parseStreamLabels fail,err:" + err.Error()) } } } +func TestParseStreamLabels(t *testing.T) { + defaultLimit := &validation.Limits{} + flagext.DefaultValues(defaultLimit) + + for _, tc := range []struct { + name string + origLabels string + expectedLabels labels.Labels + expectedErr error + generateLimits func() *validation.Limits + }{ + { + name: "service name label mapping disabled", + generateLimits: func() *validation.Limits { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil + return limits + }, + origLabels: `{foo="bar"}`, + expectedLabels: labels.Labels{ + { + Name: "foo", + Value: "bar", + }, + }, + }, + { + name: "no labels defined - service name label mapping disabled", + generateLimits: func() *validation.Limits { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + limits.DiscoverServiceName = nil + return limits + }, + origLabels: `{}`, + expectedErr: fmt.Errorf(validation.MissingLabelsErrorMsg), + }, + { + name: "service name label enabled", + origLabels: `{foo="bar"}`, + generateLimits: func() *validation.Limits { + return defaultLimit + }, + expectedLabels: labels.Labels{ + { + Name: "foo", + Value: "bar", + }, + { + Name: labelServiceName, + Value: serviceUnknown, + }, + }, + }, + { + name: "service name label should not get counted against max labels count", + origLabels: `{foo="bar"}`, + generateLimits: func() *validation.Limits { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + limits.MaxLabelNamesPerSeries = 1 + return limits + }, + expectedLabels: labels.Labels{ + { + Name: "foo", + Value: "bar", + }, + { + Name: labelServiceName, + Value: serviceUnknown, + }, + }, + }, + { + name: "use label service as service name", + origLabels: `{container="nginx", foo="bar", service="auth"}`, + generateLimits: func() *validation.Limits { + return defaultLimit + }, + expectedLabels: labels.Labels{ + { + Name: "container", + Value: "nginx", + }, + { + Name: "foo", + Value: "bar", + }, + { + Name: "service", + Value: "auth", + }, + { + Name: labelServiceName, + Value: "auth", + }, + }, + }, + } { + limits := tc.generateLimits() + distributors, _ := prepare(&testing.T{}, 1, 5, limits, nil) + d := distributors[0] + + vCtx := d.validator.getValidationContextForTime(testTime, "123") + + t.Run(tc.name, func(t *testing.T) { + lbs, lbsString, hash, err := d.parseStreamLabels(vCtx, tc.origLabels, logproto.Stream{ + Labels: tc.origLabels, + }) + if tc.expectedErr != nil { + require.Equal(t, tc.expectedErr, err) + return + } + require.NoError(t, err) + require.Equal(t, tc.expectedLabels.String(), lbsString) + require.Equal(t, tc.expectedLabels, lbs) + require.Equal(t, tc.expectedLabels.Hash(), hash) + }) + } +} + func 
Benchmark_Push(b *testing.B) { limits := &validation.Limits{} flagext.DefaultValues(limits) diff --git a/pkg/distributor/limits.go b/pkg/distributor/limits.go index 6db6995662dd2..d2f655f1c8329 100644 --- a/pkg/distributor/limits.go +++ b/pkg/distributor/limits.go @@ -22,6 +22,7 @@ type Limits interface { RejectOldSamplesMaxAge(userID string) time.Duration IncrementDuplicateTimestamps(userID string) bool + DiscoverServiceName(userID string) []string ShardStreams(userID string) *shardstreams.Config IngestionRateStrategy() string diff --git a/pkg/distributor/validator.go b/pkg/distributor/validator.go index 7e7006c836201..ca2186c1d2626 100644 --- a/pkg/distributor/validator.go +++ b/pkg/distributor/validator.go @@ -43,6 +43,7 @@ type validationContext struct { maxLabelValueLength int incrementDuplicateTimestamps bool + discoverServiceName []string allowStructuredMetadata bool maxStructuredMetadataSize int @@ -63,6 +64,7 @@ func (v Validator) getValidationContextForTime(now time.Time, userID string) val maxLabelNameLength: v.MaxLabelNameLength(userID), maxLabelValueLength: v.MaxLabelValueLength(userID), incrementDuplicateTimestamps: v.IncrementDuplicateTimestamps(userID), + discoverServiceName: v.DiscoverServiceName(userID), allowStructuredMetadata: v.AllowStructuredMetadata(userID), maxStructuredMetadataSize: v.MaxStructuredMetadataSize(userID), maxStructuredMetadataCount: v.MaxStructuredMetadataCount(userID), diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index 757f88d25c608..8c2197113a41f 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -80,6 +80,7 @@ type Limits struct { MaxLineSize flagext.ByteSize `yaml:"max_line_size" json:"max_line_size"` MaxLineSizeTruncate bool `yaml:"max_line_size_truncate" json:"max_line_size_truncate"` IncrementDuplicateTimestamp bool `yaml:"increment_duplicate_timestamp" json:"increment_duplicate_timestamp"` + DiscoverServiceName []string `yaml:"discover_service_name" json:"discover_service_name"` // Ingester enforced limits. MaxLocalStreamsPerUser int `yaml:"max_streams_per_user" json:"max_streams_per_user"` @@ -240,6 +241,19 @@ func (l *Limits) RegisterFlags(f *flag.FlagSet) { f.IntVar(&l.MaxLabelNamesPerSeries, "validation.max-label-names-per-series", 15, "Maximum number of label names per series.") f.BoolVar(&l.RejectOldSamples, "validation.reject-old-samples", true, "Whether or not old samples will be rejected.") f.BoolVar(&l.IncrementDuplicateTimestamp, "validation.increment-duplicate-timestamps", false, "Alter the log line timestamp during ingestion when the timestamp is the same as the previous entry for the same stream. When enabled, if a log line in a push request has the same timestamp as the previous line for the same stream, one nanosecond is added to the log line. This will preserve the received order of log lines with the exact same timestamp when they are queried, by slightly altering their stored timestamp. NOTE: This is imperfect, because Loki accepts out of order writes, and another push request for the same stream could contain duplicate timestamps to existing entries and they will not be incremented.") + l.DiscoverServiceName = []string{ + "service", + "app", + "application", + "name", + "app_kubernetes_io_name", + "container", + "container_name", + "component", + "workload", + "job", + } + f.Var((*dskit_flagext.StringSlice)(&l.DiscoverServiceName), "validation.discover-service-name", "If no service_name label exists, Loki maps a single label from the configured list to service_name. 
If none of the configured labels exist in the stream, label is set to unknown_service. Empty list disables setting the label.") _ = l.RejectOldSamplesMaxAge.Set("7d") f.Var(&l.RejectOldSamplesMaxAge, "validation.reject-old-samples.max-age", "Maximum accepted sample age before rejecting.") @@ -897,6 +911,10 @@ func (o *Overrides) IncrementDuplicateTimestamps(userID string) bool { return o.getOverridesForUser(userID).IncrementDuplicateTimestamp } +func (o *Overrides) DiscoverServiceName(userID string) []string { + return o.getOverridesForUser(userID).DiscoverServiceName +} + // VolumeEnabled returns whether volume endpoints are enabled for a user. func (o *Overrides) VolumeEnabled(userID string) bool { return o.getOverridesForUser(userID).VolumeEnabled diff --git a/pkg/validation/limits_test.go b/pkg/validation/limits_test.go index 9096d9b179444..d8527bcdaa59c 100644 --- a/pkg/validation/limits_test.go +++ b/pkg/validation/limits_test.go @@ -215,6 +215,7 @@ ruler_remote_write_headers: `, exp: Limits{ RulerRemoteWriteHeaders: OverwriteMarshalingStringMap{map[string]string{"foo": "bar"}}, + DiscoverServiceName: []string{}, // Rest from new defaults StreamRetention: []StreamRetention{ @@ -232,6 +233,7 @@ ruler_remote_write_headers: ruler_remote_write_headers: `, exp: Limits{ + DiscoverServiceName: []string{}, // Rest from new defaults StreamRetention: []StreamRetention{ @@ -251,6 +253,7 @@ retention_stream: selector: '{foo="bar"}' `, exp: Limits{ + DiscoverServiceName: []string{}, StreamRetention: []StreamRetention{ { Period: model.Duration(24 * time.Hour), @@ -269,7 +272,8 @@ retention_stream: reject_old_samples: true `, exp: Limits{ - RejectOldSamples: true, + RejectOldSamples: true, + DiscoverServiceName: []string{}, // Rest from new defaults RulerRemoteWriteHeaders: OverwriteMarshalingStringMap{map[string]string{"a": "b"}}, @@ -288,7 +292,8 @@ reject_old_samples: true query_timeout: 5m `, exp: Limits{ - QueryTimeout: model.Duration(5 * time.Minute), + DiscoverServiceName: []string{}, + QueryTimeout: model.Duration(5 * time.Minute), // Rest from new defaults. 
RulerRemoteWriteHeaders: OverwriteMarshalingStringMap{map[string]string{"a": "b"}}, From d972ce0f2d726ead19be3ad4fd849dd1a688c0a8 Mon Sep 17 00:00:00 2001 From: Salva Corts Date: Fri, 29 Mar 2024 13:23:31 +0100 Subject: [PATCH 35/54] feat: Bring back bloom components to backend target in SSD (#12400) --- pkg/loki/loki.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/loki/loki.go b/pkg/loki/loki.go index 4e2b7df3ae35f..3d8bd9f9bec14 100644 --- a/pkg/loki/loki.go +++ b/pkg/loki/loki.go @@ -663,7 +663,7 @@ func (t *Loki) setupModuleManager() error { Read: {QueryFrontend, Querier}, Write: {Ingester, Distributor}, - Backend: {QueryScheduler, Ruler, Compactor, IndexGateway}, + Backend: {QueryScheduler, Ruler, Compactor, IndexGateway, BloomGateway, BloomCompactor}, All: {QueryScheduler, QueryFrontend, Querier, Ingester, Distributor, Ruler, Compactor}, } From cf71ac7114a6fb3a2c4bf71c7fbcd9c4e91a05eb Mon Sep 17 00:00:00 2001 From: Salva Corts Date: Fri, 29 Mar 2024 15:43:18 +0100 Subject: [PATCH 36/54] fix(blooms): Remove blocks not matching any series in task (#12401) --- pkg/bloomgateway/processor.go | 2 +- pkg/bloomgateway/util.go | 2 +- pkg/bloomgateway/util_test.go | 20 ++++++++++++++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/pkg/bloomgateway/processor.go b/pkg/bloomgateway/processor.go index 5cf805b11a74d..5d43e79eece3e 100644 --- a/pkg/bloomgateway/processor.go +++ b/pkg/bloomgateway/processor.go @@ -175,7 +175,7 @@ func (p *processor) processBlock(_ context.Context, blockQuerier *v1.BlockQuerie if sp := opentracing.SpanFromContext(task.ctx); sp != nil { md, _ := blockQuerier.Metadata() blk := bloomshipper.BlockRefFrom(task.Tenant, task.table.String(), md) - sp.LogKV("process block", blk.String()) + sp.LogKV("process block", blk.String(), "series", len(task.series)) } it := v1.NewPeekingIter(task.RequestIter(tokenizer)) diff --git a/pkg/bloomgateway/util.go b/pkg/bloomgateway/util.go index 5f2f2e31f79c2..e07c5740fdc6d 100644 --- a/pkg/bloomgateway/util.go +++ b/pkg/bloomgateway/util.go @@ -78,7 +78,7 @@ func partitionTasks(tasks []Task, blocks []bloomshipper.BlockRef) []blockWithTas }) // All fingerprints fall outside of the consumer's range - if min == len(refs) || max == 0 { + if min == len(refs) || max == 0 || min == max { continue } diff --git a/pkg/bloomgateway/util_test.go b/pkg/bloomgateway/util_test.go index 9bd158219e134..f624d337092b0 100644 --- a/pkg/bloomgateway/util_test.go +++ b/pkg/bloomgateway/util_test.go @@ -136,6 +136,26 @@ func TestPartitionTasks(t *testing.T) { require.Len(t, res.tasks[0].series, 90) } }) + + t.Run("block series before and after task series", func(t *testing.T) { + bounds := []bloomshipper.BlockRef{ + mkBlockRef(100, 200), + } + + tasks := []Task{ + { + series: []*logproto.GroupedChunkRefs{ + {Fingerprint: 50}, + {Fingerprint: 75}, + {Fingerprint: 250}, + {Fingerprint: 300}, + }, + }, + } + + results := partitionTasks(tasks, bounds) + require.Len(t, results, 0) + }) } func TestPartitionRequest(t *testing.T) { From c8c8477c93fe2728d0c7517b6f85a707d5e5a2a7 Mon Sep 17 00:00:00 2001 From: Ashwanth Date: Fri, 29 Mar 2024 21:57:22 +0530 Subject: [PATCH 37/54] chore(stats): add pipeline_wrapper_filtered_lines (#12399) --- pkg/logql/metrics.go | 1 + pkg/logqlmodel/stats/context.go | 9 + pkg/logqlmodel/stats/context_test.go | 36 ++-- pkg/logqlmodel/stats/stats.pb.go | 213 +++++++++++++--------- pkg/logqlmodel/stats/stats.proto | 3 + pkg/querier/queryrange/codec_test.go | 20 +- 
pkg/querier/queryrange/prometheus_test.go | 2 + pkg/util/marshal/legacy/marshal_test.go | 2 + pkg/util/marshal/marshal_test.go | 2 + 9 files changed, 182 insertions(+), 106 deletions(-) diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index 19c1875475351..b4cc8632ff918 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -162,6 +162,7 @@ func RecordRangeAndInstantQueryMetrics( "splits", stats.Summary.Splits, "shards", stats.Summary.Shards, "query_referenced_structured_metadata", stats.QueryReferencedStructuredMetadata(), + "pipeline_wrapper_filtered_lines", stats.PipelineWrapperFilteredLines(), "chunk_refs_fetch_time", stats.ChunkRefsFetchTime(), "cache_chunk_req", stats.Caches.Chunk.EntriesRequested, "cache_chunk_hit", stats.Caches.Chunk.EntriesFound, diff --git a/pkg/logqlmodel/stats/context.go b/pkg/logqlmodel/stats/context.go index 7f2b9cfaf40bb..f895f7fc6c9b0 100644 --- a/pkg/logqlmodel/stats/context.go +++ b/pkg/logqlmodel/stats/context.go @@ -206,6 +206,7 @@ func (s *Store) Merge(m Store) { s.TotalChunksRef += m.TotalChunksRef s.TotalChunksDownloaded += m.TotalChunksDownloaded s.CongestionControlLatency += m.CongestionControlLatency + s.PipelineWrapperFilteredLines += m.PipelineWrapperFilteredLines s.ChunksDownloadTime += m.ChunksDownloadTime s.ChunkRefsFetchTime += m.ChunkRefsFetchTime s.Chunk.HeadChunkBytes += m.Chunk.HeadChunkBytes @@ -312,6 +313,10 @@ func (r Result) CongestionControlLatency() time.Duration { return time.Duration(r.Querier.Store.CongestionControlLatency) } +func (r Result) PipelineWrapperFilteredLines() int64 { + return r.Querier.Store.PipelineWrapperFilteredLines + r.Ingester.Store.PipelineWrapperFilteredLines +} + func (r Result) TotalDuplicates() int64 { return r.Querier.Store.Chunk.TotalDuplicates + r.Ingester.Store.Chunk.TotalDuplicates } @@ -397,6 +402,10 @@ func (c *Context) AddCongestionControlLatency(i time.Duration) { atomic.AddInt64(&c.store.CongestionControlLatency, int64(i)) } +func (c *Context) AddPipelineWrapperFilterdLines(i int64) { + atomic.AddInt64(&c.store.PipelineWrapperFilteredLines, i) +} + func (c *Context) AddChunksDownloaded(i int64) { atomic.AddInt64(&c.store.TotalChunksDownloaded, i) } diff --git a/pkg/logqlmodel/stats/context_test.go b/pkg/logqlmodel/stats/context_test.go index e40a5372a8968..b7e37e311718c 100644 --- a/pkg/logqlmodel/stats/context_test.go +++ b/pkg/logqlmodel/stats/context_test.go @@ -26,6 +26,7 @@ func TestResult(t *testing.T) { stats.AddCacheRequest(IndexCache, 4) stats.AddCacheRequest(ResultCache, 1) stats.SetQueryReferencedStructuredMetadata() + stats.AddPipelineWrapperFilterdLines(1) fakeIngesterQuery(ctx) fakeIngesterQuery(ctx) @@ -39,6 +40,7 @@ func TestResult(t *testing.T) { TotalLinesSent: 60, TotalReached: 2, Store: Store{ + PipelineWrapperFilteredLines: 2, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -51,10 +53,11 @@ func TestResult(t *testing.T) { }, Querier: Querier{ Store: Store{ - TotalChunksRef: 50, - TotalChunksDownloaded: 60, - ChunksDownloadTime: time.Second.Nanoseconds(), - QueryReferencedStructured: true, + TotalChunksRef: 50, + TotalChunksDownloaded: 60, + ChunksDownloadTime: time.Second.Nanoseconds(), + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 1, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -148,6 +151,7 @@ func fakeIngesterQuery(ctx context.Context) { TotalBatches: 25, TotalLinesSent: 30, Store: Store{ + PipelineWrapperFilteredLines: 1, Chunk: Chunk{ HeadChunkBytes: 5, HeadChunkLines: 10, @@ -173,6 +177,7 @@ func 
TestResult_Merge(t *testing.T) { TotalLinesSent: 60, TotalReached: 2, Store: Store{ + PipelineWrapperFilteredLines: 4, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -185,10 +190,11 @@ func TestResult_Merge(t *testing.T) { }, Querier: Querier{ Store: Store{ - TotalChunksRef: 50, - TotalChunksDownloaded: 60, - ChunksDownloadTime: time.Second.Nanoseconds(), - QueryReferencedStructured: true, + TotalChunksRef: 50, + TotalChunksDownloaded: 60, + ChunksDownloadTime: time.Second.Nanoseconds(), + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 2, Chunk: Chunk{ HeadChunkBytes: 10, HeadChunkLines: 20, @@ -235,6 +241,7 @@ func TestResult_Merge(t *testing.T) { TotalBatches: 2 * 50, TotalLinesSent: 2 * 60, Store: Store{ + PipelineWrapperFilteredLines: 8, Chunk: Chunk{ HeadChunkBytes: 2 * 10, HeadChunkLines: 2 * 20, @@ -248,10 +255,11 @@ func TestResult_Merge(t *testing.T) { }, Querier: Querier{ Store: Store{ - TotalChunksRef: 2 * 50, - TotalChunksDownloaded: 2 * 60, - ChunksDownloadTime: 2 * time.Second.Nanoseconds(), - QueryReferencedStructured: true, + TotalChunksRef: 2 * 50, + TotalChunksDownloaded: 2 * 60, + ChunksDownloadTime: 2 * time.Second.Nanoseconds(), + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 4, Chunk: Chunk{ HeadChunkBytes: 2 * 10, HeadChunkLines: 2 * 20, @@ -306,13 +314,15 @@ func TestIngester(t *testing.T) { statsCtx.AddDuplicates(10) statsCtx.AddHeadChunkBytes(200) statsCtx.SetQueryReferencedStructuredMetadata() + statsCtx.AddPipelineWrapperFilterdLines(1) require.Equal(t, Ingester{ TotalReached: 1, TotalChunksMatched: 100, TotalBatches: 25, TotalLinesSent: 30, Store: Store{ - QueryReferencedStructured: true, + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 1, Chunk: Chunk{ HeadChunkBytes: 200, CompressedBytes: 100, diff --git a/pkg/logqlmodel/stats/stats.pb.go b/pkg/logqlmodel/stats/stats.pb.go index e26bedf55d33b..facdcb2a910e9 100644 --- a/pkg/logqlmodel/stats/stats.pb.go +++ b/pkg/logqlmodel/stats/stats.pb.go @@ -542,6 +542,8 @@ type Store struct { ChunkRefsFetchTime int64 `protobuf:"varint,5,opt,name=chunkRefsFetchTime,proto3" json:"chunkRefsFetchTime"` // Time spent being blocked on congestion control. CongestionControlLatency int64 `protobuf:"varint,6,opt,name=congestionControlLatency,proto3" json:"congestionControlLatency"` + // Total number of lines filtered by pipeline wrapper. + PipelineWrapperFilteredLines int64 `protobuf:"varint,7,opt,name=pipelineWrapperFilteredLines,proto3" json:"pipelineWrapperFilteredLines"` } func (m *Store) Reset() { *m = Store{} } @@ -625,6 +627,13 @@ func (m *Store) GetCongestionControlLatency() int64 { return 0 } +func (m *Store) GetPipelineWrapperFilteredLines() int64 { + if m != nil { + return m.PipelineWrapperFilteredLines + } + return 0 +} + type Chunk struct { // Total bytes processed but was already in memory (found in the headchunk). Includes structured metadata bytes. 
HeadChunkBytes int64 `protobuf:"varint,4,opt,name=headChunkBytes,proto3" json:"headChunkBytes"` @@ -855,90 +864,92 @@ func init() { func init() { proto.RegisterFile("pkg/logqlmodel/stats/stats.proto", fileDescriptor_6cdfe5d2aea33ebb) } var fileDescriptor_6cdfe5d2aea33ebb = []byte{ - // 1325 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0xcf, 0x8f, 0xdb, 0x44, - 0x14, 0x4e, 0x36, 0x75, 0xb2, 0x9d, 0xcd, 0xee, 0xb6, 0xb3, 0x5b, 0x9a, 0x52, 0x64, 0x2f, 0x81, - 0x8a, 0x22, 0xd0, 0x46, 0x05, 0x24, 0x04, 0xa2, 0x12, 0x72, 0xca, 0x4a, 0x95, 0xb6, 0xa2, 0xbc, - 0x80, 0x84, 0xe0, 0xe4, 0xd8, 0xb3, 0x89, 0x55, 0xc7, 0xce, 0xda, 0xe3, 0xd2, 0x95, 0x90, 0xe0, - 0x4f, 0xe0, 0xc4, 0x85, 0x3b, 0xe2, 0xc2, 0x89, 0x13, 0x67, 0x2e, 0x3d, 0xf6, 0xd8, 0x93, 0x45, - 0xd3, 0x0b, 0xf2, 0xa9, 0x7f, 0x02, 0x9a, 0x37, 0x13, 0xff, 0x8a, 0xd3, 0xee, 0x25, 0x9e, 0xf7, - 0xbd, 0xef, 0x7b, 0xf3, 0xf3, 0xbd, 0x99, 0x90, 0x83, 0xf9, 0x83, 0xc9, 0xc0, 0x0b, 0x26, 0xa7, - 0xde, 0x2c, 0x70, 0x98, 0x37, 0x88, 0xb8, 0xc5, 0x23, 0xf9, 0x7b, 0x38, 0x0f, 0x03, 0x1e, 0x50, - 0x0d, 0x8d, 0xd7, 0xf7, 0x27, 0xc1, 0x24, 0x40, 0x64, 0x20, 0x5a, 0xd2, 0xd9, 0xff, 0x7d, 0x83, - 0xb4, 0x81, 0x45, 0xb1, 0xc7, 0xe9, 0x27, 0xa4, 0x13, 0xc5, 0xb3, 0x99, 0x15, 0x9e, 0xf5, 0x9a, - 0x07, 0xcd, 0x9b, 0x5b, 0x1f, 0xec, 0x1c, 0xca, 0x30, 0x23, 0x89, 0x9a, 0xbb, 0x8f, 0x13, 0xa3, - 0x91, 0x26, 0xc6, 0x92, 0x06, 0xcb, 0x86, 0x90, 0x9e, 0xc6, 0x2c, 0x74, 0x59, 0xd8, 0xdb, 0x28, - 0x49, 0xbf, 0x92, 0x68, 0x2e, 0x55, 0x34, 0x58, 0x36, 0xe8, 0x6d, 0xb2, 0xe9, 0xfa, 0x13, 0x16, - 0x71, 0x16, 0xf6, 0x5a, 0xa8, 0xdd, 0x55, 0xda, 0xbb, 0x0a, 0x36, 0x2f, 0x29, 0x71, 0x46, 0x84, - 0xac, 0x45, 0x3f, 0x22, 0x6d, 0xdb, 0xb2, 0xa7, 0x2c, 0xea, 0x5d, 0x40, 0xf1, 0xb6, 0x12, 0x0f, - 0x11, 0x34, 0xb7, 0x95, 0x54, 0x43, 0x12, 0x28, 0x2e, 0xbd, 0x45, 0x34, 0xd7, 0x77, 0xd8, 0xa3, - 0x9e, 0x86, 0xa2, 0x6e, 0xd6, 0xa3, 0xc3, 0x1e, 0xe5, 0x1a, 0xa4, 0x80, 0xfc, 0xf4, 0x7f, 0xbb, - 0x40, 0xda, 0xc3, 0x4c, 0x6d, 0x4f, 0x63, 0xff, 0x81, 0x5a, 0xa6, 0x6e, 0xb1, 0xcb, 0x42, 0x8f, - 0x82, 0x02, 0xf2, 0x93, 0x77, 0xb8, 0xf1, 0x32, 0x49, 0xb1, 0x43, 0x31, 0xb3, 0x10, 0x37, 0x46, - 0x2d, 0x4b, 0x59, 0xb3, 0xa3, 0x34, 0x8a, 0x03, 0xea, 0x4b, 0x87, 0x64, 0x0b, 0x69, 0x72, 0x4f, - 0xd5, 0xa2, 0x94, 0xa5, 0x7b, 0x4a, 0x5a, 0x24, 0x42, 0xd1, 0xa0, 0x47, 0xa4, 0xfb, 0x30, 0xf0, - 0xe2, 0x19, 0x53, 0x51, 0xb4, 0x9a, 0x28, 0xfb, 0x2a, 0x4a, 0x89, 0x09, 0x25, 0x4b, 0xc4, 0x89, - 0xc4, 0x2e, 0x2f, 0x47, 0xd3, 0x7e, 0x59, 0x9c, 0x22, 0x13, 0x4a, 0x96, 0x98, 0x94, 0x67, 0x8d, - 0x99, 0xa7, 0xc2, 0x74, 0x5e, 0x36, 0xa9, 0x02, 0x11, 0x8a, 0x06, 0xfd, 0x9e, 0xec, 0xb9, 0x7e, - 0xc4, 0x2d, 0x9f, 0xdf, 0x63, 0x3c, 0x74, 0x6d, 0x15, 0x6c, 0xb3, 0x26, 0xd8, 0x75, 0x15, 0xac, - 0x4e, 0x00, 0x75, 0x60, 0xff, 0xef, 0x36, 0xe9, 0xa8, 0x34, 0xa1, 0xdf, 0x90, 0xab, 0xe3, 0x33, - 0xce, 0xa2, 0xfb, 0x61, 0x60, 0xb3, 0x28, 0x62, 0xce, 0x7d, 0x16, 0x8e, 0x98, 0x1d, 0xf8, 0x0e, - 0x1e, 0x98, 0x96, 0x79, 0x3d, 0x4d, 0x8c, 0x75, 0x14, 0x58, 0xe7, 0x10, 0x61, 0x3d, 0xd7, 0xaf, - 0x0d, 0xbb, 0x91, 0x87, 0x5d, 0x43, 0x81, 0x75, 0x0e, 0x7a, 0x97, 0xec, 0xf1, 0x80, 0x5b, 0x9e, - 0x59, 0xea, 0x16, 0xcf, 0x5c, 0xcb, 0xbc, 0x2a, 0x16, 0xa1, 0xc6, 0x0d, 0x75, 0x60, 0x16, 0xea, - 0xb8, 0xd4, 0x15, 0x9e, 0xc1, 0x62, 0xa8, 0xb2, 0x1b, 0xea, 0x40, 0x7a, 0x93, 0x6c, 0xb2, 0x47, - 0xcc, 0xfe, 0xda, 0x9d, 0x31, 0x3c, 0x7d, 0x4d, 0xb3, 0x2b, 0x0a, 0xc0, 0x12, 0x83, 0xac, 0x45, - 0xdf, 0x23, 0x17, 0x4f, 0x63, 0x16, 0x33, 0xa4, 0xb6, 0x91, 0xba, 0x9d, 0x26, 0x46, 
0x0e, 0x42, - 0xde, 0xa4, 0x87, 0x84, 0x44, 0xf1, 0x58, 0x96, 0x9e, 0x08, 0xcf, 0x51, 0xcb, 0xdc, 0x49, 0x13, - 0xa3, 0x80, 0x42, 0xa1, 0x4d, 0x8f, 0xc9, 0x3e, 0x8e, 0xee, 0x0b, 0x9f, 0xcb, 0xe3, 0xc8, 0xe3, - 0xd0, 0x67, 0x0e, 0x1e, 0x9a, 0x96, 0xd9, 0x4b, 0x13, 0xa3, 0xd6, 0x0f, 0xb5, 0x28, 0xed, 0x93, - 0x76, 0x34, 0xf7, 0x5c, 0x1e, 0xf5, 0x2e, 0xa2, 0x9e, 0x88, 0xfc, 0x95, 0x08, 0xa8, 0x2f, 0x72, - 0xa6, 0x56, 0xe8, 0x44, 0x3d, 0x52, 0xe0, 0x20, 0x02, 0xea, 0x9b, 0x8d, 0xea, 0x7e, 0x10, 0xf1, - 0x23, 0xd7, 0xe3, 0x2c, 0xc4, 0xd5, 0xeb, 0x6d, 0x55, 0x46, 0x55, 0xf1, 0x43, 0x2d, 0x4a, 0x7f, - 0x22, 0x37, 0x10, 0x1f, 0xf1, 0x30, 0xb6, 0x79, 0x1c, 0x32, 0xe7, 0x1e, 0xe3, 0x96, 0x63, 0x71, - 0xab, 0x72, 0x24, 0xba, 0x18, 0xfe, 0xdd, 0x34, 0x31, 0xce, 0x27, 0x80, 0xf3, 0xd1, 0xfa, 0x3f, - 0x12, 0x0d, 0x0b, 0x2f, 0xbd, 0x45, 0xb6, 0x50, 0x31, 0x14, 0x25, 0x33, 0x52, 0xc9, 0xb2, 0x2b, - 0x92, 0xba, 0x00, 0x43, 0xd1, 0xa0, 0x9f, 0x93, 0x4b, 0xf3, 0x6c, 0x3e, 0x4a, 0x27, 0xb3, 0x61, - 0x3f, 0x4d, 0x8c, 0x15, 0x1f, 0xac, 0x20, 0xfd, 0xcf, 0x48, 0x47, 0x5d, 0x52, 0xa2, 0x48, 0x47, - 0x3c, 0x08, 0x59, 0xa5, 0xae, 0x8f, 0x04, 0x96, 0x17, 0x69, 0xa4, 0x80, 0xfc, 0xf4, 0xff, 0xdc, - 0x20, 0x9b, 0x77, 0xf3, 0xbb, 0xa8, 0x8b, 0x63, 0x03, 0x26, 0xaa, 0x88, 0xcc, 0x76, 0xcd, 0xbc, - 0x24, 0x8a, 0x5b, 0x11, 0x87, 0x92, 0x45, 0x8f, 0x08, 0x2d, 0xcc, 0xe8, 0x9e, 0xc5, 0x51, 0x2b, - 0x27, 0xf1, 0x5a, 0x9a, 0x18, 0x35, 0x5e, 0xa8, 0xc1, 0xb2, 0xde, 0x4d, 0xb4, 0x23, 0x95, 0xc1, - 0x79, 0xef, 0x0a, 0x87, 0x92, 0x45, 0x3f, 0x25, 0x3b, 0x79, 0xfe, 0x8d, 0x98, 0xcf, 0x55, 0xba, - 0xd2, 0x34, 0x31, 0x2a, 0x1e, 0xa8, 0xd8, 0xf9, 0x7a, 0x69, 0xe7, 0x5e, 0xaf, 0x5f, 0x2f, 0x10, - 0x0d, 0xfd, 0x59, 0xc7, 0x6a, 0x63, 0xd8, 0x89, 0xda, 0xef, 0xbc, 0xe3, 0xcc, 0x03, 0x15, 0x9b, - 0x7e, 0x49, 0xae, 0x14, 0x90, 0x3b, 0xc1, 0x0f, 0xbe, 0x17, 0x58, 0x4e, 0xb6, 0x6a, 0xd7, 0xd2, - 0xc4, 0xa8, 0x27, 0x40, 0x3d, 0x2c, 0xf6, 0xc0, 0x2e, 0x61, 0x58, 0x4d, 0x5a, 0xf9, 0x1e, 0xac, - 0x7a, 0xa1, 0x06, 0xa3, 0x36, 0xb9, 0x26, 0x4a, 0xc7, 0x19, 0xb0, 0x13, 0x16, 0x32, 0xdf, 0x66, - 0x4e, 0x7e, 0xfa, 0x7b, 0xdb, 0x07, 0xcd, 0x9b, 0x9b, 0xe6, 0x8d, 0x34, 0x31, 0xde, 0x5c, 0x4b, - 0x5a, 0xa6, 0x08, 0xac, 0x8f, 0x93, 0x3f, 0x3f, 0x2a, 0x97, 0xbb, 0xc0, 0xd6, 0x3c, 0x3f, 0x96, - 0xf3, 0x03, 0x76, 0x12, 0x1d, 0x31, 0x6e, 0x4f, 0xb3, 0xc2, 0x5a, 0x9c, 0x5f, 0xc9, 0x0b, 0x35, - 0x18, 0xfd, 0x96, 0xf4, 0xec, 0x00, 0x8f, 0xbb, 0x1b, 0xf8, 0xc3, 0xc0, 0xe7, 0x61, 0xe0, 0x1d, - 0x5b, 0x9c, 0xf9, 0xf6, 0x19, 0xd6, 0xde, 0x96, 0xf9, 0x46, 0x9a, 0x18, 0x6b, 0x39, 0xb0, 0xd6, - 0xd3, 0xff, 0x4b, 0x23, 0x1a, 0xce, 0x40, 0x1c, 0x8c, 0x29, 0xb3, 0x1c, 0x39, 0x1d, 0x51, 0x29, - 0x8a, 0x27, 0xb2, 0xec, 0x81, 0x8a, 0x5d, 0xd2, 0xca, 0x9a, 0xa8, 0xd5, 0x68, 0x65, 0x35, 0xac, - 0xd8, 0x74, 0x48, 0x2e, 0x3b, 0xcc, 0x0e, 0x66, 0xf3, 0x10, 0xcb, 0x92, 0xec, 0x5a, 0x4e, 0xea, - 0x4a, 0x9a, 0x18, 0xab, 0x4e, 0x58, 0x85, 0xaa, 0x41, 0xe4, 0x18, 0x3a, 0xf5, 0x41, 0xe4, 0x30, - 0x56, 0x21, 0x7a, 0x9b, 0xec, 0x56, 0xc7, 0x21, 0x2f, 0x9c, 0xbd, 0x34, 0x31, 0xaa, 0x2e, 0xa8, - 0x02, 0x42, 0x8e, 0xa7, 0xfc, 0x4e, 0x3c, 0xf7, 0x5c, 0xdb, 0x12, 0xf2, 0x8b, 0xb9, 0xbc, 0xe2, - 0x82, 0x2a, 0x20, 0xe4, 0xf3, 0xca, 0xc5, 0x42, 0x72, 0x79, 0xc5, 0x05, 0x55, 0x80, 0xce, 0xc9, - 0x41, 0xb6, 0xb0, 0x6b, 0x4a, 0xbf, 0xba, 0xa8, 0xde, 0x4e, 0x13, 0xe3, 0x95, 0x5c, 0x78, 0x25, - 0x83, 0x9e, 0x91, 0xb7, 0x8a, 0x6b, 0xb8, 0xae, 0x53, 0x79, 0x7d, 0xbd, 0x93, 0x26, 0xc6, 0x79, - 0xe8, 0x70, 0x1e, 0x52, 0xff, 0x9f, 0x16, 0xd1, 0xf0, 0xc9, 0x28, 0xaa, 0x2f, 0x93, 0xd7, 0xfd, - 0x51, 0x10, 
0xfb, 0xa5, 0xda, 0x5f, 0xc4, 0xa1, 0x64, 0x89, 0xeb, 0x8b, 0x2d, 0x1f, 0x09, 0xa7, - 0xb1, 0xb8, 0x45, 0x64, 0x0d, 0xd3, 0xe4, 0xf5, 0x55, 0xf5, 0xc1, 0x0a, 0x42, 0x3f, 0x26, 0xdb, - 0x0a, 0xc3, 0xb2, 0x2a, 0x1f, 0x6e, 0x9a, 0x79, 0x39, 0x4d, 0x8c, 0xb2, 0x03, 0xca, 0xa6, 0x10, - 0xe2, 0x4b, 0x13, 0x98, 0xcd, 0xdc, 0x87, 0xd9, 0x33, 0x0d, 0x85, 0x25, 0x07, 0x94, 0x4d, 0xf1, - 0xe0, 0x42, 0x00, 0x2f, 0x0b, 0x99, 0x5e, 0xf8, 0xe0, 0xca, 0x40, 0xc8, 0x9b, 0xe2, 0x1d, 0x17, - 0xca, 0xb1, 0xca, 0x5c, 0xd2, 0xe4, 0x3b, 0x6e, 0x89, 0x41, 0xd6, 0x12, 0x0b, 0xe8, 0x14, 0x8b, - 0x6f, 0x27, 0xbf, 0xbe, 0x8a, 0x38, 0x94, 0x2c, 0x91, 0x6f, 0x58, 0x28, 0x8f, 0x99, 0x3f, 0xe1, - 0xd3, 0x11, 0x0b, 0x1f, 0x66, 0xaf, 0x33, 0xcc, 0xb7, 0x15, 0x27, 0xac, 0x42, 0xe6, 0xf8, 0xc9, - 0x33, 0xbd, 0xf1, 0xf4, 0x99, 0xde, 0x78, 0xf1, 0x4c, 0x6f, 0xfe, 0xbc, 0xd0, 0x9b, 0x7f, 0x2c, - 0xf4, 0xe6, 0xe3, 0x85, 0xde, 0x7c, 0xb2, 0xd0, 0x9b, 0xff, 0x2e, 0xf4, 0xe6, 0x7f, 0x0b, 0xbd, - 0xf1, 0x62, 0xa1, 0x37, 0x7f, 0x79, 0xae, 0x37, 0x9e, 0x3c, 0xd7, 0x1b, 0x4f, 0x9f, 0xeb, 0x8d, - 0xef, 0xde, 0x9f, 0xb8, 0x7c, 0x1a, 0x8f, 0x0f, 0xed, 0x60, 0x36, 0x98, 0x84, 0xd6, 0x89, 0xe5, - 0x5b, 0x03, 0x2f, 0x78, 0xe0, 0x0e, 0xea, 0xfe, 0x90, 0x8f, 0xdb, 0xf8, 0x77, 0xfb, 0xc3, 0xff, - 0x03, 0x00, 0x00, 0xff, 0xff, 0x22, 0x3f, 0x1a, 0x51, 0xaf, 0x0f, 0x00, 0x00, + // 1357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0x4f, 0x6f, 0xdc, 0x44, + 0x14, 0xdf, 0xcd, 0xd6, 0x9b, 0x74, 0xf2, 0xaf, 0x9d, 0xa4, 0x74, 0x4b, 0x2b, 0x3b, 0x2c, 0x54, + 0x14, 0x81, 0x12, 0x15, 0x90, 0x10, 0x88, 0x4a, 0xc8, 0x29, 0x91, 0x2a, 0xa5, 0xa2, 0xbc, 0x80, + 0x40, 0x70, 0xf2, 0xda, 0x93, 0x5d, 0xab, 0x5e, 0x7b, 0x63, 0x8f, 0x4b, 0x23, 0x21, 0xc1, 0x47, + 0xe0, 0xce, 0x1d, 0x71, 0xe1, 0xc4, 0x89, 0x33, 0x97, 0x1e, 0x7b, 0xec, 0xc9, 0xa2, 0xdb, 0x0b, + 0xf2, 0xa9, 0x1f, 0x80, 0x03, 0x9a, 0x37, 0xb3, 0xf6, 0xd8, 0xeb, 0x4d, 0x73, 0x59, 0xcf, 0xfb, + 0xbd, 0xf7, 0x7b, 0x33, 0xf3, 0x66, 0xde, 0x7b, 0xb3, 0x64, 0x67, 0xf2, 0x70, 0xb8, 0x17, 0x44, + 0xc3, 0x93, 0x60, 0x1c, 0x79, 0x2c, 0xd8, 0x4b, 0xb8, 0xc3, 0x13, 0xf9, 0xbb, 0x3b, 0x89, 0x23, + 0x1e, 0x51, 0x03, 0x85, 0xd7, 0xb7, 0x87, 0xd1, 0x30, 0x42, 0x64, 0x4f, 0x8c, 0xa4, 0xb2, 0xff, + 0xdb, 0x12, 0xe9, 0x02, 0x4b, 0xd2, 0x80, 0xd3, 0x8f, 0xc9, 0x72, 0x92, 0x8e, 0xc7, 0x4e, 0x7c, + 0xda, 0x6b, 0xef, 0xb4, 0x6f, 0xad, 0xbe, 0xbf, 0xb1, 0x2b, 0xdd, 0x1c, 0x49, 0xd4, 0xde, 0x7c, + 0x92, 0x59, 0xad, 0x3c, 0xb3, 0x66, 0x66, 0x30, 0x1b, 0x08, 0xea, 0x49, 0xca, 0x62, 0x9f, 0xc5, + 0xbd, 0xa5, 0x0a, 0xf5, 0x4b, 0x89, 0x96, 0x54, 0x65, 0x06, 0xb3, 0x01, 0xbd, 0x43, 0x56, 0xfc, + 0x70, 0xc8, 0x12, 0xce, 0xe2, 0x5e, 0x07, 0xb9, 0x9b, 0x8a, 0x7b, 0x4f, 0xc1, 0xf6, 0x25, 0x45, + 0x2e, 0x0c, 0xa1, 0x18, 0xd1, 0x0f, 0x49, 0xd7, 0x75, 0xdc, 0x11, 0x4b, 0x7a, 0x17, 0x90, 0xbc, + 0xae, 0xc8, 0xfb, 0x08, 0xda, 0xeb, 0x8a, 0x6a, 0xa0, 0x11, 0x28, 0x5b, 0x7a, 0x9b, 0x18, 0x7e, + 0xe8, 0xb1, 0xc7, 0x3d, 0x03, 0x49, 0x6b, 0xc5, 0x8c, 0x1e, 0x7b, 0x5c, 0x72, 0xd0, 0x04, 0xe4, + 0xa7, 0xff, 0xeb, 0x05, 0xd2, 0xdd, 0x2f, 0xd8, 0xee, 0x28, 0x0d, 0x1f, 0xaa, 0x30, 0xad, 0xe9, + 0x53, 0x6a, 0x33, 0x0a, 0x13, 0x90, 0x9f, 0x72, 0xc2, 0xa5, 0xb3, 0x28, 0xfa, 0x84, 0x62, 0x67, + 0x31, 0x1e, 0x8c, 0x0a, 0x4b, 0x95, 0xb3, 0xa1, 0x38, 0xca, 0x06, 0xd4, 0x97, 0xee, 0x93, 0x55, + 0x34, 0x93, 0x67, 0xaa, 0x82, 0x52, 0xa5, 0x6e, 0x29, 0xaa, 0x6e, 0x08, 0xba, 0x40, 0x0f, 0xc8, + 0xda, 0xa3, 0x28, 0x48, 0xc7, 0x4c, 0x79, 0x31, 0x1a, 0xbc, 0x6c, 0x2b, 0x2f, 0x15, 0x4b, 0xa8, + 0x48, 
0xc2, 0x4f, 0x22, 0x4e, 0x79, 0xb6, 0x9a, 0xee, 0x59, 0x7e, 0x74, 0x4b, 0xa8, 0x48, 0x62, + 0x53, 0x81, 0x33, 0x60, 0x81, 0x72, 0xb3, 0x7c, 0xd6, 0xa6, 0x34, 0x43, 0xd0, 0x05, 0xfa, 0x3d, + 0xd9, 0xf2, 0xc3, 0x84, 0x3b, 0x21, 0xbf, 0xcf, 0x78, 0xec, 0xbb, 0xca, 0xd9, 0x4a, 0x83, 0xb3, + 0xeb, 0xca, 0x59, 0x13, 0x01, 0x9a, 0xc0, 0xfe, 0x5f, 0x5d, 0xb2, 0xac, 0xd2, 0x84, 0x7e, 0x4d, + 0xae, 0x0e, 0x4e, 0x39, 0x4b, 0x1e, 0xc4, 0x91, 0xcb, 0x92, 0x84, 0x79, 0x0f, 0x58, 0x7c, 0xc4, + 0xdc, 0x28, 0xf4, 0xf0, 0xc2, 0x74, 0xec, 0xeb, 0x79, 0x66, 0x2d, 0x32, 0x81, 0x45, 0x0a, 0xe1, + 0x36, 0xf0, 0xc3, 0x46, 0xb7, 0x4b, 0xa5, 0xdb, 0x05, 0x26, 0xb0, 0x48, 0x41, 0xef, 0x91, 0x2d, + 0x1e, 0x71, 0x27, 0xb0, 0x2b, 0xd3, 0xe2, 0x9d, 0xeb, 0xd8, 0x57, 0x45, 0x10, 0x1a, 0xd4, 0xd0, + 0x04, 0x16, 0xae, 0x0e, 0x2b, 0x53, 0xe1, 0x1d, 0xd4, 0x5d, 0x55, 0xd5, 0xd0, 0x04, 0xd2, 0x5b, + 0x64, 0x85, 0x3d, 0x66, 0xee, 0x57, 0xfe, 0x98, 0xe1, 0xed, 0x6b, 0xdb, 0x6b, 0xa2, 0x00, 0xcc, + 0x30, 0x28, 0x46, 0xf4, 0x5d, 0x72, 0xf1, 0x24, 0x65, 0x29, 0x43, 0xd3, 0x2e, 0x9a, 0xae, 0xe7, + 0x99, 0x55, 0x82, 0x50, 0x0e, 0xe9, 0x2e, 0x21, 0x49, 0x3a, 0x90, 0xa5, 0x27, 0xc1, 0x7b, 0xd4, + 0xb1, 0x37, 0xf2, 0xcc, 0xd2, 0x50, 0xd0, 0xc6, 0xf4, 0x90, 0x6c, 0xe3, 0xea, 0x3e, 0x0f, 0xb9, + 0xbc, 0x8e, 0x3c, 0x8d, 0x43, 0xe6, 0xe1, 0xa5, 0xe9, 0xd8, 0xbd, 0x3c, 0xb3, 0x1a, 0xf5, 0xd0, + 0x88, 0xd2, 0x3e, 0xe9, 0x26, 0x93, 0xc0, 0xe7, 0x49, 0xef, 0x22, 0xf2, 0x89, 0xc8, 0x5f, 0x89, + 0x80, 0xfa, 0xa2, 0xcd, 0xc8, 0x89, 0xbd, 0xa4, 0x47, 0x34, 0x1b, 0x44, 0x40, 0x7d, 0x8b, 0x55, + 0x3d, 0x88, 0x12, 0x7e, 0xe0, 0x07, 0x9c, 0xc5, 0x18, 0xbd, 0xde, 0x6a, 0x6d, 0x55, 0x35, 0x3d, + 0x34, 0xa2, 0xf4, 0x27, 0x72, 0x13, 0xf1, 0x23, 0x1e, 0xa7, 0x2e, 0x4f, 0x63, 0xe6, 0xdd, 0x67, + 0xdc, 0xf1, 0x1c, 0xee, 0xd4, 0xae, 0xc4, 0x1a, 0xba, 0x7f, 0x27, 0xcf, 0xac, 0xf3, 0x11, 0xe0, + 0x7c, 0x66, 0xfd, 0x1f, 0x89, 0x81, 0x85, 0x97, 0xde, 0x26, 0xab, 0xc8, 0xd8, 0x17, 0x25, 0x33, + 0x51, 0xc9, 0xb2, 0x29, 0x92, 0x5a, 0x83, 0x41, 0x17, 0xe8, 0x67, 0xe4, 0xd2, 0xa4, 0xd8, 0x8f, + 0xe2, 0xc9, 0x6c, 0xd8, 0xce, 0x33, 0x6b, 0x4e, 0x07, 0x73, 0x48, 0xff, 0x53, 0xb2, 0xac, 0x9a, + 0x94, 0x28, 0xd2, 0x09, 0x8f, 0x62, 0x56, 0xab, 0xeb, 0x47, 0x02, 0x2b, 0x8b, 0x34, 0x9a, 0x80, + 0xfc, 0xf4, 0xff, 0x58, 0x22, 0x2b, 0xf7, 0xca, 0x5e, 0xb4, 0x86, 0x6b, 0x03, 0x26, 0xaa, 0x88, + 0xcc, 0x76, 0xc3, 0xbe, 0x24, 0x8a, 0x9b, 0x8e, 0x43, 0x45, 0xa2, 0x07, 0x84, 0x6a, 0x3b, 0xba, + 0xef, 0x70, 0xe4, 0xca, 0x4d, 0xbc, 0x96, 0x67, 0x56, 0x83, 0x16, 0x1a, 0xb0, 0x62, 0x76, 0x1b, + 0xe5, 0x44, 0x65, 0x70, 0x39, 0xbb, 0xc2, 0xa1, 0x22, 0xd1, 0x4f, 0xc8, 0x46, 0x99, 0x7f, 0x47, + 0x2c, 0xe4, 0x2a, 0x5d, 0x69, 0x9e, 0x59, 0x35, 0x0d, 0xd4, 0xe4, 0x32, 0x5e, 0xc6, 0xb9, 0xe3, + 0xf5, 0xdf, 0x05, 0x62, 0xa0, 0xbe, 0x98, 0x58, 0x1d, 0x0c, 0x3b, 0x56, 0xe7, 0x5d, 0x4e, 0x5c, + 0x68, 0xa0, 0x26, 0xd3, 0x2f, 0xc8, 0x15, 0x0d, 0xb9, 0x1b, 0xfd, 0x10, 0x06, 0x91, 0xe3, 0x15, + 0x51, 0xbb, 0x96, 0x67, 0x56, 0xb3, 0x01, 0x34, 0xc3, 0xe2, 0x0c, 0xdc, 0x0a, 0x86, 0xd5, 0xa4, + 0x53, 0x9e, 0xc1, 0xbc, 0x16, 0x1a, 0x30, 0xea, 0x92, 0x6b, 0xa2, 0x74, 0x9c, 0x02, 0x3b, 0x66, + 0x31, 0x0b, 0x5d, 0xe6, 0x95, 0xb7, 0xbf, 0xb7, 0xbe, 0xd3, 0xbe, 0xb5, 0x62, 0xdf, 0xcc, 0x33, + 0xeb, 0x8d, 0x85, 0x46, 0xb3, 0x14, 0x81, 0xc5, 0x7e, 0xca, 0xe7, 0x47, 0xad, 0xb9, 0x0b, 0x6c, + 0xc1, 0xf3, 0x63, 0xb6, 0x3f, 0x60, 0xc7, 0xc9, 0x01, 0xe3, 0xee, 0xa8, 0x28, 0xac, 0xfa, 0xfe, + 0x2a, 0x5a, 0x68, 0xc0, 0xe8, 0xb7, 0xa4, 0xe7, 0x46, 0x78, 0xdd, 0xfd, 0x28, 0xdc, 0x8f, 0x42, + 0x1e, 0x47, 0xc1, 0xa1, 0xc3, 
0x59, 0xe8, 0x9e, 0x62, 0xed, 0xed, 0xd8, 0x37, 0xf2, 0xcc, 0x5a, + 0x68, 0x03, 0x0b, 0x35, 0xd4, 0x23, 0x37, 0x26, 0xfe, 0x84, 0x89, 0x2e, 0xf5, 0x4d, 0xec, 0x4c, + 0x26, 0x2c, 0x96, 0x59, 0xca, 0x3c, 0x59, 0xdb, 0x64, 0xad, 0xde, 0xc9, 0x33, 0xeb, 0x4c, 0x3b, + 0x38, 0x53, 0xdb, 0xff, 0xd3, 0x20, 0x06, 0xc6, 0x49, 0x5c, 0xbf, 0x11, 0x73, 0x3c, 0x19, 0x34, + 0x51, 0x8f, 0xf4, 0x7b, 0x5f, 0xd5, 0x40, 0x4d, 0xae, 0x70, 0xe5, 0xea, 0x8c, 0x06, 0xae, 0x5c, + 0x4f, 0x4d, 0xa6, 0xfb, 0xe4, 0xb2, 0xc7, 0xdc, 0x68, 0x3c, 0x89, 0xb1, 0xf8, 0xc9, 0xa9, 0x65, + 0xe8, 0xae, 0xe4, 0x99, 0x35, 0xaf, 0x84, 0x79, 0xa8, 0xee, 0x44, 0x8f, 0xd0, 0x9c, 0x13, 0xb9, + 0x8c, 0x79, 0x88, 0xde, 0x21, 0x9b, 0xf5, 0x75, 0xc8, 0xb6, 0xb6, 0x95, 0x67, 0x56, 0x5d, 0x05, + 0x75, 0x40, 0xd0, 0x31, 0x97, 0xee, 0xa6, 0x93, 0xc0, 0x77, 0x1d, 0x41, 0xbf, 0x58, 0xd2, 0x6b, + 0x2a, 0xa8, 0x03, 0x82, 0x3e, 0xa9, 0xb5, 0x2f, 0x52, 0xd2, 0x6b, 0x2a, 0xa8, 0x03, 0x74, 0x42, + 0x76, 0x8a, 0xc0, 0x2e, 0x68, 0x30, 0xaa, 0x1d, 0xbe, 0x95, 0x67, 0xd6, 0x2b, 0x6d, 0xe1, 0x95, + 0x16, 0xf4, 0x94, 0xbc, 0xa9, 0xc7, 0x70, 0xd1, 0xa4, 0xb2, 0x49, 0xbe, 0x9d, 0x67, 0xd6, 0x79, + 0xcc, 0xe1, 0x3c, 0x46, 0xfd, 0xbf, 0x3b, 0xc4, 0xc0, 0x87, 0xa9, 0xa8, 0xf1, 0x4c, 0x3e, 0x2a, + 0x0e, 0xa2, 0x34, 0xac, 0x74, 0x18, 0x1d, 0x87, 0x8a, 0x24, 0x9a, 0x24, 0x9b, 0x3d, 0x45, 0x4e, + 0x52, 0xd1, 0xab, 0x64, 0xa5, 0x34, 0x64, 0x93, 0xac, 0xeb, 0x60, 0x0e, 0xa1, 0x1f, 0x91, 0x75, + 0x85, 0x61, 0xf1, 0x96, 0xcf, 0x43, 0xc3, 0xbe, 0x9c, 0x67, 0x56, 0x55, 0x01, 0x55, 0x51, 0x10, + 0xf1, 0x3d, 0x0b, 0xcc, 0x65, 0xfe, 0xa3, 0xe2, 0x31, 0x88, 0xc4, 0x8a, 0x02, 0xaa, 0xa2, 0x78, + 0xd6, 0x21, 0x80, 0x2d, 0x49, 0xa6, 0x17, 0x3e, 0xeb, 0x0a, 0x10, 0xca, 0xa1, 0x78, 0x2d, 0xc6, + 0x72, 0xad, 0x32, 0x97, 0x0c, 0xf9, 0x5a, 0x9c, 0x61, 0x50, 0x8c, 0x44, 0x00, 0x3d, 0xbd, 0xc4, + 0x2f, 0x97, 0x4d, 0x52, 0xc7, 0xa1, 0x22, 0x89, 0x7c, 0xc3, 0x72, 0x7c, 0xc8, 0xc2, 0x21, 0x1f, + 0x1d, 0xb1, 0xf8, 0x51, 0xf1, 0x06, 0xc4, 0x7c, 0x9b, 0x53, 0xc2, 0x3c, 0x64, 0x0f, 0x9e, 0x3e, + 0x37, 0x5b, 0xcf, 0x9e, 0x9b, 0xad, 0x97, 0xcf, 0xcd, 0xf6, 0xcf, 0x53, 0xb3, 0xfd, 0xfb, 0xd4, + 0x6c, 0x3f, 0x99, 0x9a, 0xed, 0xa7, 0x53, 0xb3, 0xfd, 0xcf, 0xd4, 0x6c, 0xff, 0x3b, 0x35, 0x5b, + 0x2f, 0xa7, 0x66, 0xfb, 0x97, 0x17, 0x66, 0xeb, 0xe9, 0x0b, 0xb3, 0xf5, 0xec, 0x85, 0xd9, 0xfa, + 0xee, 0xbd, 0xa1, 0xcf, 0x47, 0xe9, 0x60, 0xd7, 0x8d, 0xc6, 0x7b, 0xc3, 0xd8, 0x39, 0x76, 0x42, + 0x67, 0x2f, 0x88, 0x1e, 0xfa, 0x7b, 0x4d, 0x7f, 0xfb, 0x07, 0x5d, 0xfc, 0x53, 0xff, 0xc1, 0xff, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x44, 0xbc, 0x0a, 0x32, 0x15, 0x10, 0x00, 0x00, } func (this *Result) Equal(that interface{}) bool { @@ -1206,6 +1217,9 @@ func (this *Store) Equal(that interface{}) bool { if this.CongestionControlLatency != that1.CongestionControlLatency { return false } + if this.PipelineWrapperFilteredLines != that1.PipelineWrapperFilteredLines { + return false + } return true } func (this *Chunk) Equal(that interface{}) bool { @@ -1392,7 +1406,7 @@ func (this *Store) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 11) + s := make([]string, 0, 12) s = append(s, "&stats.Store{") s = append(s, "TotalChunksRef: "+fmt.Sprintf("%#v", this.TotalChunksRef)+",\n") s = append(s, "TotalChunksDownloaded: "+fmt.Sprintf("%#v", this.TotalChunksDownloaded)+",\n") @@ -1401,6 +1415,7 @@ func (this *Store) GoString() string { s = append(s, "Chunk: "+strings.Replace(this.Chunk.GoString(), `&`, ``, 1)+",\n") s = append(s, "ChunkRefsFetchTime: "+fmt.Sprintf("%#v", this.ChunkRefsFetchTime)+",\n") 
s = append(s, "CongestionControlLatency: "+fmt.Sprintf("%#v", this.CongestionControlLatency)+",\n") + s = append(s, "PipelineWrapperFilteredLines: "+fmt.Sprintf("%#v", this.PipelineWrapperFilteredLines)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -1857,6 +1872,11 @@ func (m *Store) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x68 } + if m.PipelineWrapperFilteredLines != 0 { + i = encodeVarintStats(dAtA, i, uint64(m.PipelineWrapperFilteredLines)) + i-- + dAtA[i] = 0x38 + } if m.CongestionControlLatency != 0 { i = encodeVarintStats(dAtA, i, uint64(m.CongestionControlLatency)) i-- @@ -2198,6 +2218,9 @@ func (m *Store) Size() (n int) { if m.CongestionControlLatency != 0 { n += 1 + sovStats(uint64(m.CongestionControlLatency)) } + if m.PipelineWrapperFilteredLines != 0 { + n += 1 + sovStats(uint64(m.PipelineWrapperFilteredLines)) + } if m.QueryReferencedStructured { n += 2 } @@ -2377,6 +2400,7 @@ func (this *Store) String() string { `Chunk:` + strings.Replace(strings.Replace(this.Chunk.String(), "Chunk", "Chunk", 1), `&`, ``, 1) + `,`, `ChunkRefsFetchTime:` + fmt.Sprintf("%v", this.ChunkRefsFetchTime) + `,`, `CongestionControlLatency:` + fmt.Sprintf("%v", this.CongestionControlLatency) + `,`, + `PipelineWrapperFilteredLines:` + fmt.Sprintf("%v", this.PipelineWrapperFilteredLines) + `,`, `QueryReferencedStructured:` + fmt.Sprintf("%v", this.QueryReferencedStructured) + `,`, `}`, }, "") @@ -3721,6 +3745,25 @@ func (m *Store) Unmarshal(dAtA []byte) error { break } } + case 7: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field PipelineWrapperFilteredLines", wireType) + } + m.PipelineWrapperFilteredLines = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStats + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.PipelineWrapperFilteredLines |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } case 13: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field QueryReferencedStructured", wireType) diff --git a/pkg/logqlmodel/stats/stats.proto b/pkg/logqlmodel/stats/stats.proto index 84219f11510be..df21e4a2ee9a6 100644 --- a/pkg/logqlmodel/stats/stats.proto +++ b/pkg/logqlmodel/stats/stats.proto @@ -153,6 +153,9 @@ message Store { // Time spent being blocked on congestion control. int64 congestionControlLatency = 6 [(gogoproto.jsontag) = "congestionControlLatency"]; + + // Total number of lines filtered by pipeline wrapper. 
+ int64 pipelineWrapperFilteredLines = 7 [(gogoproto.jsontag) = "pipelineWrapperFilteredLines"]; } message Chunk { diff --git a/pkg/querier/queryrange/codec_test.go b/pkg/querier/queryrange/codec_test.go index b615d73b09c01..6a692c9180c94 100644 --- a/pkg/querier/queryrange/codec_test.go +++ b/pkg/querier/queryrange/codec_test.go @@ -1565,7 +1565,8 @@ var ( "totalChunksRef": 0, "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, - "queryReferencedStructuredMetadata": false + "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 2 }, "totalBatches": 6, "totalChunksMatched": 7, @@ -1590,7 +1591,8 @@ var ( "totalChunksRef": 17, "totalChunksDownloaded": 18, "chunkRefsFetchTime": 19, - "queryReferencedStructuredMetadata": true + "queryReferencedStructuredMetadata": true, + "pipelineWrapperFilteredLines": 4 } }, "index": { @@ -2023,17 +2025,19 @@ var ( PostFilterLines: 0, TotalDuplicates: 19, }, - ChunksDownloadTime: 16, - CongestionControlLatency: 0, - TotalChunksRef: 17, - TotalChunksDownloaded: 18, - ChunkRefsFetchTime: 19, - QueryReferencedStructured: true, + ChunksDownloadTime: 16, + CongestionControlLatency: 0, + TotalChunksRef: 17, + TotalChunksDownloaded: 18, + ChunkRefsFetchTime: 19, + QueryReferencedStructured: true, + PipelineWrapperFilteredLines: 4, }, }, Ingester: stats.Ingester{ Store: stats.Store{ + PipelineWrapperFilteredLines: 2, Chunk: stats.Chunk{ CompressedBytes: 1, DecompressedBytes: 2, diff --git a/pkg/querier/queryrange/prometheus_test.go b/pkg/querier/queryrange/prometheus_test.go index 624c8b5194eec..a5bea28684251 100644 --- a/pkg/querier/queryrange/prometheus_test.go +++ b/pkg/querier/queryrange/prometheus_test.go @@ -25,6 +25,7 @@ var emptyStats = `"stats": { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, @@ -50,6 +51,7 @@ var emptyStats = `"stats": { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, diff --git a/pkg/util/marshal/legacy/marshal_test.go b/pkg/util/marshal/legacy/marshal_test.go index de308abb2b276..e146e9a99a5fa 100644 --- a/pkg/util/marshal/legacy/marshal_test.go +++ b/pkg/util/marshal/legacy/marshal_test.go @@ -68,6 +68,7 @@ var queryTests = []struct { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, @@ -93,6 +94,7 @@ var queryTests = []struct { "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, diff --git a/pkg/util/marshal/marshal_test.go b/pkg/util/marshal/marshal_test.go index 3a56617fb55b3..7917be41dae76 100644 --- a/pkg/util/marshal/marshal_test.go +++ b/pkg/util/marshal/marshal_test.go @@ -36,6 +36,7 @@ const emptyStats = `{ "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, @@ -61,6 +62,7 @@ const emptyStats = `{ "totalChunksDownloaded": 0, "chunkRefsFetchTime": 0, "queryReferencedStructuredMetadata": false, + "pipelineWrapperFilteredLines": 0, "chunk" :{ "compressedBytes": 0, "decompressedBytes": 0, From 
b286075428a6cc7f58040bbdec6c81a97b626852 Mon Sep 17 00:00:00 2001 From: Paul Rogers <129207811+paul1r@users.noreply.github.com> Date: Fri, 29 Mar 2024 14:57:15 -0400 Subject: [PATCH 38/54] chore: Update examples to use v13 schema, along with related defaults (#12402) Co-authored-by: J Stickler --- cmd/loki/loki-docker-config.yaml | 2 +- cmd/loki/loki-local-config.yaml | 2 +- cmd/loki/loki-local-experimental-config.yaml | 2 +- cmd/loki/loki-local-with-memcached.yaml | 2 +- .../examples/configuration-examples.md | 20 ++++++++--------- .../yaml/1-Local-Configuration-Example.yaml | 2 +- .../examples/yaml/11-COS-HMAC-Example.yaml | 2 +- .../examples/yaml/12-COS-APIKey-Example.yaml | 2 +- .../yaml/13-COS-Trusted-Profile-Example.yaml | 2 +- .../examples/yaml/2-S3-Cluster-Example.yaml | 2 +- .../examples/yaml/4-GCS-Example.yaml | 2 +- .../examples/yaml/5-BOS-Example.yaml | 2 +- .../yaml/7-Schema-Migration-Snippet.yaml | 4 ++-- .../yaml/8-alibaba-cloud-storage-Snippet.yaml | 2 +- docs/sources/operations/storage/retention.md | 4 ++-- .../operations/storage/schema/_index.md | 4 ++-- docs/sources/operations/storage/tsdb.md | 2 +- .../setup/migrate/migrate-to-tsdb/_index.md | 4 ++-- docs/sources/storage/_index.md | 22 +++++++++---------- examples/getting-started/loki-config.yaml | 2 +- production/docker/config/loki.yaml | 7 ++++++ production/nomad/loki-distributed/config.yml | 4 ++-- production/nomad/loki-simple/config.yml | 4 ++-- production/nomad/loki/config.yml | 4 ++-- 24 files changed, 56 insertions(+), 49 deletions(-) diff --git a/cmd/loki/loki-docker-config.yaml b/cmd/loki/loki-docker-config.yaml index b9f80f910236c..c50c147b06f2f 100644 --- a/cmd/loki/loki-docker-config.yaml +++ b/cmd/loki/loki-docker-config.yaml @@ -20,7 +20,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/cmd/loki/loki-local-config.yaml b/cmd/loki/loki-local-config.yaml index cbc04cb4413f3..e448dfd9f1fa3 100644 --- a/cmd/loki/loki-local-config.yaml +++ b/cmd/loki/loki-local-config.yaml @@ -28,7 +28,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/cmd/loki/loki-local-experimental-config.yaml b/cmd/loki/loki-local-experimental-config.yaml index f9968bc04b72d..03734fec08b49 100644 --- a/cmd/loki/loki-local-experimental-config.yaml +++ b/cmd/loki/loki-local-experimental-config.yaml @@ -28,7 +28,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/cmd/loki/loki-local-with-memcached.yaml b/cmd/loki/loki-local-with-memcached.yaml index a2f4336cdd484..482fb40e0c2cb 100644 --- a/cmd/loki/loki-local-with-memcached.yaml +++ b/cmd/loki/loki-local-with-memcached.yaml @@ -76,7 +76,7 @@ schema_config: - from: 2020-10-24 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/configuration-examples.md b/docs/sources/configure/examples/configuration-examples.md index 6fb77d78a6c4f..eaaf659049dee 100644 --- a/docs/sources/configure/examples/configuration-examples.md +++ b/docs/sources/configure/examples/configuration-examples.md @@ -30,7 +30,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -71,7 +71,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: s3 - 
schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -127,7 +127,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -154,7 +154,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: bos - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -203,12 +203,12 @@ schema_config: period: 24h prefix: index_ - # Starting from 2023-6-15 Loki should store indexes on TSDB with the v12 schema + # Starting from 2023-6-15 Loki should store indexes on TSDB with the v13 schema # using daily periodic tables and chunks on AWS S3. - from: "2023-06-15" store: tsdb object_store: s3 - schema: v12 + schema: v13 index: period: 24h prefix: index_ @@ -227,7 +227,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: alibabacloud - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -297,7 +297,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ @@ -327,7 +327,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ @@ -364,7 +364,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml b/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml index e4b3ab0c6a28c..4ccba0868d2e2 100644 --- a/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml +++ b/docs/sources/configure/examples/yaml/1-Local-Configuration-Example.yaml @@ -19,7 +19,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml b/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml index 642d0f3316dae..98d5f160926bb 100644 --- a/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml +++ b/docs/sources/configure/examples/yaml/11-COS-HMAC-Example.yaml @@ -5,7 +5,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml b/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml index d50d1c39863cb..2dfa640dae2d9 100644 --- a/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml +++ b/docs/sources/configure/examples/yaml/12-COS-APIKey-Example.yaml @@ -5,7 +5,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml b/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml index 90e6b9673353d..f6f14a4577bfb 100644 --- a/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml +++ b/docs/sources/configure/examples/yaml/13-COS-Trusted-Profile-Example.yaml @@ -11,7 +11,7 @@ schema_config: - from: 2020-10-01 store: tsdb object_store: cos - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml b/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml index 4dd9a3ae04f03..cde37ed5b2e8a 100644 --- a/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml +++ 
b/docs/sources/configure/examples/yaml/2-S3-Cluster-Example.yaml @@ -20,7 +20,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/4-GCS-Example.yaml b/docs/sources/configure/examples/yaml/4-GCS-Example.yaml index 7a227d064a417..330e94006aeef 100644 --- a/docs/sources/configure/examples/yaml/4-GCS-Example.yaml +++ b/docs/sources/configure/examples/yaml/4-GCS-Example.yaml @@ -19,7 +19,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/5-BOS-Example.yaml b/docs/sources/configure/examples/yaml/5-BOS-Example.yaml index 3a024ee849c70..be25b802a0226 100644 --- a/docs/sources/configure/examples/yaml/5-BOS-Example.yaml +++ b/docs/sources/configure/examples/yaml/5-BOS-Example.yaml @@ -6,7 +6,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: bos - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml b/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml index 37c12e034ba26..dd1976b0107d7 100644 --- a/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml +++ b/docs/sources/configure/examples/yaml/7-Schema-Migration-Snippet.yaml @@ -11,12 +11,12 @@ schema_config: period: 24h prefix: index_ - # Starting from 2023-6-15 Loki should store indexes on TSDB with the v12 schema + # Starting from 2023-6-15 Loki should store indexes on TSDB with the v13 schema # using daily periodic tables and chunks on AWS S3. - from: "2023-06-15" store: tsdb object_store: s3 - schema: v12 + schema: v13 index: period: 24h prefix: index_ diff --git a/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml b/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml index 48b7edeb7b0bb..b26f1198d4c81 100644 --- a/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml +++ b/docs/sources/configure/examples/yaml/8-alibaba-cloud-storage-Snippet.yaml @@ -5,7 +5,7 @@ schema_config: - from: 2020-05-15 store: tsdb object_store: alibabacloud - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/docs/sources/operations/storage/retention.md b/docs/sources/operations/storage/retention.md index 96880a43374e6..8a8e86ad52337 100644 --- a/docs/sources/operations/storage/retention.md +++ b/docs/sources/operations/storage/retention.md @@ -72,7 +72,7 @@ schema_config: period: 24h prefix: index_ object_store: gcs - schema: v12 + schema: v13 store: tsdb storage_config: tsdb_shipper: @@ -238,7 +238,7 @@ schema_config: - from: 2018-04-15 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: loki_index_ period: 24h diff --git a/docs/sources/operations/storage/schema/_index.md b/docs/sources/operations/storage/schema/_index.md index e5a14b3ed8a61..a368497414de1 100644 --- a/docs/sources/operations/storage/schema/_index.md +++ b/docs/sources/operations/storage/schema/_index.md @@ -23,7 +23,7 @@ Here are items to consider when changing the schema; if schema changes are not d Be aware of your relation to UTC when using the current date. Make sure that UTC 00:00:00 has not already passed for your current date. 
- As an example, assume that the current date is 2022-04-10, and you want to update to the v12 schema, so you restart Loki with 2022-04-11 as the `from` date for the new schema. If you forget to take into account that your timezone is UTC -5:00 and it’s currently 20:00 hours in your local timezone, that is actually 2022-04-11T01:00:00 UTC. When Loki starts it will see the new schema and begin to write and store objects following that new schema. If you then try to query data that was written between 00:00:00 and 01:00:00 UTC, Loki will use the new schema and the data will be unreadable, because it was created with the previous schema. + As an example, assume that the current date is 2022-04-10, and you want to update to the v13 schema, so you restart Loki with 2022-04-11 as the `from` date for the new schema. If you forget to take into account that your timezone is UTC -5:00 and it’s currently 20:00 hours in your local timezone, that is actually 2022-04-11T01:00:00 UTC. When Loki starts it will see the new schema and begin to write and store objects following that new schema. If you then try to query data that was written between 00:00:00 and 01:00:00 UTC, Loki will use the new schema and the data will be unreadable, because it was created with the previous schema. - You cannot undo or roll back a schema change. @@ -46,6 +46,6 @@ schema_config: period: 24h prefix: loki_ops_index_ object_store: gcs - schema: v12 + schema: v13 store: tsdb ``` diff --git a/docs/sources/operations/storage/tsdb.md b/docs/sources/operations/storage/tsdb.md index 82e3a6dbf33db..d7c315bdf5e16 100644 --- a/docs/sources/operations/storage/tsdb.md +++ b/docs/sources/operations/storage/tsdb.md @@ -29,7 +29,7 @@ schema_config: period: 24h prefix: index_ object_store: gcs - schema: v12 + schema: v13 store: tsdb storage_config: diff --git a/docs/sources/setup/migrate/migrate-to-tsdb/_index.md b/docs/sources/setup/migrate/migrate-to-tsdb/_index.md index b9a1f478d359d..3407345b4acbf 100644 --- a/docs/sources/setup/migrate/migrate-to-tsdb/_index.md +++ b/docs/sources/setup/migrate/migrate-to-tsdb/_index.md @@ -39,7 +39,7 @@ schema_config: - from: 2023-10-20 ① store: tsdb ② object_store: filesystem ③ - schema: v12 ④ + schema: v13 ④ index: prefix: index_ period: 24h @@ -51,7 +51,7 @@ schema_config: ③ This sample configuration uses filesystem as the storage in both the periods. If you want to use a different storage for the TSDB index and chunks, you can specify a different `object_store` in the new period. -④ Update the schema to v12 which is the recommended version at the time of writing. Please refer to the [configure page]({{< relref "../../../configure#period_config" >}}) for the current recommend version. +④ Update the schema to v13 which is the recommended version at the time of writing. Please refer to the [configure page]({{< relref "../../../configure#period_config" >}}) for the current recommend version. ### Configure TSDB shipper diff --git a/docs/sources/storage/_index.md b/docs/sources/storage/_index.md index e7517e5c00353..2c967d2fc4b92 100644 --- a/docs/sources/storage/_index.md +++ b/docs/sources/storage/_index.md @@ -127,7 +127,7 @@ This storage type for indexes is deprecated and may be removed in future major v ## Schema Config Loki aims to be backwards compatible and over the course of its development has had many internal changes that facilitate better and more efficient storage/querying. Loki allows incrementally upgrading to these new storage _schemas_ and can query across them transparently. 
This makes upgrading a breeze. -For instance, this is what it looks like when migrating from BoltDB with v11 schema to TSDB with v12 schema starting 2023-07-01: +For instance, this is what it looks like when migrating from BoltDB with v11 schema to TSDB with v13 schema starting 2023-07-01: ```yaml schema_config: @@ -142,13 +142,13 @@ schema_config: - from: 2023-07-01 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h ``` -For all data ingested before 2023-07-01, Loki used BoltDB with the v11 schema, and then switched after that point to the more effective TSDB with the v12 schema. This dramatically simplifies upgrading, ensuring it's simple to take advantage of new storage optimizations. These configs should be immutable for as long as you care about retention. +For all data ingested before 2023-07-01, Loki used BoltDB with the v11 schema, and then switched after that point to the more effective TSDB with the v13 schema. This dramatically simplifies upgrading, ensuring it's simple to take advantage of new storage optimizations. These configs should be immutable for as long as you care about retention. ## Table Manager (deprecated) @@ -190,7 +190,7 @@ When a new schema is released and you want to gain the advantages it provides, y First, you'll want to create a new [period_config]({{< relref "../configure#period_config" >}}) entry in your [schema_config]({{< relref "../configure#schema_config" >}}). The important thing to remember here is to set this at some point in the _future_ and then roll out the config file changes to Loki. This allows the table manager to create the required table in advance of writes and ensures that existing data isn't queried as if it adheres to the new schema. -As an example, let's say it's 2023-07-14 and we want to start using the `v12` schema on the 20th: +As an example, let's say it's 2023-07-14 and we want to start using the `v13` schema on the 20th: ```yaml schema_config: configs: @@ -204,7 +204,7 @@ schema_config: - from: 2023-07-20 store: tsdb object_store: filesystem - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -243,7 +243,7 @@ schema_config: - from: 2020-07-01 store: tsdb object_store: gcs - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -266,7 +266,7 @@ schema_config: - from: 2020-07-01 store: tsdb object_store: aws - schema: v12 + schema: v13 index: prefix: index_ period: 24h @@ -352,7 +352,7 @@ schema_config: period: 24h prefix: index_ object_store: azure - schema: v12 + schema: v13 store: tsdb storage_config: azure: @@ -388,7 +388,7 @@ schema_config: period: 24h prefix: index_ object_store: azure - schema: v12 + schema: v13 store: tsdb storage_config: azure: @@ -420,7 +420,7 @@ schema_config: period: 24h prefix: loki_index_ object_store: cos - schema: v12 + schema: v13 store: tsdb storage_config: @@ -489,7 +489,7 @@ schema_config: - from: 2020-07-01 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/examples/getting-started/loki-config.yaml b/examples/getting-started/loki-config.yaml index d4d00904b2f16..73ca66f78796a 100644 --- a/examples/getting-started/loki-config.yaml +++ b/examples/getting-started/loki-config.yaml @@ -9,7 +9,7 @@ schema_config: - from: 2021-08-01 store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/production/docker/config/loki.yaml b/production/docker/config/loki.yaml index 0a124e5ccfaae..512badb27f8d2 100644 --- 
a/production/docker/config/loki.yaml +++ b/production/docker/config/loki.yaml @@ -97,6 +97,13 @@ schema_config: index: prefix: index_ period: 24h + - from: 2024-03-29 + store: tsdb + object_store: s3 + schema: v13 + index: + prefix: index_ + period: 24h limits_config: diff --git a/production/nomad/loki-distributed/config.yml b/production/nomad/loki-distributed/config.yml index 48fc8e166c688..a6abc8ae76d4e 100644 --- a/production/nomad/loki-distributed/config.yml +++ b/production/nomad/loki-distributed/config.yml @@ -72,9 +72,9 @@ frontend_worker: schema_config: configs: - from: 2022-05-15 - store: boltdb-shipper + store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/production/nomad/loki-simple/config.yml b/production/nomad/loki-simple/config.yml index 79b1d39d57a92..750c6c483cf97 100644 --- a/production/nomad/loki-simple/config.yml +++ b/production/nomad/loki-simple/config.yml @@ -27,9 +27,9 @@ ingester: schema_config: configs: - from: 2022-05-15 - store: boltdb-shipper + store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h diff --git a/production/nomad/loki/config.yml b/production/nomad/loki/config.yml index ceeda7d2e49ef..492f9d59a319d 100644 --- a/production/nomad/loki/config.yml +++ b/production/nomad/loki/config.yml @@ -27,9 +27,9 @@ ingester: schema_config: configs: - from: 2022-05-15 - store: boltdb-shipper + store: tsdb object_store: s3 - schema: v12 + schema: v13 index: prefix: index_ period: 24h From 246623f7d09039761d9950414064d6c1bf74207f Mon Sep 17 00:00:00 2001 From: Trevor Whitney Date: Fri, 29 Mar 2024 17:05:36 -0600 Subject: [PATCH 39/54] fix(detected_fields): fix issues with frontend integration (#12406) This PRs fixes issues we found when integrating with the frontend * the `/experimental` api made it difficult to interact with using the existing datasource, so move to `v1/detected_fields` * the config flag was considered cumbersome as the only potential negative impact of the endpoint is when it is used, and nothing is currently using it * the use of an enum in the protobuf produced unexpected results in the json, so type was converted to string --- cmd/loki/loki-local-experimental-config.yaml | 53 --- docs/sources/configure/_index.md | 4 - pkg/logproto/extensions.go | 18 +- pkg/logproto/logproto.pb.go | 383 +++++++++---------- pkg/logproto/logproto.proto | 11 +- pkg/loki/modules.go | 5 +- pkg/lokifrontend/config.go | 3 - pkg/querier/queryrange/codec.go | 4 +- pkg/querier/queryrange/roundtrip.go | 2 +- 9 files changed, 194 insertions(+), 289 deletions(-) delete mode 100644 cmd/loki/loki-local-experimental-config.yaml diff --git a/cmd/loki/loki-local-experimental-config.yaml b/cmd/loki/loki-local-experimental-config.yaml deleted file mode 100644 index 03734fec08b49..0000000000000 --- a/cmd/loki/loki-local-experimental-config.yaml +++ /dev/null @@ -1,53 +0,0 @@ -auth_enabled: false - -server: - http_listen_port: 3100 - grpc_listen_port: 9096 - -common: - instance_addr: 127.0.0.1 - path_prefix: /tmp/loki - storage: - filesystem: - chunks_directory: /tmp/loki/chunks - rules_directory: /tmp/loki/rules - replication_factor: 1 - ring: - kvstore: - store: inmemory - -query_range: - results_cache: - cache: - embedded_cache: - enabled: true - max_size_mb: 100 - -schema_config: - configs: - - from: 2020-10-24 - store: tsdb - object_store: filesystem - schema: v13 - index: - prefix: index_ - period: 24h - -frontend: - experimental_apis_enabled: true - -ruler: - alertmanager_url: 
http://localhost:9093 - -# By default, Loki will send anonymous, but uniquely-identifiable usage and configuration -# analytics to Grafana Labs. These statistics are sent to https://stats.grafana.org/ -# -# Statistics help us better understand how Loki is used, and they show us performance -# levels for most users. This helps us prioritize features and documentation. -# For more information on what's sent, look at -# https://github.com/grafana/loki/blob/main/pkg/analytics/stats.go -# Refer to the buildReport method to see what goes into a report. -# -# If you would like to disable reporting, uncomment the following lines: -#analytics: -# reporting_enabled: false diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index dc0716a34fa23..82cb0ecadea03 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -817,10 +817,6 @@ The `frontend` block configures the Loki query-frontend. # The TLS configuration. [tail_tls_config: ] - -# Whether to enable experimental APIs in the frontend. -# CLI flag: -frontend.experimental-apis-enabled -[experimental_apis_enabled: | default = false] ``` ### query_range diff --git a/pkg/logproto/extensions.go b/pkg/logproto/extensions.go index e3996fbe69cbd..9c0e5c3d432d5 100644 --- a/pkg/logproto/extensions.go +++ b/pkg/logproto/extensions.go @@ -141,11 +141,17 @@ func (m *Shard) SpaceFor(stats *IndexStatsResponse, targetShardBytes uint64) boo return newDelta <= curDelta } +type DetectedFieldType string + const ( - DetectedFieldString DetectedFieldType = 0 - DetectedFieldInt DetectedFieldType = 1 - DetectedFieldFloat DetectedFieldType = 2 - DetectedFieldBoolean DetectedFieldType = 3 - DetectedFieldDuration DetectedFieldType = 4 - DetectedFieldBytes DetectedFieldType = 5 + DetectedFieldString DetectedFieldType = "string" + DetectedFieldInt DetectedFieldType = "int" + DetectedFieldFloat DetectedFieldType = "float" + DetectedFieldBoolean DetectedFieldType = "boolean" + DetectedFieldDuration DetectedFieldType = "duration" + DetectedFieldBytes DetectedFieldType = "bytes" ) + +func (d DetectedFieldType) String() string { + return string(d) +} diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 1747d4dda7d5e..2b794a5f899c2 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -63,39 +63,6 @@ func (Direction) EnumDescriptor() ([]byte, []int) { return fileDescriptor_c28a5f14f1f4c79a, []int{0} } -type DetectedFieldType int32 - -const ( - STRING DetectedFieldType = 0 - INT DetectedFieldType = 1 - FLOAT DetectedFieldType = 2 - BOOL DetectedFieldType = 3 - DURATION DetectedFieldType = 4 - BYTES DetectedFieldType = 5 -) - -var DetectedFieldType_name = map[int32]string{ - 0: "STRING", - 1: "INT", - 2: "FLOAT", - 3: "BOOL", - 4: "DURATION", - 5: "BYTES", -} - -var DetectedFieldType_value = map[string]int32{ - "STRING": 0, - "INT": 1, - "FLOAT": 2, - "BOOL": 3, - "DURATION": 4, - "BYTES": 5, -} - -func (DetectedFieldType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_c28a5f14f1f4c79a, []int{1} -} - type StreamRatesRequest struct { } @@ -2649,7 +2616,7 @@ func (m *DetectedFieldsResponse) GetFields() []*DetectedField { type DetectedField struct { Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` - Type DetectedFieldType `protobuf:"varint,2,opt,name=type,proto3,enum=logproto.DetectedFieldType" json:"type,omitempty"` + Type DetectedFieldType `protobuf:"bytes,2,opt,name=type,proto3,casttype=DetectedFieldType" json:"type,omitempty"` 
Cardinality uint64 `protobuf:"varint,3,opt,name=cardinality,proto3" json:"cardinality,omitempty"` } @@ -2696,7 +2663,7 @@ func (m *DetectedField) GetType() DetectedFieldType { if m != nil { return m.Type } - return STRING + return "" } func (m *DetectedField) GetCardinality() uint64 { @@ -2708,7 +2675,6 @@ func (m *DetectedField) GetCardinality() uint64 { func init() { proto.RegisterEnum("logproto.Direction", Direction_name, Direction_value) - proto.RegisterEnum("logproto.DetectedFieldType", DetectedFieldType_name, DetectedFieldType_value) proto.RegisterType((*StreamRatesRequest)(nil), "logproto.StreamRatesRequest") proto.RegisterType((*StreamRatesResponse)(nil), "logproto.StreamRatesResponse") proto.RegisterType((*StreamRate)(nil), "logproto.StreamRate") @@ -2763,161 +2729,157 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2455 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4d, 0x6f, 0x1b, 0xc7, - 0x95, 0xcb, 0x5d, 0x7e, 0x3d, 0x52, 0x32, 0x3d, 0x62, 0x6c, 0x82, 0x76, 0x48, 0x79, 0x90, 0x3a, - 0x82, 0xe3, 0x88, 0xb1, 0xdc, 0xb8, 0xa9, 0xdd, 0xa0, 0x15, 0x25, 0x4b, 0x91, 0x2d, 0x4b, 0xce, - 0x48, 0x71, 0x53, 0xa3, 0xad, 0xb1, 0x22, 0x47, 0xd4, 0x42, 0xe4, 0x2e, 0xbd, 0x3b, 0x8c, 0x4d, - 0xa0, 0x87, 0xfe, 0x81, 0xa0, 0xb9, 0x15, 0xbd, 0x14, 0x2d, 0x50, 0x20, 0x05, 0x8a, 0x5e, 0xfa, - 0x03, 0xda, 0x4b, 0x0f, 0xee, 0xcd, 0xb9, 0x05, 0x39, 0xb0, 0xb5, 0x7c, 0x29, 0x74, 0xca, 0xad, - 0xd7, 0x62, 0x3e, 0xf6, 0x8b, 0xa2, 0xdc, 0x50, 0x75, 0x51, 0xf8, 0xc2, 0x9d, 0x79, 0xf3, 0xe6, - 0xcd, 0xfb, 0x9a, 0xf7, 0x31, 0x84, 0x73, 0xbd, 0xfd, 0x76, 0xbd, 0xe3, 0xb4, 0x7b, 0xae, 0xc3, - 0x9c, 0x60, 0x30, 0x2f, 0x7e, 0x51, 0xd6, 0x9f, 0x57, 0x4a, 0x6d, 0xa7, 0xed, 0x48, 0x1c, 0x3e, - 0x92, 0xeb, 0x95, 0x5a, 0xdb, 0x71, 0xda, 0x1d, 0x5a, 0x17, 0xb3, 0x9d, 0xfe, 0x6e, 0x9d, 0x59, - 0x5d, 0xea, 0x31, 0xb3, 0xdb, 0x53, 0x08, 0xb3, 0x8a, 0xfa, 0xc3, 0x4e, 0xd7, 0x69, 0xd1, 0x4e, - 0xdd, 0x63, 0x26, 0xf3, 0xe4, 0xaf, 0xc2, 0x98, 0xe1, 0x18, 0xbd, 0xbe, 0xb7, 0x27, 0x7e, 0x24, - 0x10, 0x97, 0x00, 0x6d, 0x31, 0x97, 0x9a, 0x5d, 0x62, 0x32, 0xea, 0x11, 0xfa, 0xb0, 0x4f, 0x3d, - 0x86, 0xef, 0xc0, 0x4c, 0x0c, 0xea, 0xf5, 0x1c, 0xdb, 0xa3, 0xe8, 0x1a, 0xe4, 0xbd, 0x10, 0x5c, - 0xd6, 0x66, 0xf5, 0xb9, 0xfc, 0x42, 0x69, 0x3e, 0x10, 0x25, 0xdc, 0x43, 0xa2, 0x88, 0xf8, 0xd7, - 0x1a, 0x40, 0xb8, 0x86, 0xaa, 0x00, 0x72, 0xf5, 0x03, 0xd3, 0xdb, 0x2b, 0x6b, 0xb3, 0xda, 0x9c, - 0x41, 0x22, 0x10, 0x74, 0x19, 0x4e, 0x87, 0xb3, 0x0d, 0x67, 0x6b, 0xcf, 0x74, 0x5b, 0xe5, 0xa4, - 0x40, 0x3b, 0xba, 0x80, 0x10, 0x18, 0xae, 0xc9, 0x68, 0x59, 0x9f, 0xd5, 0xe6, 0x74, 0x22, 0xc6, - 0xe8, 0x0c, 0xa4, 0x19, 0xb5, 0x4d, 0x9b, 0x95, 0x8d, 0x59, 0x6d, 0x2e, 0x47, 0xd4, 0x8c, 0xc3, - 0xb9, 0xec, 0xd4, 0x2b, 0xa7, 0x66, 0xb5, 0xb9, 0x29, 0xa2, 0x66, 0xf8, 0x73, 0x1d, 0x0a, 0x1f, - 0xf6, 0xa9, 0x3b, 0x50, 0x0a, 0x40, 0x55, 0xc8, 0x7a, 0xb4, 0x43, 0x9b, 0xcc, 0x71, 0x05, 0x83, - 0xb9, 0x46, 0xb2, 0xac, 0x91, 0x00, 0x86, 0x4a, 0x90, 0xea, 0x58, 0x5d, 0x8b, 0x09, 0xb6, 0xa6, - 0x88, 0x9c, 0xa0, 0xeb, 0x90, 0xf2, 0x98, 0xe9, 0x32, 0xc1, 0x4b, 0x7e, 0xa1, 0x32, 0x2f, 0x8d, - 0x36, 0xef, 0x1b, 0x6d, 0x7e, 0xdb, 0x37, 0x5a, 0x23, 0xfb, 0x64, 0x58, 0x4b, 0x7c, 0xf6, 0xf7, - 0x9a, 0x46, 0xe4, 0x16, 0x74, 0x0d, 0x74, 0x6a, 0xb7, 0x04, 0xbf, 0xdf, 0x74, 0x27, 0xdf, 0x80, - 0xae, 0x40, 0xae, 0x65, 0xb9, 0xb4, 0xc9, 0x2c, 0xc7, 0x16, 0x52, 0x4d, 0x2f, 0xcc, 0x84, 0x16, - 
0x59, 0xf6, 0x97, 0x48, 0x88, 0x85, 0x2e, 0x43, 0xda, 0xe3, 0xaa, 0xf3, 0xca, 0x99, 0x59, 0x7d, - 0x2e, 0xd7, 0x28, 0x1d, 0x0e, 0x6b, 0x45, 0x09, 0xb9, 0xec, 0x74, 0x2d, 0x46, 0xbb, 0x3d, 0x36, - 0x20, 0x0a, 0x07, 0x5d, 0x82, 0x4c, 0x8b, 0x76, 0x28, 0x37, 0x78, 0x56, 0x18, 0xbc, 0x18, 0x21, - 0x2f, 0x16, 0x88, 0x8f, 0x80, 0xee, 0x83, 0xd1, 0xeb, 0x98, 0x76, 0x39, 0x27, 0xa4, 0x98, 0x0e, - 0x11, 0xef, 0x76, 0x4c, 0xbb, 0x71, 0xed, 0xab, 0x61, 0x6d, 0xa1, 0x6d, 0xb1, 0xbd, 0xfe, 0xce, - 0x7c, 0xd3, 0xe9, 0xd6, 0xdb, 0xae, 0xb9, 0x6b, 0xda, 0x66, 0xbd, 0xe3, 0xec, 0x5b, 0x75, 0xee, - 0x9c, 0x0f, 0xfb, 0xd4, 0xb5, 0xa8, 0x5b, 0xe7, 0x34, 0xe6, 0x85, 0x3d, 0xf8, 0x3e, 0x22, 0x68, - 0xde, 0x32, 0xb2, 0xe9, 0x62, 0x06, 0x0f, 0x93, 0x80, 0xb6, 0xcc, 0x6e, 0xaf, 0x43, 0x27, 0xb2, - 0x57, 0x60, 0x99, 0xe4, 0x89, 0x2d, 0xa3, 0x4f, 0x6a, 0x99, 0x50, 0xcd, 0xc6, 0x64, 0x6a, 0x4e, - 0x7d, 0x53, 0x35, 0xa7, 0x5f, 0xbe, 0x9a, 0x71, 0x19, 0x0c, 0x3e, 0x43, 0x45, 0xd0, 0x5d, 0xf3, - 0x91, 0x50, 0x66, 0x81, 0xf0, 0x21, 0x5e, 0x87, 0xb4, 0x64, 0x04, 0x55, 0x46, 0xb5, 0x1d, 0xbf, - 0x19, 0xa1, 0xa6, 0x75, 0x5f, 0x87, 0xc5, 0x50, 0x87, 0xba, 0xd0, 0x0e, 0xfe, 0x8d, 0x06, 0x53, - 0xca, 0x84, 0x2a, 0xba, 0xec, 0x40, 0x46, 0xde, 0x6e, 0x3f, 0xb2, 0x9c, 0x1d, 0x8d, 0x2c, 0x8b, - 0x2d, 0xb3, 0xc7, 0xa8, 0xdb, 0xa8, 0x3f, 0x19, 0xd6, 0xb4, 0xaf, 0x86, 0xb5, 0x37, 0x5f, 0x24, - 0xa5, 0x08, 0x72, 0x2a, 0xea, 0xf8, 0x84, 0xd1, 0x5b, 0x82, 0x3b, 0xe6, 0x29, 0x3f, 0x38, 0x35, - 0x2f, 0x03, 0xe4, 0x9a, 0xdd, 0xa6, 0x1e, 0xa7, 0x6c, 0x70, 0x13, 0x12, 0x89, 0x83, 0x7f, 0x06, - 0x33, 0x31, 0x57, 0x53, 0x7c, 0xbe, 0x07, 0x69, 0x8f, 0x2b, 0xd0, 0x67, 0x33, 0x62, 0xa8, 0x2d, - 0x01, 0x6f, 0x4c, 0x2b, 0xfe, 0xd2, 0x72, 0x4e, 0x14, 0xfe, 0x64, 0xa7, 0xff, 0x55, 0x83, 0xc2, - 0xba, 0xb9, 0x43, 0x3b, 0xbe, 0x8f, 0x23, 0x30, 0x6c, 0xb3, 0x4b, 0x95, 0xc6, 0xc5, 0x98, 0x07, - 0xb4, 0x4f, 0xcc, 0x4e, 0x9f, 0x4a, 0x92, 0x59, 0xa2, 0x66, 0x93, 0x46, 0x22, 0xed, 0xc4, 0x91, - 0x48, 0x0b, 0xfd, 0xbd, 0x04, 0x29, 0xee, 0x59, 0x03, 0x11, 0x85, 0x72, 0x44, 0x4e, 0xf0, 0x9b, - 0x30, 0xa5, 0xa4, 0x50, 0xea, 0x0b, 0x59, 0xe6, 0xea, 0xcb, 0xf9, 0x2c, 0xe3, 0x2e, 0xa4, 0xa5, - 0xb6, 0xd1, 0x1b, 0x90, 0x0b, 0xb2, 0x9b, 0x90, 0x56, 0x6f, 0xa4, 0x0f, 0x87, 0xb5, 0x24, 0xf3, - 0x48, 0xb8, 0x80, 0x6a, 0x90, 0x12, 0x3b, 0x85, 0xe4, 0x5a, 0x23, 0x77, 0x38, 0xac, 0x49, 0x00, - 0x91, 0x1f, 0x74, 0x1e, 0x8c, 0x3d, 0x9e, 0x60, 0xb8, 0x0a, 0x8c, 0x46, 0xf6, 0x70, 0x58, 0x13, - 0x73, 0x22, 0x7e, 0xf1, 0x2a, 0x14, 0xd6, 0x69, 0xdb, 0x6c, 0x0e, 0xd4, 0xa1, 0x25, 0x9f, 0x1c, - 0x3f, 0x50, 0xf3, 0x69, 0x5c, 0x80, 0x42, 0x70, 0xe2, 0x83, 0xae, 0xa7, 0x9c, 0x3a, 0x1f, 0xc0, - 0xee, 0x78, 0xf8, 0x57, 0x1a, 0x28, 0x3b, 0x23, 0x0c, 0xe9, 0x0e, 0x97, 0xd5, 0x53, 0x31, 0x08, - 0x0e, 0x87, 0x35, 0x05, 0x21, 0xea, 0x8b, 0x6e, 0x40, 0xc6, 0x13, 0x27, 0x72, 0x62, 0xa3, 0xee, - 0x23, 0x16, 0x1a, 0xa7, 0xb8, 0x1b, 0x1c, 0x0e, 0x6b, 0x3e, 0x22, 0xf1, 0x07, 0x68, 0x3e, 0x96, - 0x39, 0xa5, 0x60, 0xd3, 0x87, 0xc3, 0x5a, 0x04, 0x1a, 0xcd, 0xa4, 0xf8, 0x5f, 0x1a, 0xe4, 0xb7, - 0x4d, 0x2b, 0x70, 0xa1, 0xb2, 0x6f, 0xa2, 0x30, 0x46, 0x4a, 0x00, 0xbf, 0xd2, 0x2d, 0xda, 0x31, - 0x07, 0x2b, 0x8e, 0x2b, 0xe8, 0x4e, 0x91, 0x60, 0x1e, 0x26, 0x3b, 0x63, 0x6c, 0xb2, 0x4b, 0x4d, - 0x1e, 0x52, 0xff, 0x87, 0x01, 0xec, 0x96, 0x91, 0x4d, 0x16, 0x75, 0xfc, 0x47, 0x0d, 0x0a, 0x52, - 0x72, 0xe5, 0x76, 0x3f, 0x86, 0xb4, 0x54, 0x8c, 0x90, 0xfd, 0x05, 0xc1, 0xe5, 0xad, 0x49, 0x02, - 0x8b, 0xa2, 0x89, 0xbe, 0x0f, 0xd3, 0x2d, 0xd7, 0xe9, 0xf5, 0x68, 0x6b, 0x4b, 0x85, 0xb0, 0xe4, - 0x68, 0x08, 0x5b, 0x8e, 
0xae, 0x93, 0x11, 0x74, 0xfc, 0x37, 0x0d, 0xa6, 0x54, 0xb4, 0x50, 0xb6, - 0x0a, 0xf4, 0xab, 0x9d, 0x38, 0x65, 0x25, 0x27, 0x4d, 0x59, 0x67, 0x20, 0xdd, 0x76, 0x9d, 0x7e, - 0xcf, 0x2b, 0xeb, 0xf2, 0x6e, 0xca, 0xd9, 0x64, 0xa9, 0x0c, 0xdf, 0x82, 0x69, 0x5f, 0x94, 0x63, - 0x42, 0x66, 0x65, 0x34, 0x64, 0xae, 0xb5, 0xa8, 0xcd, 0xac, 0x5d, 0x2b, 0x08, 0x82, 0x0a, 0x1f, - 0xff, 0x42, 0x83, 0xe2, 0x28, 0x0a, 0x5a, 0x8e, 0xdc, 0x33, 0x4e, 0xee, 0xe2, 0xf1, 0xe4, 0xe6, - 0x45, 0xf0, 0xf1, 0x6e, 0xda, 0xcc, 0x1d, 0xf8, 0xa4, 0xe5, 0xde, 0xca, 0xbb, 0x90, 0x8f, 0x2c, - 0xf2, 0x14, 0xb5, 0x4f, 0xd5, 0xcd, 0x20, 0x7c, 0x18, 0x86, 0x84, 0xa4, 0x0c, 0x68, 0x62, 0x82, - 0x7f, 0xa9, 0xc1, 0x54, 0xcc, 0x96, 0xe8, 0x3d, 0x30, 0x76, 0x5d, 0xa7, 0x3b, 0x91, 0xa1, 0xc4, - 0x0e, 0xf4, 0x6d, 0x48, 0x32, 0x67, 0x22, 0x33, 0x25, 0x99, 0xc3, 0xad, 0xa4, 0xc4, 0xd7, 0x65, - 0x75, 0x2b, 0x67, 0xf8, 0x5d, 0xc8, 0x09, 0x81, 0xee, 0x9a, 0x96, 0x3b, 0x36, 0x5b, 0x8c, 0x17, - 0xe8, 0x06, 0x9c, 0x92, 0x91, 0x70, 0xfc, 0xe6, 0xc2, 0xb8, 0xcd, 0x05, 0x7f, 0xf3, 0x39, 0x48, - 0x2d, 0xed, 0xf5, 0xed, 0x7d, 0xbe, 0xa5, 0x65, 0x32, 0xd3, 0xdf, 0xc2, 0xc7, 0xf8, 0x35, 0x98, - 0xe1, 0x77, 0x90, 0xba, 0xde, 0x92, 0xd3, 0xb7, 0x99, 0xdf, 0x5d, 0x5c, 0x86, 0x52, 0x1c, 0xac, - 0xbc, 0xa4, 0x04, 0xa9, 0x26, 0x07, 0x08, 0x1a, 0x53, 0x44, 0x4e, 0xf0, 0xef, 0x34, 0x40, 0xab, - 0x94, 0x89, 0x53, 0xd6, 0x96, 0x83, 0xeb, 0x51, 0x81, 0x6c, 0xd7, 0x64, 0xcd, 0x3d, 0xea, 0x7a, - 0x7e, 0x0d, 0xe2, 0xcf, 0xff, 0x1f, 0xd5, 0x1e, 0xbe, 0x02, 0x33, 0x31, 0x2e, 0x95, 0x4c, 0x15, - 0xc8, 0x36, 0x15, 0x4c, 0xe5, 0xbb, 0x60, 0x8e, 0xff, 0x94, 0x84, 0xac, 0xd8, 0x40, 0xe8, 0x2e, - 0xba, 0x02, 0xf9, 0x5d, 0xcb, 0x6e, 0x53, 0xb7, 0xe7, 0x5a, 0x4a, 0x05, 0x46, 0xe3, 0xd4, 0xe1, - 0xb0, 0x16, 0x05, 0x93, 0xe8, 0x04, 0xbd, 0x0d, 0x99, 0xbe, 0x47, 0xdd, 0x07, 0x96, 0xbc, 0xe9, - 0xb9, 0x46, 0xe9, 0x60, 0x58, 0x4b, 0x7f, 0xe4, 0x51, 0x77, 0x6d, 0x99, 0x67, 0x9e, 0xbe, 0x18, - 0x11, 0xf9, 0x6d, 0xa1, 0xdb, 0xca, 0x4d, 0x45, 0x11, 0xd6, 0xf8, 0x0e, 0x67, 0x7f, 0x24, 0xd4, - 0xf5, 0x5c, 0xa7, 0x4b, 0xd9, 0x1e, 0xed, 0x7b, 0xf5, 0xa6, 0xd3, 0xed, 0x3a, 0x76, 0x5d, 0xf4, - 0x92, 0x42, 0x68, 0x9e, 0x3e, 0xf9, 0x76, 0xe5, 0xb9, 0xdb, 0x90, 0x61, 0x7b, 0xae, 0xd3, 0x6f, - 0xef, 0x89, 0xac, 0xa0, 0x37, 0xae, 0x4f, 0x4e, 0xcf, 0xa7, 0x40, 0xfc, 0x01, 0xba, 0xc0, 0xb5, - 0x45, 0x9b, 0xfb, 0x5e, 0xbf, 0x2b, 0x3b, 0xb4, 0x46, 0xea, 0x70, 0x58, 0xd3, 0xde, 0x26, 0x01, - 0x18, 0x7f, 0x9a, 0x84, 0x9a, 0x70, 0xd4, 0x7b, 0xa2, 0x6c, 0x58, 0x71, 0xdc, 0x3b, 0x94, 0xb9, - 0x56, 0x73, 0xc3, 0xec, 0x52, 0xdf, 0x37, 0x6a, 0x90, 0xef, 0x0a, 0xe0, 0x83, 0xc8, 0x15, 0x80, - 0x6e, 0x80, 0x87, 0x5e, 0x07, 0x10, 0x77, 0x46, 0xae, 0xcb, 0xdb, 0x90, 0x13, 0x10, 0xb1, 0xbc, - 0x14, 0xd3, 0x54, 0x7d, 0x42, 0xc9, 0x94, 0x86, 0xd6, 0x46, 0x35, 0x34, 0x31, 0x9d, 0x40, 0x2d, - 0x51, 0x5f, 0x4f, 0xc5, 0x7d, 0x1d, 0x7f, 0xa1, 0x41, 0x75, 0xdd, 0xe7, 0xfc, 0x84, 0xea, 0xf0, - 0xe5, 0x4d, 0xbe, 0x24, 0x79, 0xf5, 0xff, 0x4e, 0x5e, 0x5c, 0x05, 0x58, 0xb7, 0x6c, 0xba, 0x62, - 0x75, 0x18, 0x75, 0xc7, 0x74, 0x22, 0x9f, 0xea, 0x61, 0x48, 0x20, 0x74, 0xd7, 0x97, 0x73, 0x29, - 0x12, 0x87, 0x5f, 0x86, 0x18, 0xc9, 0x97, 0x68, 0x36, 0x7d, 0x24, 0x44, 0xed, 0x43, 0x66, 0x57, - 0x88, 0x27, 0x53, 0x6a, 0xec, 0x19, 0x25, 0x94, 0xbd, 0x71, 0x43, 0x1d, 0x7e, 0xf5, 0x45, 0x05, - 0x89, 0x78, 0xf5, 0xa9, 0x7b, 0x03, 0x9b, 0x99, 0x8f, 0x23, 0x9b, 0x89, 0x7f, 0x02, 0xfa, 0xa9, - 0x2a, 0xb7, 0x52, 0x63, 0xcb, 0x2d, 0xff, 0xe6, 0x9e, 0xbc, 0x67, 0x7c, 0x3f, 0x8c, 0x7d, 0xc2, - 0x1c, 0x2a, 0xf6, 0x5d, 0x04, 0xc3, 0xa5, 0xbb, 
0x7e, 0x92, 0x46, 0xe1, 0xb1, 0x01, 0xa6, 0x58, - 0xc7, 0x7f, 0xd6, 0xa0, 0xb8, 0x4a, 0x59, 0xbc, 0xfc, 0x79, 0x85, 0x8c, 0x89, 0x3f, 0x80, 0xd3, - 0x11, 0xfe, 0x95, 0xf4, 0x57, 0x47, 0x6a, 0x9e, 0xd7, 0x42, 0xf9, 0xd7, 0xec, 0x16, 0x7d, 0xac, - 0x7a, 0xc5, 0x78, 0xb9, 0x73, 0x17, 0xf2, 0x91, 0x45, 0xb4, 0x38, 0x52, 0xe8, 0x44, 0x5e, 0x76, - 0x82, 0x64, 0xdd, 0x28, 0x29, 0x99, 0x64, 0xb7, 0xa8, 0xca, 0xd8, 0xa0, 0x28, 0xd8, 0x02, 0x24, - 0xcc, 0x25, 0xc8, 0x46, 0xd3, 0x92, 0x80, 0xde, 0x0e, 0x2a, 0x9e, 0x60, 0x8e, 0x2e, 0x80, 0xe1, - 0x3a, 0x8f, 0xfc, 0x0a, 0x76, 0x2a, 0x3c, 0x92, 0x38, 0x8f, 0x88, 0x58, 0xc2, 0x37, 0x40, 0x27, - 0xce, 0x23, 0x54, 0x05, 0x70, 0x4d, 0xbb, 0x4d, 0xef, 0x05, 0x8d, 0x53, 0x81, 0x44, 0x20, 0xc7, - 0x94, 0x0c, 0x4b, 0x70, 0x3a, 0xca, 0x91, 0x34, 0xf7, 0x3c, 0x64, 0x3e, 0xec, 0x47, 0xd5, 0x55, - 0x1a, 0x51, 0x97, 0xec, 0xc1, 0x7d, 0x24, 0xee, 0x33, 0x10, 0xc2, 0xd1, 0x79, 0xc8, 0x31, 0x73, - 0xa7, 0x43, 0x37, 0xc2, 0x00, 0x17, 0x02, 0xf8, 0x2a, 0xef, 0xf9, 0xee, 0x45, 0x6a, 0x9f, 0x10, - 0x80, 0x2e, 0x41, 0x31, 0xe4, 0xf9, 0xae, 0x4b, 0x77, 0xad, 0xc7, 0xc2, 0xc2, 0x05, 0x72, 0x04, - 0x8e, 0xe6, 0xe0, 0x54, 0x08, 0xdb, 0x12, 0x35, 0x86, 0x21, 0x50, 0x47, 0xc1, 0x5c, 0x37, 0x42, - 0xdc, 0x9b, 0x0f, 0xfb, 0x66, 0x47, 0xdc, 0xbc, 0x02, 0x89, 0x40, 0xf0, 0x5f, 0x34, 0x38, 0x2d, - 0x4d, 0xcd, 0xbb, 0xfd, 0x57, 0xd1, 0xeb, 0x3f, 0xd7, 0x00, 0x45, 0x25, 0x50, 0xae, 0xf5, 0xad, - 0xe8, 0x33, 0x0e, 0x2f, 0x62, 0xf2, 0xa2, 0x95, 0x95, 0xa0, 0xf0, 0x25, 0x06, 0x43, 0x5a, 0x14, - 0x42, 0xb2, 0xa7, 0x36, 0x64, 0xaf, 0x2c, 0x21, 0x44, 0x7d, 0x79, 0x8b, 0xbf, 0x33, 0x60, 0xd4, - 0x53, 0x9d, 0xae, 0x68, 0xf1, 0x05, 0x80, 0xc8, 0x0f, 0x3f, 0x8b, 0xda, 0x4c, 0x78, 0x8d, 0x11, - 0x9e, 0xa5, 0x40, 0xc4, 0x1f, 0xe0, 0x3f, 0x24, 0x61, 0xea, 0x9e, 0xd3, 0xe9, 0x87, 0x29, 0xf1, - 0x55, 0x4a, 0x15, 0xb1, 0xf6, 0x3b, 0xe5, 0xb7, 0xdf, 0x08, 0x0c, 0x8f, 0xd1, 0x9e, 0xf0, 0x2c, - 0x9d, 0x88, 0x31, 0xc2, 0x50, 0x60, 0xa6, 0xdb, 0xa6, 0x4c, 0xf6, 0x35, 0xe5, 0xb4, 0x28, 0x38, - 0x63, 0x30, 0x34, 0x0b, 0x79, 0xb3, 0xdd, 0x76, 0x69, 0xdb, 0x64, 0xb4, 0x31, 0x28, 0x67, 0xc4, - 0x61, 0x51, 0x10, 0xfe, 0x18, 0xa6, 0x7d, 0x65, 0x29, 0x93, 0xbe, 0x03, 0x99, 0x4f, 0x04, 0x64, - 0xcc, 0x93, 0x97, 0x44, 0x55, 0x61, 0xcc, 0x47, 0x8b, 0xbf, 0x8f, 0xfb, 0x3c, 0xe3, 0x5b, 0x90, - 0x96, 0xe8, 0xe8, 0x7c, 0xb4, 0x3b, 0x91, 0x6f, 0x33, 0x7c, 0xae, 0x5a, 0x0d, 0x0c, 0x69, 0x49, - 0x48, 0x19, 0x5e, 0xf8, 0x86, 0x84, 0x10, 0xf5, 0xc5, 0xbf, 0xd5, 0xe0, 0xb5, 0x65, 0xca, 0x68, - 0x93, 0xd1, 0xd6, 0x8a, 0x45, 0x3b, 0xad, 0x93, 0x36, 0xce, 0xda, 0x89, 0x1b, 0xe7, 0x71, 0x6f, - 0x5f, 0x7a, 0xf4, 0xed, 0x6b, 0x0d, 0xce, 0x8c, 0xb2, 0xa8, 0x34, 0x5a, 0x87, 0xf4, 0xae, 0x80, - 0x1c, 0x7d, 0xea, 0x8c, 0xed, 0x20, 0x0a, 0x0d, 0x3f, 0x86, 0xa9, 0xd8, 0x82, 0xd0, 0x30, 0xb7, - 0xa8, 0x8a, 0x76, 0x72, 0x82, 0xea, 0x60, 0xb0, 0x41, 0x4f, 0x06, 0xb9, 0xe9, 0x85, 0x73, 0xc7, - 0x50, 0xdd, 0x1e, 0xf4, 0x28, 0x11, 0x88, 0xdc, 0x1d, 0x9a, 0xa6, 0xdb, 0xb2, 0x6c, 0xb3, 0x63, - 0x31, 0xc9, 0xbe, 0x41, 0xa2, 0xa0, 0x4b, 0x17, 0x21, 0x17, 0xfc, 0x8b, 0x80, 0xf2, 0x90, 0x59, - 0xd9, 0x24, 0x3f, 0x5c, 0x24, 0xcb, 0xc5, 0x04, 0x2a, 0x40, 0xb6, 0xb1, 0xb8, 0x74, 0x5b, 0xcc, - 0xb4, 0x4b, 0xf7, 0xe0, 0xf4, 0x91, 0x43, 0x10, 0x40, 0x7a, 0x6b, 0x9b, 0xac, 0x6d, 0xac, 0x16, - 0x13, 0x28, 0x03, 0xfa, 0xda, 0xc6, 0x76, 0x51, 0x43, 0x39, 0x48, 0xad, 0xac, 0x6f, 0x2e, 0x6e, - 0x17, 0x93, 0x28, 0x0b, 0x46, 0x63, 0x73, 0x73, 0xbd, 0xa8, 0x73, 0x62, 0xcb, 0x1f, 0x91, 0xc5, - 0xed, 0xb5, 0xcd, 0x8d, 0xa2, 0xc1, 0x51, 0x1a, 0x3f, 0xda, 0xbe, 0xb9, 
0x55, 0x4c, 0x2d, 0x7c, - 0x91, 0xf2, 0x53, 0x83, 0x8b, 0xbe, 0x07, 0x29, 0x19, 0xef, 0xcf, 0x84, 0x92, 0x45, 0xff, 0x07, - 0xa8, 0x9c, 0x3d, 0x02, 0x97, 0x0a, 0xc7, 0x89, 0x77, 0x34, 0xb4, 0x01, 0x79, 0x01, 0x54, 0x2f, - 0x7e, 0xe7, 0x47, 0x1f, 0xde, 0x62, 0x94, 0x5e, 0x3f, 0x66, 0x35, 0x42, 0xef, 0x3a, 0xa4, 0xc4, - 0xa5, 0x8a, 0x72, 0x13, 0x7d, 0xb1, 0x8d, 0x72, 0x13, 0x7b, 0x03, 0xc5, 0x09, 0xf4, 0x5d, 0x30, - 0x78, 0x0f, 0x8c, 0x22, 0x55, 0x41, 0xe4, 0xa1, 0xae, 0x72, 0x66, 0x14, 0x1c, 0x39, 0xf6, 0xfd, - 0xe0, 0xbd, 0xf1, 0xec, 0xe8, 0xbb, 0x87, 0xbf, 0xbd, 0x7c, 0x74, 0x21, 0x38, 0x79, 0x53, 0x3e, - 0x8c, 0xf9, 0xdd, 0x37, 0x7a, 0x3d, 0x7e, 0xd4, 0x48, 0xb3, 0x5e, 0xa9, 0x1e, 0xb7, 0x1c, 0x10, - 0x5c, 0x87, 0x7c, 0xa4, 0xf3, 0x8d, 0xaa, 0xf5, 0x68, 0xdb, 0x1e, 0x55, 0xeb, 0x98, 0x76, 0x19, - 0x27, 0xd0, 0x2a, 0x64, 0x79, 0x2d, 0xc5, 0x53, 0x0a, 0x3a, 0x37, 0x5a, 0x32, 0x45, 0x52, 0x65, - 0xe5, 0xfc, 0xf8, 0xc5, 0x80, 0xd0, 0x0f, 0x20, 0xb7, 0x4a, 0x99, 0x8a, 0x37, 0x67, 0x47, 0x03, - 0xd6, 0x18, 0x4d, 0xc5, 0x83, 0x1e, 0x4e, 0xa0, 0x8f, 0x45, 0x59, 0x17, 0xbf, 0xc1, 0xa8, 0x76, - 0xcc, 0x9d, 0x0a, 0xf8, 0x9a, 0x3d, 0x1e, 0xc1, 0xa7, 0xbc, 0xf0, 0x13, 0xff, 0xff, 0xd0, 0x65, - 0x93, 0x99, 0x68, 0x13, 0xa6, 0x85, 0xc8, 0xc1, 0x1f, 0xa6, 0x31, 0xd7, 0x3c, 0xf2, 0xef, 0x6c, - 0xcc, 0x35, 0x8f, 0xfe, 0x4b, 0x8b, 0x13, 0x8d, 0xfb, 0x4f, 0x9f, 0x55, 0x13, 0x5f, 0x3e, 0xab, - 0x26, 0xbe, 0x7e, 0x56, 0xd5, 0x7e, 0x7e, 0x50, 0xd5, 0x7e, 0x7f, 0x50, 0xd5, 0x9e, 0x1c, 0x54, - 0xb5, 0xa7, 0x07, 0x55, 0xed, 0x1f, 0x07, 0x55, 0xed, 0x9f, 0x07, 0xd5, 0xc4, 0xd7, 0x07, 0x55, - 0xed, 0xb3, 0xe7, 0xd5, 0xc4, 0xd3, 0xe7, 0xd5, 0xc4, 0x97, 0xcf, 0xab, 0x89, 0xfb, 0x6f, 0xfc, - 0x87, 0x1e, 0x43, 0x46, 0xc1, 0xb4, 0xf8, 0x5c, 0xfd, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x10, - 0x3a, 0x9f, 0xc0, 0xce, 0x1e, 0x00, 0x00, + // 2395 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4b, 0x6f, 0x1b, 0xc7, + 0x99, 0x4b, 0x2e, 0x5f, 0x1f, 0x29, 0x59, 0x1e, 0xd1, 0x36, 0xc1, 0xd8, 0xa4, 0x3c, 0x48, 0x1d, + 0xd5, 0x71, 0xc8, 0x58, 0x6e, 0xdc, 0xd4, 0x6e, 0xd0, 0x9a, 0x52, 0xec, 0xc8, 0x96, 0x1f, 0x19, + 0xb9, 0x6e, 0x60, 0xb4, 0x35, 0x56, 0xe4, 0x90, 0x5a, 0x88, 0xdc, 0xa5, 0x77, 0x87, 0xb1, 0x09, + 0xf4, 0xd0, 0x3f, 0x10, 0x34, 0xb7, 0xa2, 0x97, 0xa2, 0x05, 0x0a, 0xa4, 0x40, 0xd1, 0x4b, 0x7f, + 0x40, 0x7b, 0xe9, 0xc1, 0xbd, 0x39, 0xb7, 0x20, 0x07, 0xb6, 0x96, 0x2f, 0x85, 0x4e, 0xb9, 0x15, + 0xe8, 0xa9, 0x98, 0xd7, 0x3e, 0x28, 0xca, 0x0d, 0x55, 0x17, 0x85, 0x2f, 0xdc, 0x99, 0x6f, 0xbe, + 0xf9, 0xe6, 0x7b, 0xcd, 0xf7, 0x18, 0xc2, 0x6b, 0x83, 0x9d, 0x6e, 0xa3, 0xe7, 0x76, 0x07, 0x9e, + 0xcb, 0xdc, 0x60, 0x50, 0x17, 0xbf, 0x28, 0xa7, 0xe7, 0x95, 0x52, 0xd7, 0xed, 0xba, 0x12, 0x87, + 0x8f, 0xe4, 0x7a, 0xa5, 0xd6, 0x75, 0xdd, 0x6e, 0x8f, 0x36, 0xc4, 0x6c, 0x6b, 0xd8, 0x69, 0x30, + 0xbb, 0x4f, 0x7d, 0x66, 0xf5, 0x07, 0x0a, 0x61, 0x49, 0x51, 0x7f, 0xd8, 0xeb, 0xbb, 0x6d, 0xda, + 0x6b, 0xf8, 0xcc, 0x62, 0xbe, 0xfc, 0x55, 0x18, 0x8b, 0x1c, 0x63, 0x30, 0xf4, 0xb7, 0xc5, 0x8f, + 0x04, 0xe2, 0x12, 0xa0, 0x4d, 0xe6, 0x51, 0xab, 0x4f, 0x2c, 0x46, 0x7d, 0x42, 0x1f, 0x0e, 0xa9, + 0xcf, 0xf0, 0x4d, 0x58, 0x8c, 0x41, 0xfd, 0x81, 0xeb, 0xf8, 0x14, 0x5d, 0x84, 0x82, 0x1f, 0x82, + 0xcb, 0xc6, 0x52, 0x6a, 0xb9, 0xb0, 0x52, 0xaa, 0x07, 0xa2, 0x84, 0x7b, 0x48, 0x14, 0x11, 0xff, + 0xca, 0x00, 0x08, 0xd7, 0x50, 0x15, 0x40, 0xae, 0x7e, 0x60, 0xf9, 0xdb, 0x65, 0x63, 0xc9, 0x58, + 0x36, 0x49, 0x04, 0x82, 0xce, 0xc1, 0xd1, 0x70, 0x76, 0xcb, 0xdd, 0xdc, 0xb6, 0xbc, 0x76, 0x39, + 0x29, 
0xd0, 0xf6, 0x2f, 0x20, 0x04, 0xa6, 0x67, 0x31, 0x5a, 0x4e, 0x2d, 0x19, 0xcb, 0x29, 0x22, + 0xc6, 0xe8, 0x38, 0x64, 0x18, 0x75, 0x2c, 0x87, 0x95, 0xcd, 0x25, 0x63, 0x39, 0x4f, 0xd4, 0x8c, + 0xc3, 0xb9, 0xec, 0xd4, 0x2f, 0xa7, 0x97, 0x8c, 0xe5, 0x39, 0xa2, 0x66, 0xf8, 0xb3, 0x14, 0x14, + 0x3f, 0x1c, 0x52, 0x6f, 0xa4, 0x14, 0x80, 0xaa, 0x90, 0xf3, 0x69, 0x8f, 0xb6, 0x98, 0xeb, 0x09, + 0x06, 0xf3, 0xcd, 0x64, 0xd9, 0x20, 0x01, 0x0c, 0x95, 0x20, 0xdd, 0xb3, 0xfb, 0x36, 0x13, 0x6c, + 0xcd, 0x11, 0x39, 0x41, 0x97, 0x20, 0xed, 0x33, 0xcb, 0x63, 0x82, 0x97, 0xc2, 0x4a, 0xa5, 0x2e, + 0x8d, 0x56, 0xd7, 0x46, 0xab, 0xdf, 0xd5, 0x46, 0x6b, 0xe6, 0x9e, 0x8c, 0x6b, 0x89, 0x4f, 0xff, + 0x56, 0x33, 0x88, 0xdc, 0x82, 0x2e, 0x42, 0x8a, 0x3a, 0x6d, 0xc1, 0xef, 0xd7, 0xdd, 0xc9, 0x37, + 0xa0, 0xf3, 0x90, 0x6f, 0xdb, 0x1e, 0x6d, 0x31, 0xdb, 0x75, 0x84, 0x54, 0xf3, 0x2b, 0x8b, 0xa1, + 0x45, 0xd6, 0xf4, 0x12, 0x09, 0xb1, 0xd0, 0x39, 0xc8, 0xf8, 0x5c, 0x75, 0x7e, 0x39, 0xbb, 0x94, + 0x5a, 0xce, 0x37, 0x4b, 0x7b, 0xe3, 0xda, 0x82, 0x84, 0x9c, 0x73, 0xfb, 0x36, 0xa3, 0xfd, 0x01, + 0x1b, 0x11, 0x85, 0x83, 0xce, 0x42, 0xb6, 0x4d, 0x7b, 0x94, 0x1b, 0x3c, 0x27, 0x0c, 0xbe, 0x10, + 0x21, 0x2f, 0x16, 0x88, 0x46, 0x40, 0xf7, 0xc1, 0x1c, 0xf4, 0x2c, 0xa7, 0x9c, 0x17, 0x52, 0xcc, + 0x87, 0x88, 0x77, 0x7a, 0x96, 0xd3, 0xbc, 0xf8, 0xe5, 0xb8, 0xb6, 0xd2, 0xb5, 0xd9, 0xf6, 0x70, + 0xab, 0xde, 0x72, 0xfb, 0x8d, 0xae, 0x67, 0x75, 0x2c, 0xc7, 0x6a, 0xf4, 0xdc, 0x1d, 0xbb, 0xc1, + 0x9d, 0xf3, 0xe1, 0x90, 0x7a, 0x36, 0xf5, 0x1a, 0x9c, 0x46, 0x5d, 0xd8, 0x83, 0xef, 0x23, 0x82, + 0xe6, 0x75, 0x33, 0x97, 0x59, 0xc8, 0xe2, 0x71, 0x12, 0xd0, 0xa6, 0xd5, 0x1f, 0xf4, 0xe8, 0x4c, + 0xf6, 0x0a, 0x2c, 0x93, 0x3c, 0xb4, 0x65, 0x52, 0xb3, 0x5a, 0x26, 0x54, 0xb3, 0x39, 0x9b, 0x9a, + 0xd3, 0x5f, 0x57, 0xcd, 0x99, 0x97, 0xaf, 0x66, 0x5c, 0x06, 0x93, 0xcf, 0xd0, 0x02, 0xa4, 0x3c, + 0xeb, 0x91, 0x50, 0x66, 0x91, 0xf0, 0x21, 0xde, 0x80, 0x8c, 0x64, 0x04, 0x55, 0x26, 0xb5, 0x1d, + 0xbf, 0x19, 0xa1, 0xa6, 0x53, 0x5a, 0x87, 0x0b, 0xa1, 0x0e, 0x53, 0x42, 0x3b, 0xf8, 0xd7, 0x06, + 0xcc, 0x29, 0x13, 0xaa, 0xe8, 0xb2, 0x05, 0x59, 0x79, 0xbb, 0x75, 0x64, 0x39, 0x31, 0x19, 0x59, + 0xae, 0xb4, 0xad, 0x01, 0xa3, 0x5e, 0xb3, 0xf1, 0x64, 0x5c, 0x33, 0xbe, 0x1c, 0xd7, 0xde, 0x78, + 0x91, 0x94, 0x22, 0xc8, 0xa9, 0xa8, 0xa3, 0x09, 0xa3, 0x37, 0x05, 0x77, 0xcc, 0x57, 0x7e, 0x70, + 0xa4, 0x2e, 0x03, 0xe4, 0xba, 0xd3, 0xa5, 0x3e, 0xa7, 0x6c, 0x72, 0x13, 0x12, 0x89, 0x83, 0x7f, + 0x0a, 0x8b, 0x31, 0x57, 0x53, 0x7c, 0xbe, 0x0b, 0x19, 0x9f, 0x2b, 0x50, 0xb3, 0x19, 0x31, 0xd4, + 0xa6, 0x80, 0x37, 0xe7, 0x15, 0x7f, 0x19, 0x39, 0x27, 0x0a, 0x7f, 0xb6, 0xd3, 0xff, 0x62, 0x40, + 0x71, 0xc3, 0xda, 0xa2, 0x3d, 0xed, 0xe3, 0x08, 0x4c, 0xc7, 0xea, 0x53, 0xa5, 0x71, 0x31, 0xe6, + 0x01, 0xed, 0x63, 0xab, 0x37, 0xa4, 0x92, 0x64, 0x8e, 0xa8, 0xd9, 0xac, 0x91, 0xc8, 0x38, 0x74, + 0x24, 0x32, 0x42, 0x7f, 0x2f, 0x41, 0x9a, 0x7b, 0xd6, 0x48, 0x44, 0xa1, 0x3c, 0x91, 0x13, 0xfc, + 0x06, 0xcc, 0x29, 0x29, 0x94, 0xfa, 0x42, 0x96, 0xb9, 0xfa, 0xf2, 0x9a, 0x65, 0xdc, 0x87, 0x8c, + 0xd4, 0x36, 0x7a, 0x1d, 0xf2, 0x41, 0x76, 0x13, 0xd2, 0xa6, 0x9a, 0x99, 0xbd, 0x71, 0x2d, 0xc9, + 0x7c, 0x12, 0x2e, 0xa0, 0x1a, 0xa4, 0xc5, 0x4e, 0x21, 0xb9, 0xd1, 0xcc, 0xef, 0x8d, 0x6b, 0x12, + 0x40, 0xe4, 0x07, 0x9d, 0x04, 0x73, 0x9b, 0x27, 0x18, 0xae, 0x02, 0xb3, 0x99, 0xdb, 0x1b, 0xd7, + 0xc4, 0x9c, 0x88, 0x5f, 0x7c, 0x0d, 0x8a, 0x1b, 0xb4, 0x6b, 0xb5, 0x46, 0xea, 0xd0, 0x92, 0x26, + 0xc7, 0x0f, 0x34, 0x34, 0x8d, 0xd3, 0x50, 0x0c, 0x4e, 0x7c, 0xd0, 0xf7, 0x95, 0x53, 0x17, 0x02, + 0xd8, 0x4d, 0x1f, 0xff, 0xd2, 
0x00, 0x65, 0x67, 0x84, 0x21, 0xd3, 0xe3, 0xb2, 0xfa, 0x2a, 0x06, + 0xc1, 0xde, 0xb8, 0xa6, 0x20, 0x44, 0x7d, 0xd1, 0x65, 0xc8, 0xfa, 0xe2, 0x44, 0x4e, 0x6c, 0xd2, + 0x7d, 0xc4, 0x42, 0xf3, 0x08, 0x77, 0x83, 0xbd, 0x71, 0x4d, 0x23, 0x12, 0x3d, 0x40, 0xf5, 0x58, + 0xe6, 0x94, 0x82, 0xcd, 0xef, 0x8d, 0x6b, 0x11, 0x68, 0x34, 0x93, 0xe2, 0x7f, 0x1a, 0x50, 0xb8, + 0x6b, 0xd9, 0x81, 0x0b, 0x95, 0xb5, 0x89, 0xc2, 0x18, 0x29, 0x01, 0xfc, 0x4a, 0xb7, 0x69, 0xcf, + 0x1a, 0x5d, 0x75, 0x3d, 0x41, 0x77, 0x8e, 0x04, 0xf3, 0x30, 0xd9, 0x99, 0x53, 0x93, 0x5d, 0x7a, + 0xf6, 0x90, 0xfa, 0x3f, 0x0c, 0x60, 0xd7, 0xcd, 0x5c, 0x72, 0x21, 0x85, 0xff, 0x60, 0x40, 0x51, + 0x4a, 0xae, 0xdc, 0xee, 0x47, 0x90, 0x91, 0x8a, 0x11, 0xb2, 0xbf, 0x20, 0xb8, 0xbc, 0x39, 0x4b, + 0x60, 0x51, 0x34, 0xd1, 0xf7, 0x60, 0xbe, 0xed, 0xb9, 0x83, 0x01, 0x6d, 0x6f, 0xaa, 0x10, 0x96, + 0x9c, 0x0c, 0x61, 0x6b, 0xd1, 0x75, 0x32, 0x81, 0x8e, 0xff, 0x6a, 0xc0, 0x9c, 0x8a, 0x16, 0xca, + 0x56, 0x81, 0x7e, 0x8d, 0x43, 0xa7, 0xac, 0xe4, 0xac, 0x29, 0xeb, 0x38, 0x64, 0xba, 0x9e, 0x3b, + 0x1c, 0xf8, 0xe5, 0x94, 0xbc, 0x9b, 0x72, 0x36, 0x5b, 0x2a, 0xc3, 0xd7, 0x61, 0x5e, 0x8b, 0x72, + 0x40, 0xc8, 0xac, 0x4c, 0x86, 0xcc, 0xf5, 0x36, 0x75, 0x98, 0xdd, 0xb1, 0x83, 0x20, 0xa8, 0xf0, + 0xf1, 0xcf, 0x0d, 0x58, 0x98, 0x44, 0x41, 0x6b, 0x91, 0x7b, 0xc6, 0xc9, 0x9d, 0x39, 0x98, 0x5c, + 0x5d, 0x04, 0x1f, 0xff, 0x7d, 0x87, 0x79, 0x23, 0x4d, 0x5a, 0xee, 0xad, 0xbc, 0x03, 0x85, 0xc8, + 0x22, 0x4f, 0x51, 0x3b, 0x54, 0xdd, 0x0c, 0xc2, 0x87, 0x61, 0x48, 0x48, 0xca, 0x80, 0x26, 0x26, + 0xf8, 0x17, 0x06, 0xcc, 0xc5, 0x6c, 0x89, 0xde, 0x05, 0xb3, 0xe3, 0xb9, 0xfd, 0x99, 0x0c, 0x25, + 0x76, 0xa0, 0x6f, 0x41, 0x92, 0xb9, 0x33, 0x99, 0x29, 0xc9, 0x5c, 0x6e, 0x25, 0x25, 0x7e, 0x4a, + 0x56, 0xb7, 0x72, 0x86, 0xdf, 0x81, 0xbc, 0x10, 0xe8, 0x8e, 0x65, 0x7b, 0x53, 0xb3, 0xc5, 0x74, + 0x81, 0x2e, 0xc3, 0x11, 0x19, 0x09, 0xa7, 0x6f, 0x2e, 0x4e, 0xdb, 0x5c, 0xd4, 0x9b, 0x5f, 0x83, + 0xf4, 0xea, 0xf6, 0xd0, 0xd9, 0xe1, 0x5b, 0xda, 0x16, 0xb3, 0xf4, 0x16, 0x3e, 0xc6, 0xc7, 0x60, + 0x91, 0xdf, 0x41, 0xea, 0xf9, 0xab, 0xee, 0xd0, 0x61, 0xba, 0xbb, 0x38, 0x07, 0xa5, 0x38, 0x58, + 0x79, 0x49, 0x09, 0xd2, 0x2d, 0x0e, 0x10, 0x34, 0xe6, 0x88, 0x9c, 0xe0, 0xdf, 0x1a, 0x80, 0xae, + 0x51, 0x26, 0x4e, 0x59, 0x5f, 0x0b, 0xae, 0x47, 0x05, 0x72, 0x7d, 0x8b, 0xb5, 0xb6, 0xa9, 0xe7, + 0xeb, 0x1a, 0x44, 0xcf, 0xff, 0x1f, 0xd5, 0x1e, 0x3e, 0x0f, 0x8b, 0x31, 0x2e, 0x95, 0x4c, 0x15, + 0xc8, 0xb5, 0x14, 0x4c, 0xe5, 0xbb, 0x60, 0x8e, 0xff, 0x98, 0x84, 0x9c, 0xd8, 0x40, 0x68, 0x07, + 0x9d, 0x87, 0x42, 0xc7, 0x76, 0xba, 0xd4, 0x1b, 0x78, 0xb6, 0x52, 0x81, 0xd9, 0x3c, 0xb2, 0x37, + 0xae, 0x45, 0xc1, 0x24, 0x3a, 0x41, 0x6f, 0x41, 0x76, 0xe8, 0x53, 0xef, 0x81, 0x2d, 0x6f, 0x7a, + 0xbe, 0x59, 0xda, 0x1d, 0xd7, 0x32, 0x3f, 0xf0, 0xa9, 0xb7, 0xbe, 0xc6, 0x33, 0xcf, 0x50, 0x8c, + 0x88, 0xfc, 0xb6, 0xd1, 0x0d, 0xe5, 0xa6, 0xa2, 0x08, 0x6b, 0x7e, 0x9b, 0xb3, 0x3f, 0x11, 0xea, + 0x06, 0x9e, 0xdb, 0xa7, 0x6c, 0x9b, 0x0e, 0xfd, 0x46, 0xcb, 0xed, 0xf7, 0x5d, 0xa7, 0x21, 0x7a, + 0x49, 0x21, 0x34, 0x4f, 0x9f, 0x7c, 0xbb, 0xf2, 0xdc, 0xbb, 0x90, 0x65, 0xdb, 0x9e, 0x3b, 0xec, + 0x6e, 0x8b, 0xac, 0x90, 0x6a, 0x5e, 0x9a, 0x9d, 0x9e, 0xa6, 0x40, 0xf4, 0x00, 0x9d, 0xe6, 0xda, + 0xa2, 0xad, 0x1d, 0x7f, 0xd8, 0x97, 0x1d, 0x5a, 0x33, 0xbd, 0x37, 0xae, 0x19, 0x6f, 0x91, 0x00, + 0x8c, 0x3f, 0x49, 0x42, 0x4d, 0x38, 0xea, 0x3d, 0x51, 0x36, 0x5c, 0x75, 0xbd, 0x9b, 0x94, 0x79, + 0x76, 0xeb, 0x96, 0xd5, 0xa7, 0xda, 0x37, 0x6a, 0x50, 0xe8, 0x0b, 0xe0, 0x83, 0xc8, 0x15, 0x80, + 0x7e, 0x80, 0x87, 0x4e, 0x01, 0x88, 0x3b, 0x23, 0xd7, 
0xe5, 0x6d, 0xc8, 0x0b, 0x88, 0x58, 0x5e, + 0x8d, 0x69, 0xaa, 0x31, 0xa3, 0x64, 0x4a, 0x43, 0xeb, 0x93, 0x1a, 0x9a, 0x99, 0x4e, 0xa0, 0x96, + 0xa8, 0xaf, 0xa7, 0xe3, 0xbe, 0x8e, 0x3f, 0x37, 0xa0, 0xba, 0xa1, 0x39, 0x3f, 0xa4, 0x3a, 0xb4, + 0xbc, 0xc9, 0x97, 0x24, 0x6f, 0xea, 0xbf, 0x93, 0x17, 0x57, 0x01, 0x36, 0x6c, 0x87, 0x5e, 0xb5, + 0x7b, 0x8c, 0x7a, 0x53, 0x3a, 0x91, 0x4f, 0x52, 0x61, 0x48, 0x20, 0xb4, 0xa3, 0xe5, 0x5c, 0x8d, + 0xc4, 0xe1, 0x97, 0x21, 0x46, 0xf2, 0x25, 0x9a, 0x2d, 0x35, 0x11, 0xa2, 0x76, 0x20, 0xdb, 0x11, + 0xe2, 0xc9, 0x94, 0x1a, 0x7b, 0x46, 0x09, 0x65, 0x6f, 0x5e, 0x56, 0x87, 0x5f, 0x78, 0x51, 0x41, + 0x22, 0x5e, 0x7d, 0x1a, 0xfe, 0xc8, 0x61, 0xd6, 0xe3, 0xc8, 0x66, 0xa2, 0x4f, 0x40, 0x3f, 0x51, + 0xe5, 0x56, 0x7a, 0x6a, 0xb9, 0xa5, 0x6f, 0xee, 0xe1, 0x7b, 0xc6, 0xf7, 0xc2, 0xd8, 0x27, 0xcc, + 0xa1, 0x62, 0xdf, 0x19, 0x30, 0x3d, 0xda, 0xd1, 0x49, 0x1a, 0x85, 0xc7, 0x06, 0x98, 0x62, 0x1d, + 0xff, 0xc9, 0x80, 0x85, 0x6b, 0x94, 0xc5, 0xcb, 0x9f, 0x57, 0xc8, 0x98, 0xf8, 0x03, 0x38, 0x1a, + 0xe1, 0x5f, 0x49, 0x7f, 0x61, 0xa2, 0xe6, 0x39, 0x16, 0xca, 0xbf, 0xee, 0xb4, 0xe9, 0x63, 0xd5, + 0x2b, 0xc6, 0xcb, 0x9d, 0x3b, 0x50, 0x88, 0x2c, 0xa2, 0x2b, 0x13, 0x85, 0x4e, 0xe4, 0x65, 0x27, + 0x48, 0xd6, 0xcd, 0x92, 0x92, 0x49, 0x76, 0x8b, 0xaa, 0x8c, 0x0d, 0x8a, 0x82, 0x4d, 0x40, 0xc2, + 0x5c, 0x82, 0x6c, 0x34, 0x2d, 0x09, 0xe8, 0x8d, 0xa0, 0xe2, 0x09, 0xe6, 0xe8, 0x34, 0x98, 0x9e, + 0xfb, 0x48, 0x57, 0xb0, 0x73, 0xe1, 0x91, 0xc4, 0x7d, 0x44, 0xc4, 0x12, 0xbe, 0x0c, 0x29, 0xe2, + 0x3e, 0x42, 0x55, 0x00, 0xcf, 0x72, 0xba, 0xf4, 0x5e, 0xd0, 0x38, 0x15, 0x49, 0x04, 0x72, 0x40, + 0xc9, 0xb0, 0x0a, 0x47, 0xa3, 0x1c, 0x49, 0x73, 0xd7, 0x21, 0xfb, 0xe1, 0x30, 0xaa, 0xae, 0xd2, + 0x84, 0xba, 0x64, 0x0f, 0xae, 0x91, 0xb8, 0xcf, 0x40, 0x08, 0x47, 0x27, 0x21, 0xcf, 0xac, 0xad, + 0x1e, 0xbd, 0x15, 0x06, 0xb8, 0x10, 0xc0, 0x57, 0x79, 0xcf, 0x77, 0x2f, 0x52, 0xfb, 0x84, 0x00, + 0x74, 0x16, 0x16, 0x42, 0x9e, 0xef, 0x78, 0xb4, 0x63, 0x3f, 0x16, 0x16, 0x2e, 0x92, 0x7d, 0x70, + 0xb4, 0x0c, 0x47, 0x42, 0xd8, 0xa6, 0xa8, 0x31, 0x4c, 0x81, 0x3a, 0x09, 0xe6, 0xba, 0x11, 0xe2, + 0xbe, 0xff, 0x70, 0x68, 0xf5, 0xc4, 0xcd, 0x2b, 0x92, 0x08, 0x04, 0xff, 0xd9, 0x80, 0xa3, 0xd2, + 0xd4, 0xbc, 0xdb, 0x7f, 0x15, 0xbd, 0xfe, 0x33, 0x03, 0x50, 0x54, 0x02, 0xe5, 0x5a, 0xdf, 0x88, + 0x3e, 0xe3, 0xf0, 0x22, 0xa6, 0x20, 0x5a, 0x59, 0x09, 0x0a, 0x5f, 0x62, 0x30, 0x64, 0x44, 0x21, + 0x24, 0x7b, 0x6a, 0x53, 0xf6, 0xca, 0x12, 0x42, 0xd4, 0x97, 0xb7, 0xf8, 0x5b, 0x23, 0x46, 0x7d, + 0xd5, 0xe9, 0x8a, 0x16, 0x5f, 0x00, 0x88, 0xfc, 0xf0, 0xb3, 0xa8, 0xc3, 0x84, 0xd7, 0x98, 0xe1, + 0x59, 0x0a, 0x44, 0xf4, 0x00, 0xff, 0x3e, 0x09, 0x73, 0xf7, 0xdc, 0xde, 0x30, 0x4c, 0x89, 0xaf, + 0x52, 0xaa, 0x88, 0xb5, 0xdf, 0x69, 0xdd, 0x7e, 0x23, 0x30, 0x7d, 0x46, 0x07, 0xc2, 0xb3, 0x52, + 0x44, 0x8c, 0x11, 0x86, 0x22, 0xb3, 0xbc, 0x2e, 0x65, 0xb2, 0xaf, 0x29, 0x67, 0x44, 0xc1, 0x19, + 0x83, 0xa1, 0x25, 0x28, 0x58, 0xdd, 0xae, 0x47, 0xbb, 0x16, 0xa3, 0xcd, 0x51, 0x39, 0x2b, 0x0e, + 0x8b, 0x82, 0xf0, 0x47, 0x30, 0xaf, 0x95, 0xa5, 0x4c, 0xfa, 0x36, 0x64, 0x3f, 0x16, 0x90, 0x29, + 0x4f, 0x5e, 0x12, 0x55, 0x85, 0x31, 0x8d, 0x16, 0x7f, 0x1f, 0xd7, 0x3c, 0xe3, 0xeb, 0x90, 0x91, + 0xe8, 0xe8, 0x64, 0xb4, 0x3b, 0x91, 0x6f, 0x33, 0x7c, 0xae, 0x5a, 0x0d, 0x0c, 0x19, 0x49, 0x48, + 0x19, 0x5e, 0xf8, 0x86, 0x84, 0x10, 0xf5, 0xc5, 0xbf, 0x31, 0xe0, 0xd8, 0x1a, 0x65, 0xb4, 0xc5, + 0x68, 0xfb, 0xaa, 0x4d, 0x7b, 0xed, 0xc3, 0x36, 0xce, 0xc6, 0xa1, 0x1b, 0xe7, 0x69, 0x6f, 0x5f, + 0xa9, 0xe8, 0xdb, 0xd7, 0x3a, 0x1c, 0x9f, 0x64, 0x51, 0x69, 0xb4, 0x01, 0x99, 
0x8e, 0x80, 0xec, + 0x7f, 0xea, 0x8c, 0xed, 0x20, 0x0a, 0x0d, 0x7b, 0x30, 0x17, 0x5b, 0x10, 0x1a, 0xe6, 0x16, 0x55, + 0xd1, 0x4e, 0x4e, 0xd0, 0x37, 0xc1, 0x64, 0xa3, 0x81, 0x0a, 0x72, 0xcd, 0x63, 0xff, 0x1a, 0xd7, + 0x8e, 0xc6, 0xb6, 0xdd, 0x1d, 0x0d, 0x28, 0x11, 0x28, 0xdc, 0x11, 0x5a, 0x96, 0xd7, 0xb6, 0x1d, + 0xab, 0x67, 0x33, 0xc9, 0xb8, 0x49, 0xa2, 0xa0, 0xb3, 0x67, 0x20, 0x1f, 0xfc, 0x7f, 0x80, 0x0a, + 0x90, 0xbd, 0x7a, 0x9b, 0xfc, 0xf0, 0x0a, 0x59, 0x5b, 0x48, 0xa0, 0x22, 0xe4, 0x9a, 0x57, 0x56, + 0x6f, 0x88, 0x99, 0xb1, 0xf2, 0x79, 0x5a, 0x07, 0x6f, 0x0f, 0x7d, 0x17, 0xd2, 0x32, 0x22, 0x1f, + 0x0f, 0x25, 0x8a, 0xbe, 0xd4, 0x57, 0x4e, 0xec, 0x83, 0x4b, 0x95, 0xe0, 0xc4, 0xdb, 0x06, 0xba, + 0x05, 0x05, 0x01, 0x54, 0x6f, 0x72, 0x27, 0x27, 0x9f, 0xc6, 0x62, 0x94, 0x4e, 0x1d, 0xb0, 0x1a, + 0xa1, 0x77, 0x09, 0xd2, 0xc2, 0xed, 0xa3, 0xdc, 0x44, 0xdf, 0x54, 0xa3, 0xdc, 0xc4, 0x5e, 0x29, + 0x71, 0x02, 0x7d, 0x07, 0x4c, 0xde, 0xa5, 0xa2, 0x48, 0xde, 0x8e, 0x3c, 0xa5, 0x55, 0x8e, 0x4f, + 0x82, 0x23, 0xc7, 0xbe, 0x17, 0xbc, 0x08, 0x9e, 0x98, 0x7c, 0x99, 0xd0, 0xdb, 0xcb, 0xfb, 0x17, + 0x82, 0x93, 0x6f, 0xcb, 0xa7, 0x2b, 0xdd, 0x1f, 0xa3, 0x53, 0xf1, 0xa3, 0x26, 0xda, 0xe9, 0x4a, + 0xf5, 0xa0, 0xe5, 0x80, 0xe0, 0x06, 0x14, 0x22, 0xbd, 0x69, 0x54, 0xad, 0xfb, 0x1b, 0xeb, 0xa8, + 0x5a, 0xa7, 0x34, 0xb4, 0x38, 0x81, 0xae, 0x41, 0x8e, 0x57, 0x3b, 0x3c, 0xe8, 0xa3, 0xd7, 0x26, + 0x8b, 0x9a, 0x48, 0x32, 0xab, 0x9c, 0x9c, 0xbe, 0x18, 0x10, 0xfa, 0x3e, 0xe4, 0xaf, 0x51, 0xa6, + 0x22, 0xc2, 0x89, 0xc9, 0x90, 0x32, 0x45, 0x53, 0xf1, 0xb0, 0x84, 0x13, 0xe8, 0x23, 0x51, 0x78, + 0xc5, 0xef, 0x18, 0xaa, 0x1d, 0x70, 0x97, 0x02, 0xbe, 0x96, 0x0e, 0x46, 0xd0, 0x94, 0x57, 0x7e, + 0xac, 0xff, 0xb1, 0x5c, 0xb3, 0x98, 0x85, 0x6e, 0xc3, 0xbc, 0x10, 0x39, 0xf8, 0x4b, 0x33, 0xe6, + 0x9a, 0xfb, 0xfe, 0x3f, 0x8d, 0xb9, 0xe6, 0xfe, 0xff, 0x51, 0x71, 0xa2, 0x79, 0xff, 0xe9, 0xb3, + 0x6a, 0xe2, 0x8b, 0x67, 0xd5, 0xc4, 0x57, 0xcf, 0xaa, 0xc6, 0xcf, 0x76, 0xab, 0xc6, 0xef, 0x76, + 0xab, 0xc6, 0x93, 0xdd, 0xaa, 0xf1, 0x74, 0xb7, 0x6a, 0xfc, 0x7d, 0xb7, 0x6a, 0xfc, 0x63, 0xb7, + 0x9a, 0xf8, 0x6a, 0xb7, 0x6a, 0x7c, 0xfa, 0xbc, 0x9a, 0x78, 0xfa, 0xbc, 0x9a, 0xf8, 0xe2, 0x79, + 0x35, 0x71, 0xff, 0xf5, 0xff, 0xd0, 0x05, 0xc8, 0x38, 0x95, 0x11, 0x9f, 0x0b, 0xff, 0x0e, 0x00, + 0x00, 0xff, 0xff, 0x46, 0xc6, 0x73, 0x88, 0x70, 0x1e, 0x00, 0x00, } func (x Direction) String() string { @@ -2927,13 +2889,6 @@ func (x Direction) String() string { } return strconv.Itoa(int(x)) } -func (x DetectedFieldType) String() string { - s, ok := DetectedFieldType_name[int32(x)] - if ok { - return s - } - return strconv.Itoa(int(x)) -} func (this *StreamRatesRequest) Equal(that interface{}) bool { if that == nil { return this == nil @@ -7823,10 +7778,12 @@ func (m *DetectedField) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x18 } - if m.Type != 0 { - i = encodeVarintLogproto(dAtA, i, uint64(m.Type)) + if len(m.Type) > 0 { + i -= len(m.Type) + copy(dAtA[i:], m.Type) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Type))) i-- - dAtA[i] = 0x10 + dAtA[i] = 0x12 } if len(m.Label) > 0 { i -= len(m.Label) @@ -8777,8 +8734,9 @@ func (m *DetectedField) Size() (n int) { if l > 0 { n += 1 + l + sovLogproto(uint64(l)) } - if m.Type != 0 { - n += 1 + sovLogproto(uint64(m.Type)) + l = len(m.Type) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) } if m.Cardinality != 0 { n += 1 + sovLogproto(uint64(m.Cardinality)) @@ -15645,10 +15603,10 @@ func (m *DetectedField) Unmarshal(dAtA []byte) error { m.Label = string(dAtA[iNdEx:postIndex]) iNdEx = 
postIndex case 2: - if wireType != 0 { + if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) } - m.Type = 0 + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowLogproto @@ -15658,11 +15616,24 @@ func (m *DetectedField) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.Type |= DetectedFieldType(b&0x7F) << shift + stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Type = DetectedFieldType(dAtA[iNdEx:postIndex]) + iNdEx = postIndex case 3: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Cardinality", wireType) diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index 48799a85dce1a..136400a555f2e 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -439,17 +439,8 @@ message DetectedFieldsResponse { repeated DetectedField fields = 1; } -enum DetectedFieldType { - STRING = 0; - INT = 1; - FLOAT = 2; - BOOL = 3; - DURATION = 4; - BYTES = 5; -} - message DetectedField { string label = 1; - DetectedFieldType type = 2; + string type = 2 [(gogoproto.casttype) = "DetectedFieldType"]; uint64 cardinality = 3; } diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index e9fc10bfc09e8..2b438ad158ec1 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -1039,6 +1039,7 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { t.Server.HTTP.Path("/loki/api/v1/labels").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/label/{name}/values").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/series").Methods("GET", "POST").Handler(frontendHandler) + t.Server.HTTP.Path("/loki/api/v1/detected_fields").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/stats").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/shards").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/volume").Methods("GET", "POST").Handler(frontendHandler) @@ -1056,10 +1057,6 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { t.Server.HTTP.Path("/api/prom/tail").Methods("GET", "POST").Handler(defaultHandler) } - if t.Cfg.Frontend.ExperimentalAPIsEnabled { - t.Server.HTTP.Path("/loki/api/experimental/detected_fields").Methods("GET", "POST").Handler(frontendHandler) - } - if t.frontend == nil { return services.NewIdleService(nil, func(_ error) error { if t.stopper != nil { diff --git a/pkg/lokifrontend/config.go b/pkg/lokifrontend/config.go index 648a049c74812..30ab5cd29fecc 100644 --- a/pkg/lokifrontend/config.go +++ b/pkg/lokifrontend/config.go @@ -20,8 +20,6 @@ type Config struct { TailProxyURL string `yaml:"tail_proxy_url"` TLS tls.ClientConfig `yaml:"tail_tls_config"` - - ExperimentalAPIsEnabled bool `yaml:"experimental_apis_enabled"` } // RegisterFlags adds the flags required to config this to the given FlagSet. 
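The logproto.proto change above replaces the DetectedFieldType enum with a plain string field annotated with gogoproto.casttype, so the wire format becomes an ordinary proto string while the generated Go struct keeps a dedicated DetectedFieldType field (the generated unmarshal code simply casts the decoded bytes). A minimal sketch of what that pattern implies follows; the constant names are illustrative assumptions, not the identifiers actually defined in pkg/logproto/extensions.go.

package logproto

// DetectedFieldType is just a string on the wire; (gogoproto.casttype) only changes the
// Go type of the generated field, so new field types can be introduced without a proto change.
type DetectedFieldType string

// Hypothetical values, for illustration only.
const (
	DetectedTypeString DetectedFieldType = "string"
	DetectedTypeInt    DetectedFieldType = "int"
)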
@@ -34,5 +32,4 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.BoolVar(&cfg.CompressResponses, "querier.compress-http-responses", true, "Compress HTTP responses.") f.StringVar(&cfg.DownstreamURL, "frontend.downstream-url", "", "URL of downstream Loki.") f.StringVar(&cfg.TailProxyURL, "frontend.tail-proxy-url", "", "URL of querier for tail proxy.") - f.BoolVar(&cfg.ExperimentalAPIsEnabled, "frontend.experimental-apis-enabled", false, "Whether to enable experimental APIs in the frontend.") } diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index 397546cfaf98f..820931e97efd3 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -866,7 +866,7 @@ func (c Codec) EncodeRequest(ctx context.Context, r queryrangebase.Request) (*ht } u := &url.URL{ - Path: "/loki/api/experimental/detected_fields", + Path: "/loki/api/v1/detected_fields", RawQuery: params.Encode(), } req := &http.Request{ @@ -904,7 +904,7 @@ func (c Codec) Path(r queryrangebase.Request) string { case *logproto.VolumeRequest: return "/loki/api/v1/index/volume_range" case *DetectedFieldsRequest: - return "/loki/api/experimental/detected_fields" + return "/loki/api/v1/detected_fields" } return "other" diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index ee896639f8a8c..3d1a5daf1afb4 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -432,7 +432,7 @@ func getOperation(path string) string { return VolumeRangeOp case path == "/loki/api/v1/index/shards": return IndexShardsOp - case path == "/loki/api/experimental/detected_fields": + case path == "/loki/api/v1/detected_fields": return DetectedFieldsOp default: return "" From 7480468e0f8dd3912ff094587e999a406957ca3d Mon Sep 17 00:00:00 2001 From: Kaviraj Kanagaraj Date: Sun, 31 Mar 2024 18:00:53 +0200 Subject: [PATCH 40/54] fix: (Bug) correct resultType when storing instant query results in cache (#12312) Signed-off-by: Kaviraj --- cmd/loki/loki-local-with-memcached.yaml | 4 + pkg/querier/queryrange/codec.go | 14 +- .../queryrange/instant_metric_cache.go | 2 - pkg/querier/queryrange/limits_test.go | 6 +- .../queryrangebase/marshaling_test.go | 4 +- .../queryrange/queryrangebase/query_range.go | 28 ++- .../queryrangebase/query_range_test.go | 6 +- .../queryrangebase/results_cache_test.go | 6 +- pkg/querier/queryrange/roundtrip_test.go | 180 +++++++++++++++++- 9 files changed, 222 insertions(+), 28 deletions(-) diff --git a/cmd/loki/loki-local-with-memcached.yaml b/cmd/loki/loki-local-with-memcached.yaml index 482fb40e0c2cb..d69a983d6124a 100644 --- a/cmd/loki/loki-local-with-memcached.yaml +++ b/cmd/loki/loki-local-with-memcached.yaml @@ -16,6 +16,10 @@ common: kvstore: store: inmemory +limits_config: + split_instant_metric_queries_by_interval: '10m' + + query_range: align_queries_with_step: true cache_index_stats_results: true diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index 820931e97efd3..f56ef55c08e20 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -22,6 +22,7 @@ import ( json "github.com/json-iterator/go" "github.com/opentracing/opentracing-go" otlog "github.com/opentracing/opentracing-go/log" + "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/timestamp" "github.com/grafana/loki/pkg/loghttp" @@ -1271,15 +1272,22 @@ func (Codec) MergeResponse(responses ...queryrangebase.Response) (queryrangebase return nil, errors.New("merging responses 
requires at least one response") } var mergedStats stats.Result - switch responses[0].(type) { + switch res := responses[0].(type) { + // LokiPromResponse type is used for both instant and range queries. + // Meaning, values that are merged can be either vector or matrix types. case *LokiPromResponse: + codec := queryrangebase.PrometheusCodecForRangeQueries + if res.Response.Data.ResultType == model.ValVector.String() { + codec = queryrangebase.PrometheusCodecForInstantQueries + } + promResponses := make([]queryrangebase.Response, 0, len(responses)) for _, res := range responses { mergedStats.MergeSplit(res.(*LokiPromResponse).Statistics) promResponses = append(promResponses, res.(*LokiPromResponse).Response) } - promRes, err := queryrangebase.PrometheusCodec.MergeResponse(promResponses...) + promRes, err := codec.MergeResponse(promResponses...) if err != nil { return nil, err } @@ -1800,7 +1808,7 @@ func NewEmptyResponse(r queryrangebase.Request) (queryrangebase.Response, error) } if _, ok := expr.(syntax.SampleExpr); ok { return &LokiPromResponse{ - Response: queryrangebase.NewEmptyPrometheusResponse(), + Response: queryrangebase.NewEmptyPrometheusResponse(model.ValMatrix), // range metric query }, nil } return &LokiResponse{ diff --git a/pkg/querier/queryrange/instant_metric_cache.go b/pkg/querier/queryrange/instant_metric_cache.go index ef1083e6cd229..6f505ebf8016f 100644 --- a/pkg/querier/queryrange/instant_metric_cache.go +++ b/pkg/querier/queryrange/instant_metric_cache.go @@ -49,8 +49,6 @@ func (cfg *InstantMetricCacheConfig) Validate() error { return cfg.ResultsCacheConfig.Validate() } -type instantMetricExtractor struct{} - func NewInstantMetricCacheMiddleware( log log.Logger, limits Limits, diff --git a/pkg/querier/queryrange/limits_test.go b/pkg/querier/queryrange/limits_test.go index a80cf96dde805..07bf4d1f30a74 100644 --- a/pkg/querier/queryrange/limits_test.go +++ b/pkg/querier/queryrange/limits_test.go @@ -240,7 +240,7 @@ func Test_MaxQueryParallelism(t *testing.T) { defer count.Dec() // simulate some work time.Sleep(20 * time.Millisecond) - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) ctx := user.InjectOrgID(context.Background(), "foo") @@ -271,7 +271,7 @@ func Test_MaxQueryParallelismLateScheduling(t *testing.T) { h := base.HandlerFunc(func(_ context.Context, _ base.Request) (base.Response, error) { // simulate some work time.Sleep(20 * time.Millisecond) - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) ctx := user.InjectOrgID(context.Background(), "foo") @@ -298,7 +298,7 @@ func Test_MaxQueryParallelismDisable(t *testing.T) { h := base.HandlerFunc(func(_ context.Context, _ base.Request) (base.Response, error) { // simulate some work time.Sleep(20 * time.Millisecond) - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) ctx := user.InjectOrgID(context.Background(), "foo") diff --git a/pkg/querier/queryrange/queryrangebase/marshaling_test.go b/pkg/querier/queryrange/queryrangebase/marshaling_test.go index ee0cdd0f217ca..4fcba1804c3a6 100644 --- a/pkg/querier/queryrange/queryrangebase/marshaling_test.go +++ b/pkg/querier/queryrange/queryrangebase/marshaling_test.go @@ -29,7 +29,7 @@ func BenchmarkPrometheusCodec_DecodeResponse(b *testing.B) { b.ReportAllocs() for n := 0; n < b.N; n++ { - _, err := PrometheusCodec.DecodeResponse(context.Background(), &http.Response{ + _, err := 
PrometheusCodecForRangeQueries.DecodeResponse(context.Background(), &http.Response{ StatusCode: 200, Body: io.NopCloser(bytes.NewReader(encodedRes)), ContentLength: int64(len(encodedRes)), @@ -51,7 +51,7 @@ func BenchmarkPrometheusCodec_EncodeResponse(b *testing.B) { b.ReportAllocs() for n := 0; n < b.N; n++ { - _, err := PrometheusCodec.EncodeResponse(context.Background(), nil, res) + _, err := PrometheusCodecForRangeQueries.EncodeResponse(context.Background(), nil, res) require.NoError(b, err) } } diff --git a/pkg/querier/queryrange/queryrangebase/query_range.go b/pkg/querier/queryrange/queryrangebase/query_range.go index ed2bf48c6757f..5e8f0dd855edb 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range.go +++ b/pkg/querier/queryrange/queryrangebase/query_range.go @@ -38,14 +38,26 @@ var ( errNegativeStep = httpgrpc.Errorf(http.StatusBadRequest, "zero or negative query resolution step widths are not accepted. Try a positive integer") errStepTooSmall = httpgrpc.Errorf(http.StatusBadRequest, "exceeded maximum resolution of 11,000 points per time series. Try increasing the value of the step parameter") - // PrometheusCodec is a codec to encode and decode Prometheus query range requests and responses. - PrometheusCodec = &prometheusCodec{} + // PrometheusCodecForRangeQueries is a codec to encode and decode Loki range metric query requests and responses. + PrometheusCodecForRangeQueries = &prometheusCodec{ + resultType: model.ValMatrix, + } + + // PrometheusCodecForInstantQueries is a codec to encode and decode Loki instant metric query requests and responses. + PrometheusCodecForInstantQueries = &prometheusCodec{ + resultType: model.ValVector, + } // Name of the cache control header. cacheControlHeader = "Cache-Control" ) -type prometheusCodec struct{} +type prometheusCodec struct { + // prometheusCodec merges multiple responses of either range (matrix) or instant (vector) queries. + // When creating empty responses during a merge, it needs to know which value type to create them with. + // This helps other middlewares filter on the correct result type. + resultType model.ValueType +} // WithStartEnd clones the current `PrometheusRequest` with a new `start` and `end` timestamp. func (q *PrometheusRequest) WithStartEnd(start, end time.Time) Request { @@ -125,19 +137,19 @@ func (resp *PrometheusResponse) SetHeader(name, value string) { } // NewEmptyPrometheusResponse returns an empty successful Prometheus query range response.
-func NewEmptyPrometheusResponse() *PrometheusResponse { +func NewEmptyPrometheusResponse(v model.ValueType) *PrometheusResponse { return &PrometheusResponse{ Status: StatusSuccess, Data: PrometheusData{ - ResultType: model.ValMatrix.String(), + ResultType: v.String(), Result: []SampleStream{}, }, } } -func (prometheusCodec) MergeResponse(responses ...Response) (Response, error) { +func (p prometheusCodec) MergeResponse(responses ...Response) (Response, error) { if len(responses) == 0 { - return NewEmptyPrometheusResponse(), nil + return NewEmptyPrometheusResponse(p.resultType), nil } promResponses := make([]*PrometheusResponse, 0, len(responses)) @@ -155,7 +167,7 @@ func (prometheusCodec) MergeResponse(responses ...Response) (Response, error) { response := PrometheusResponse{ Status: StatusSuccess, Data: PrometheusData{ - ResultType: model.ValMatrix.String(), + ResultType: p.resultType.String(), Result: matrixMerge(promResponses), }, } diff --git a/pkg/querier/queryrange/queryrangebase/query_range_test.go b/pkg/querier/queryrange/queryrangebase/query_range_test.go index 21c115eec5892..4a59b2977b649 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range_test.go +++ b/pkg/querier/queryrange/queryrangebase/query_range_test.go @@ -33,7 +33,7 @@ func TestResponse(t *testing.T) { Header: http.Header{"Content-Type": []string{"application/json"}}, Body: io.NopCloser(bytes.NewBuffer([]byte(tc.body))), } - resp, err := PrometheusCodec.DecodeResponse(context.Background(), response, nil) + resp, err := PrometheusCodecForRangeQueries.DecodeResponse(context.Background(), response, nil) require.NoError(t, err) assert.Equal(t, tc.expected, resp) @@ -44,7 +44,7 @@ func TestResponse(t *testing.T) { Body: io.NopCloser(bytes.NewBuffer([]byte(tc.body))), ContentLength: int64(len(tc.body)), } - resp2, err := PrometheusCodec.EncodeResponse(context.Background(), nil, resp) + resp2, err := PrometheusCodecForRangeQueries.EncodeResponse(context.Background(), nil, resp) require.NoError(t, err) assert.Equal(t, response, resp2) }) @@ -262,7 +262,7 @@ func TestMergeAPIResponses(t *testing.T) { }, }} { t.Run(tc.name, func(t *testing.T) { - output, err := PrometheusCodec.MergeResponse(tc.input...) + output, err := PrometheusCodecForRangeQueries.MergeResponse(tc.input...) 
require.NoError(t, err) require.Equal(t, tc.expected, output) }) diff --git a/pkg/querier/queryrange/queryrangebase/results_cache_test.go b/pkg/querier/queryrange/queryrangebase/results_cache_test.go index 6706e6a2d9fa7..2ee599fab30ae 100644 --- a/pkg/querier/queryrange/queryrangebase/results_cache_test.go +++ b/pkg/querier/queryrange/queryrangebase/results_cache_test.go @@ -414,7 +414,7 @@ func TestResultsCache(t *testing.T) { c, resultscache.ConstSplitter(day), mockLimits{}, - PrometheusCodec, + PrometheusCodecForRangeQueries, PrometheusResponseExtractor{}, nil, nil, @@ -461,7 +461,7 @@ func TestResultsCacheRecent(t *testing.T) { c, resultscache.ConstSplitter(day), mockLimits{maxCacheFreshness: 10 * time.Minute}, - PrometheusCodec, + PrometheusCodecForRangeQueries, PrometheusResponseExtractor{}, nil, nil, @@ -572,7 +572,7 @@ func TestResultsCacheShouldCacheFunc(t *testing.T) { c, resultscache.ConstSplitter(day), mockLimits{maxCacheFreshness: 10 * time.Minute}, - PrometheusCodec, + PrometheusCodecForRangeQueries, PrometheusResponseExtractor{}, nil, tc.shouldCache, diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index ff03a8339cd6a..cd287a6ac4ab0 100644 --- a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -86,6 +86,21 @@ var ( }, }, }, + CacheInstantMetricResults: true, + InstantMetricQuerySplitAlign: true, + InstantMetricCacheConfig: InstantMetricCacheConfig{ + ResultsCacheConfig: base.ResultsCacheConfig{ + Config: resultscache.Config{ + CacheConfig: cache.Config{ + EmbeddedCache: cache.EmbeddedCacheConfig{ + Enabled: true, + MaxSizeMB: 1024, + TTL: 24 * time.Hour, + }, + }, + }, + }, + }, } testEngineOpts = logql.EngineOpts{ MaxLookBackPeriod: 30 * time.Second, @@ -334,6 +349,127 @@ func TestLogFilterTripperware(t *testing.T) { require.Equal(t, 0, *queryCount) } +func TestInstantQueryTripperwareResultCaching(t *testing.T) { + // Goal is to make sure the instant query tripperware returns same results with and without cache middleware. + // 1. Get result without cache middleware. + // 2. Get result with middelware (with splitting). Result should be same. + // 3. Make same query with middleware (this time hitting the cache). Result should be same. 
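+	// The split and cached paths must also preserve the instant (vector) result type: the
+	// merge-codec selection added in this change (PrometheusCodecForRangeQueries vs
+	// PrometheusCodecForInstantQueries) is what keeps merged instant results typed as vectors
+	// rather than matrices, and the ResultTypeVector assertions below check exactly that.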
+ + testLocal := testConfig + testLocal.ShardedQueries = true + testLocal.CacheResults = false + testLocal.CacheIndexStatsResults = false + testLocal.CacheInstantMetricResults = false + var l = fakeLimits{ + maxQueryParallelism: 1, + tsdbMaxQueryParallelism: 1, + maxQueryBytesRead: 1000, + maxQuerierBytesRead: 100, + queryTimeout: 1 * time.Minute, + maxSeries: 1, + } + tpw, stopper, err := NewMiddleware(testLocal, testEngineOpts, nil, util_log.Logger, l, config.SchemaConfig{Configs: testSchemasTSDB}, nil, false, nil, constants.Loki) + if stopper != nil { + defer stopper.Stop() + } + require.NoError(t, err) + + q := `sum by (job) (bytes_rate({cluster="dev-us-central-0"}[15m]))` + lreq := &LokiInstantRequest{ + Query: q, + Limit: 1000, + TimeTs: testTime.Add(-4 * time.Hour), + Direction: logproto.FORWARD, + Path: "/loki/api/v1/query", + Plan: &plan.QueryPlan{ + AST: syntax.MustParseExpr(q), + }, + } + + ctx := user.InjectOrgID(context.Background(), "1") + + // Test MaxQueryBytesRead limit + statsCount, statsHandler := indexStatsResult(logproto.IndexStatsResponse{Bytes: 2000}) + queryCount, queryHandler := counter() + h := getQueryAndStatsHandler(queryHandler, statsHandler) + _, err = tpw.Wrap(h).Do(ctx, lreq) + require.Error(t, err) + require.Equal(t, 1, *statsCount) + require.Equal(t, 0, *queryCount) + + // Test MaxQuerierBytesRead limit + statsCount, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 200}) + queryCount, queryHandler = counter() + h = getQueryAndStatsHandler(queryHandler, statsHandler) + _, err = tpw.Wrap(h).Do(ctx, lreq) + require.Error(t, err) + require.Equal(t, 2, *statsCount) + require.Equal(t, 0, *queryCount) + + // 1. Without cache middleware. + count, queryHandler := promqlResult(vector) + _, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 10}) + h = getQueryAndStatsHandler(queryHandler, statsHandler) + h = tpw.Wrap(h) + lokiResponse, err := h.Do(ctx, lreq) + require.Equal(t, 1, *count) + require.NoError(t, err) + + exp, err := ResultToResponse(logqlmodel.Result{ + Data: vector, + }, nil) + require.NoError(t, err) + expected := exp.(*LokiPromResponse) + + require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete := lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) + assertInstantSampleValues(t, expected, concrete) // assert actual sample values + + // 2. First time with caching enabled (no cache hit). + testLocal.CacheInstantMetricResults = true + l.instantMetricSplitDuration = map[string]time.Duration{ + // so making request [15] range, will have 2 subqueries aligned with [5m] giving total of [10m]. And 2 more subqueries for remaining [5m] aligning depending on exec time of the query. + "1": 5 * time.Minute, + } + tpw, stopper, err = NewMiddleware(testLocal, testEngineOpts, nil, util_log.Logger, l, config.SchemaConfig{Configs: testSchemasTSDB}, nil, false, nil, constants.Loki) + if stopper != nil { + defer stopper.Stop() + } + require.NoError(t, err) + + count, queryHandler = promqlResult(vector) + _, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 10}) + h = getQueryAndStatsHandler(queryHandler, statsHandler) + lokiResponse, err = tpw.Wrap(h).Do(ctx, lreq) + require.Equal(t, 4, *count) // split into 4 subqueries. like explained in `instantMetricSplitDuration` limits config. 
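+	// The expected response below is built with instantQueryResultWithCache, which folds the 4
+	// identical subquery samples into a single instant sample of (F * 4) / 900s for the 15m range,
+	// i.e. the value the wrapped tripperware is expected to return after merging the split results.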
+ require.NoError(t, err) + + exp, err = ResultToResponse(logqlmodel.Result{ + Data: instantQueryResultWithCache(*count, 15*time.Minute, vector[0]), + }, nil) + require.NoError(t, err) + expected = exp.(*LokiPromResponse) + + require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete = lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) + assertInstantSampleValues(t, expected, concrete) // assert actual sample values + + // 3. Second time with caching enabled (cache hit). + count, queryHandler = promqlResult(vector) + _, statsHandler = indexStatsResult(logproto.IndexStatsResponse{Bytes: 10}) + h = getQueryAndStatsHandler(queryHandler, statsHandler) + lokiResponse, err = tpw.Wrap(h).Do(ctx, lreq) + require.Equal(t, 0, *count) // no queries hit base handler, because all queries hit from cache. + require.NoError(t, err) + + require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete = lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) + assertInstantSampleValues(t, expected, concrete) // assert actual sample values +} + func TestInstantQueryTripperware(t *testing.T) { testShardingConfigNoCache := testConfig testShardingConfigNoCache.ShardedQueries = true @@ -357,7 +493,7 @@ func TestInstantQueryTripperware(t *testing.T) { lreq := &LokiInstantRequest{ Query: q, Limit: 1000, - TimeTs: testTime, + TimeTs: testTime.Add(-4 * time.Hour), // because vector data we return from mock handler has that time. Direction: logproto.FORWARD, Path: "/loki/api/v1/query", Plan: &plan.QueryPlan{ @@ -393,6 +529,8 @@ func TestInstantQueryTripperware(t *testing.T) { require.NoError(t, err) require.IsType(t, &LokiPromResponse{}, lokiResponse) + concrete := lokiResponse.(*LokiPromResponse) + require.Equal(t, loghttp.ResultTypeVector, concrete.Response.Data.ResultType) } func TestSeriesTripperware(t *testing.T) { @@ -1165,8 +1303,11 @@ func TestMetricsTripperware_SplitShardStats(t *testing.T) { AST: syntax.MustParseExpr(`sum by (app) (rate({app="foo"} |= "foo"[2h]))`), }, }, - expectedSplitStats: 2, // [2h] interval split by 1h configured split interval - expectedShardStats: 8, // 2 time splits * 4 row shards + // [2h] interval split by 1h configured split interval. + // Also since we split align(testConfig.InstantQuerySplitAlign=true) with split interval (1h). + // 1 subquery will be exactly [1h] and 2 more subqueries to align with `testTime` used in query's TimeTs. + expectedSplitStats: 3, + expectedShardStats: 12, // 3 time splits * 4 row shards }, { name: "instant query split not split", @@ -1405,7 +1546,7 @@ func counter() (*int, base.Handler) { lock.Lock() defer lock.Unlock() count++ - return base.NewEmptyPrometheusResponse(), nil + return base.NewEmptyPrometheusResponse(model.ValMatrix), nil }) } @@ -1473,6 +1614,37 @@ func seriesVolumeResult(v logproto.VolumeResponse) (*int, base.Handler) { }) } +// instantQueryResultWithCache used when instant query tripperware is created with split align and cache middleware. +// Assuming each subquery handler returns `val` sample, then this function returns overal result by combining all the subqueries sample values. 
+func instantQueryResultWithCache(split int, ts time.Duration, val promql.Sample) promql.Vector { + v := (val.F * float64(split)) / ts.Seconds() + return promql.Vector{ + promql.Sample{ + T: val.T, + F: v, + H: val.H, + Metric: val.Metric, + }, + } +} + +func assertInstantSampleValues(t *testing.T, exp *LokiPromResponse, got *LokiPromResponse) { + expR := exp.Response.Data.Result + gotR := got.Response.Data.Result + + expSamples := make([]logproto.LegacySample, 0) + for _, v := range expR { + expSamples = append(expSamples, v.Samples...) + } + + gotSamples := make([]logproto.LegacySample, 0) + for _, v := range gotR { + gotSamples = append(gotSamples, v.Samples...) + } + + require.Equal(t, expSamples, gotSamples) +} + type fakeHandler struct { count int lock sync.Mutex From a509871c558c36753b471aa93ecd78d706b1cf2e Mon Sep 17 00:00:00 2001 From: Ed Welch Date: Sun, 31 Mar 2024 22:14:21 -0400 Subject: [PATCH 41/54] chore: remove experimental flags for l2 cache and memcached "addresses" config (#12410) --- docs/sources/configure/_index.md | 15 ++++++++++++--- pkg/storage/chunk/cache/memcached_client.go | 6 ++---- pkg/storage/config/store.go | 8 ++++---- 3 files changed, 18 insertions(+), 11 deletions(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 82cb0ecadea03..cf81dd561cf37 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2404,6 +2404,10 @@ The `chunk_store_config` block configures how chunks will be cached and how long # The CLI flags prefix for this block configuration is: store.chunks-cache [chunk_cache_config: ] +# The cache block configures the cache backend. +# The CLI flags prefix for this block configuration is: store.chunks-cache-l2 +[chunk_cache_config_l2: ] + # Write dedupe cache is deprecated along with legacy index types (aws, # aws-dynamo, bigtable, bigtable-hashed, cassandra, gcp, gcp-columnkey, # grpc-store). @@ -2411,6 +2415,11 @@ The `chunk_store_config` block configures how chunks will be cached and how long # The CLI flags prefix for this block configuration is: store.index-cache-write [write_dedupe_cache_config: ] +# Chunks will be handed off to the L2 cache after this duration. 0 to disable L2 +# cache. +# CLI flag: -store.chunks-cache-l2.handoff +[l2_chunk_cache_handoff: | default = 0s] + # Cache index entries older than this period. 0 to disable. # CLI flag: -store.cache-lookups-older-than [cache_lookups_older_than: | default = 0s] @@ -4488,6 +4497,7 @@ The cache block configures the cache backend. 
The supported CLI flags `` - `frontend.series-results-cache` - `frontend.volume-results-cache` - `store.chunks-cache` +- `store.chunks-cache-l2` - `store.index-cache-read` - `store.index-cache-write` @@ -4534,9 +4544,8 @@ memcached_client: # CLI flag: -.memcached.service [service: | default = "memcached"] - # EXPERIMENTAL: Comma separated addresses list in DNS Service Discovery - # format: - # https://cortexmetrics.io/docs/configuration/arguments/#dns-service-discovery + # Comma separated addresses list in DNS Service Discovery format: + # https://grafana.com/docs/mimir/latest/configure/about-dns-service-discovery/#supported-discovery-modes # CLI flag: -.memcached.addresses [addresses: | default = ""] diff --git a/pkg/storage/chunk/cache/memcached_client.go b/pkg/storage/chunk/cache/memcached_client.go index ca355e8f4e244..f2dc35bbe08f5 100644 --- a/pkg/storage/chunk/cache/memcached_client.go +++ b/pkg/storage/chunk/cache/memcached_client.go @@ -22,7 +22,6 @@ import ( "github.com/sony/gobreaker" "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" ) // MemcachedClient interface exists for mocking memcacheClient. @@ -75,7 +74,7 @@ type memcachedClient struct { type MemcachedClientConfig struct { Host string `yaml:"host"` Service string `yaml:"service"` - Addresses string `yaml:"addresses"` // EXPERIMENTAL. + Addresses string `yaml:"addresses"` Timeout time.Duration `yaml:"timeout"` MaxIdleConns int `yaml:"max_idle_conns"` MaxItemSize int `yaml:"max_item_size"` @@ -96,7 +95,7 @@ type MemcachedClientConfig struct { func (cfg *MemcachedClientConfig) RegisterFlagsWithPrefix(prefix, description string, f *flag.FlagSet) { f.StringVar(&cfg.Host, prefix+"memcached.hostname", "", description+"Hostname for memcached service to use. 
If empty and if addresses is unset, no memcached will be used.") f.StringVar(&cfg.Service, prefix+"memcached.service", "memcached", description+"SRV service used to discover memcache servers.") - f.StringVar(&cfg.Addresses, prefix+"memcached.addresses", "", description+"EXPERIMENTAL: Comma separated addresses list in DNS Service Discovery format: https://cortexmetrics.io/docs/configuration/arguments/#dns-service-discovery") + f.StringVar(&cfg.Addresses, prefix+"memcached.addresses", "", description+"Comma separated addresses list in DNS Service Discovery format: https://grafana.com/docs/mimir/latest/configure/about-dns-service-discovery/#supported-discovery-modes") f.IntVar(&cfg.MaxIdleConns, prefix+"memcached.max-idle-conns", 16, description+"Maximum number of idle connections in pool.") f.DurationVar(&cfg.Timeout, prefix+"memcached.timeout", 100*time.Millisecond, description+"Maximum time to wait before giving up on memcached requests.") f.DurationVar(&cfg.UpdateInterval, prefix+"memcached.update-interval", 1*time.Minute, description+"Period with which to poll DNS for memcache servers.") @@ -180,7 +179,6 @@ func NewMemcachedClient(cfg MemcachedClientConfig, name string, r prometheus.Reg } if len(cfg.Addresses) > 0 { - util_log.WarnExperimentalUse("DNS-based memcached service discovery", logger) newClient.addresses = strings.Split(cfg.Addresses, ",") } diff --git a/pkg/storage/config/store.go b/pkg/storage/config/store.go index 75bdaa2ace8dc..14218bb9cfb12 100644 --- a/pkg/storage/config/store.go +++ b/pkg/storage/config/store.go @@ -11,10 +11,10 @@ import ( type ChunkStoreConfig struct { ChunkCacheConfig cache.Config `yaml:"chunk_cache_config"` - ChunkCacheConfigL2 cache.Config `yaml:"chunk_cache_config_l2" doc:"hidden"` + ChunkCacheConfigL2 cache.Config `yaml:"chunk_cache_config_l2"` WriteDedupeCacheConfig cache.Config `yaml:"write_dedupe_cache_config" doc:"description=Write dedupe cache is deprecated along with legacy index types (aws, aws-dynamo, bigtable, bigtable-hashed, cassandra, gcp, gcp-columnkey, grpc-store).\nConsider using TSDB index which does not require a write dedupe cache."` - L2ChunkCacheHandoff time.Duration `yaml:"l2_chunk_cache_handoff" doc:"hidden"` + L2ChunkCacheHandoff time.Duration `yaml:"l2_chunk_cache_handoff"` CacheLookupsOlderThan model.Duration `yaml:"cache_lookups_older_than"` // Not visible in yaml because the setting shouldn't be common between ingesters and queriers. @@ -34,8 +34,8 @@ func (cfg *ChunkStoreConfig) ChunkCacheStubs() bool { // RegisterFlags adds the flags required to configure this flag set. func (cfg *ChunkStoreConfig) RegisterFlags(f *flag.FlagSet) { cfg.ChunkCacheConfig.RegisterFlagsWithPrefix("store.chunks-cache.", "", f) - cfg.ChunkCacheConfigL2.RegisterFlagsWithPrefix("experimental.store.chunks-cache-l2.", "", f) - f.DurationVar(&cfg.L2ChunkCacheHandoff, "experimental.store.chunks-cache-l2.handoff", 0, "Experimental, subject to change or removal. Chunks will be handed off to the L2 cache after this duration. 0 to disable L2 cache.") + cfg.ChunkCacheConfigL2.RegisterFlagsWithPrefix("store.chunks-cache-l2.", "", f) + f.DurationVar(&cfg.L2ChunkCacheHandoff, "store.chunks-cache-l2.handoff", 0, "Chunks will be handed off to the L2 cache after this duration. 
0 to disable L2 cache.") f.BoolVar(&cfg.chunkCacheStubs, "store.chunks-cache.cache-stubs", false, "If true, don't write the full chunk to cache, just a stub entry.") cfg.WriteDedupeCacheConfig.RegisterFlagsWithPrefix("store.index-cache-write.", "", f) From 0b7ff4817545dd6326042e7e6d31a95681aa1cdd Mon Sep 17 00:00:00 2001 From: Sandeep Sukhani Date: Mon, 1 Apr 2024 14:21:50 +0530 Subject: [PATCH 42/54] chore: delete request processing improvements (#12259) --- docs/sources/configure/_index.md | 6 +- pkg/compactor/compactor.go | 2 +- .../deletion/delete_requests_manager.go | 80 ++++-- .../deletion/delete_requests_manager_test.go | 270 +++++++++++++++--- .../deletion/grpc_request_handler_test.go | 16 +- pkg/compactor/deletion/request_handler.go | 38 +-- .../deletion/request_handler_test.go | 41 ++- .../deletion/tenant_delete_requests_client.go | 5 + .../tenant_delete_requests_client_test.go | 12 +- .../deletion/tenant_request_handler_test.go | 40 ++- 10 files changed, 402 insertions(+), 108 deletions(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index cf81dd561cf37..ab9ab42c18703 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2487,9 +2487,9 @@ The `compactor` block configures the compactor component, which compacts index s # CLI flag: -compactor.delete-request-cancel-period [delete_request_cancel_period: | default = 24h] -# Constrain the size of any single delete request. When a delete request > -# delete_max_interval is input, the request is sharded into smaller requests of -# no more than delete_max_interval +# Constrain the size of any single delete request with line filters. When a +# delete request > delete_max_interval is input, the request is sharded into +# smaller requests of no more than delete_max_interval # CLI flag: -compactor.delete-max-interval [delete_max_interval: | default = 24h] diff --git a/pkg/compactor/compactor.go b/pkg/compactor/compactor.go index 75bd575e2c77c..6521983729b9d 100644 --- a/pkg/compactor/compactor.go +++ b/pkg/compactor/compactor.go @@ -102,7 +102,7 @@ func (cfg *Config) RegisterFlags(f *flag.FlagSet) { f.StringVar(&cfg.DeleteRequestStoreKeyPrefix, "compactor.delete-request-store.key-prefix", "index/", "Path prefix for storing delete requests.") f.IntVar(&cfg.DeleteBatchSize, "compactor.delete-batch-size", 70, "The max number of delete requests to run per compaction cycle.") f.DurationVar(&cfg.DeleteRequestCancelPeriod, "compactor.delete-request-cancel-period", 24*time.Hour, "Allow cancellation of delete request until duration after they are created. Data would be deleted only after delete requests have been older than this duration. Ideally this should be set to at least 24h.") - f.DurationVar(&cfg.DeleteMaxInterval, "compactor.delete-max-interval", 24*time.Hour, "Constrain the size of any single delete request. When a delete request > delete_max_interval is input, the request is sharded into smaller requests of no more than delete_max_interval") + f.DurationVar(&cfg.DeleteMaxInterval, "compactor.delete-max-interval", 24*time.Hour, "Constrain the size of any single delete request with line filters. 
When a delete request > delete_max_interval is input, the request is sharded into smaller requests of no more than delete_max_interval") f.DurationVar(&cfg.RetentionTableTimeout, "compactor.retention-table-timeout", 0, "The maximum amount of time to spend running retention and deletion on any given table in the index.") f.IntVar(&cfg.MaxCompactionParallelism, "compactor.max-compaction-parallelism", 1, "Maximum number of tables to compact in parallel. While increasing this value, please make sure compactor has enough disk space allocated to be able to store and compact as many tables.") f.IntVar(&cfg.UploadParallelism, "compactor.upload-parallelism", 10, "Number of upload/remove operations to execute in parallel when finalizing a compaction. NOTE: This setting is per compaction operation, which can be executed in parallel. The upper bound on the number of concurrent uploads is upload_parallelism * max_compaction_parallelism.") diff --git a/pkg/compactor/deletion/delete_requests_manager.go b/pkg/compactor/deletion/delete_requests_manager.go index 0e22439dd2a6b..c18d1b032ba73 100644 --- a/pkg/compactor/deletion/delete_requests_manager.go +++ b/pkg/compactor/deletion/delete_requests_manager.go @@ -126,10 +126,25 @@ func (d *DeleteRequestsManager) loadDeleteRequestsToProcess() error { return err } + reqCount := 0 for i := range deleteRequests { deleteRequest := deleteRequests[i] - if i >= d.batchSize { - logBatchTruncation(i, len(deleteRequests)) + maxRetentionInterval := getMaxRetentionInterval(deleteRequest.UserID, d.limits) + // retention interval 0 means retain the data forever + if maxRetentionInterval != 0 { + oldestRetainedLogTimestamp := model.Now().Add(-maxRetentionInterval) + if deleteRequest.StartTime.Before(oldestRetainedLogTimestamp) && deleteRequest.EndTime.Before(oldestRetainedLogTimestamp) { + level.Info(util_log.Logger).Log( + "msg", "Marking delete request with interval beyond retention period as processed", + "delete_request_id", deleteRequest.RequestID, + "user", deleteRequest.UserID, + ) + d.markRequestAsProcessed(deleteRequest) + continue + } + } + if reqCount >= d.batchSize { + logBatchTruncation(reqCount, len(deleteRequests)) break } @@ -149,6 +164,7 @@ func (d *DeleteRequestsManager) loadDeleteRequestsToProcess() error { if deleteRequest.EndTime > ur.requestsInterval.End { ur.requestsInterval.End = deleteRequest.EndTime } + reqCount++ } return nil @@ -305,6 +321,28 @@ func (d *DeleteRequestsManager) MarkPhaseTimedOut() { d.deleteRequestsToProcess = map[string]*userDeleteRequests{} } +func (d *DeleteRequestsManager) markRequestAsProcessed(deleteRequest DeleteRequest) { + if err := d.deleteRequestsStore.UpdateStatus(context.Background(), deleteRequest, StatusProcessed); err != nil { + level.Error(util_log.Logger).Log( + "msg", "failed to mark delete request for user as processed", + "delete_request_id", deleteRequest.RequestID, + "sequence_num", deleteRequest.SequenceNum, + "user", deleteRequest.UserID, + "err", err, + "deleted_lines", deleteRequest.DeletedLines, + ) + } else { + level.Info(util_log.Logger).Log( + "msg", "delete request for user marked as processed", + "delete_request_id", deleteRequest.RequestID, + "sequence_num", deleteRequest.SequenceNum, + "user", deleteRequest.UserID, + "deleted_lines", deleteRequest.DeletedLines, + ) + d.metrics.deleteRequestsProcessedTotal.WithLabelValues(deleteRequest.UserID).Inc() + } +} + func (d *DeleteRequestsManager) MarkPhaseFinished() { d.deleteRequestsToProcessMtx.Lock() defer d.deleteRequestsToProcessMtx.Unlock() @@ 
-315,25 +353,7 @@ func (d *DeleteRequestsManager) MarkPhaseFinished() { } for _, deleteRequest := range userDeleteRequests.requests { - if err := d.deleteRequestsStore.UpdateStatus(context.Background(), *deleteRequest, StatusProcessed); err != nil { - level.Error(util_log.Logger).Log( - "msg", "failed to mark delete request for user as processed", - "delete_request_id", deleteRequest.RequestID, - "sequence_num", deleteRequest.SequenceNum, - "user", deleteRequest.UserID, - "err", err, - "deleted_lines", deleteRequest.DeletedLines, - ) - } else { - level.Info(util_log.Logger).Log( - "msg", "delete request for user marked as processed", - "delete_request_id", deleteRequest.RequestID, - "sequence_num", deleteRequest.SequenceNum, - "user", deleteRequest.UserID, - "deleted_lines", deleteRequest.DeletedLines, - ) - } - d.metrics.deleteRequestsProcessedTotal.WithLabelValues(deleteRequest.UserID).Inc() + d.markRequestAsProcessed(*deleteRequest) } } } @@ -355,3 +375,21 @@ func (d *DeleteRequestsManager) IntervalMayHaveExpiredChunks(_ model.Interval, u func (d *DeleteRequestsManager) DropFromIndex(_ retention.ChunkEntry, _ model.Time, _ model.Time) bool { return false } + +func getMaxRetentionInterval(userID string, limits Limits) time.Duration { + maxRetention := model.Duration(limits.RetentionPeriod(userID)) + if maxRetention == 0 { + return 0 + } + + for _, streamRetention := range limits.StreamRetention(userID) { + if streamRetention.Period == 0 { + return 0 + } + if streamRetention.Period > maxRetention { + maxRetention = streamRetention.Period + } + } + + return time.Duration(maxRetention) +} diff --git a/pkg/compactor/deletion/delete_requests_manager_test.go b/pkg/compactor/deletion/delete_requests_manager_test.go index c2777c9801b7e..44285bb890b4e 100644 --- a/pkg/compactor/deletion/delete_requests_manager_test.go +++ b/pkg/compactor/deletion/delete_requests_manager_test.go @@ -41,12 +41,13 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { } for _, tc := range []struct { - name string - deletionMode deletionmode.Mode - deleteRequestsFromStore []DeleteRequest - batchSize int - expectedResp resp - expectedDeletionRangeByUser map[string]model.Interval + name string + deletionMode deletionmode.Mode + deleteRequestsFromStore []DeleteRequest + batchSize int + expectedResp resp + expectedDeletionRangeByUser map[string]model.Interval + expectedRequestsMarkedAsProcessed []int }{ { name: "no delete requests", @@ -66,6 +67,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -77,6 +79,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "no relevant delete requests", @@ -88,6 +91,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -99,6 +103,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "delete request not matching labels", @@ -110,6 +115,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: `{fizz="buzz"}`, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -121,6 +127,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, 
{ name: "whole chunk deleted by single request", @@ -132,6 +139,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -143,6 +151,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request with line filters", @@ -154,6 +163,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -168,6 +178,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request with structured metadata filters", @@ -179,6 +190,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -193,6 +205,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "whole chunk deleted by single request with line and structured metadata filters", @@ -204,6 +217,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineAndStructuredMetadataFilters, StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -218,6 +232,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "deleted interval out of range", @@ -229,6 +244,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, }, expectedResp: resp{ @@ -240,6 +256,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now.Add(-24 * time.Hour), }, }, + expectedRequestsMarkedAsProcessed: []int{0}, }, { name: "deleted interval out of range(with multiple user requests)", @@ -251,12 +268,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: "different-user", Query: lblFoo.String(), StartTime: now.Add(-24 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -272,6 +291,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests with one deleting the whole chunk", @@ -283,12 +303,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-12 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -300,6 +322,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests with line filters and one deleting the whole chunk", @@ -311,12 +334,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-48 * time.Hour), EndTime: 
now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -331,6 +356,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests with structured metadata filters and one deleting the whole chunk", @@ -342,12 +368,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-48 * time.Hour), EndTime: now.Add(-24 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -362,6 +390,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple delete requests causing multiple holes", @@ -373,24 +402,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -412,6 +445,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2, 3}, }, { name: "multiple overlapping requests deleting the whole chunk", @@ -423,12 +457,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-6 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-8 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -443,6 +479,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple overlapping requests with line filters deleting the whole chunk", @@ -454,12 +491,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-6 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-8 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -474,6 +513,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple overlapping requests with structured metadata filters deleting the whole chunk", @@ -485,12 +525,14 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-6 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-8 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -505,6 +547,7 @@ 
func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, { name: "multiple non-overlapping requests deleting the whole chunk", @@ -516,18 +559,21 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-12 * time.Hour), EndTime: now.Add(-6*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-4*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-4 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -542,6 +588,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2}, }, { name: "multiple non-overlapping requests with line filter deleting the whole chunk", @@ -553,18 +600,21 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithLineFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now.Add(-6*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-4*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithLineFilters, StartTime: now.Add(-4 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -579,6 +629,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2}, }, { name: "multiple non-overlapping requests with structured metadata filter deleting the whole chunk", @@ -590,18 +641,21 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-12 * time.Hour), EndTime: now.Add(-6*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-4*time.Hour) - 1, + Status: StatusReceived, }, { UserID: testUserID, Query: streamSelectorWithStructuredMetadataFilters, StartTime: now.Add(-4 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -616,6 +670,7 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now, }, }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 2}, }, { name: "deletes are disabled", @@ -627,24 +682,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -661,24 +720,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), 
EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, }, expectedResp: resp{ @@ -695,24 +758,28 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { Query: lblFoo.String(), StartTime: now.Add(-2 * time.Hour), EndTime: now, + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-6 * time.Hour), EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-10 * time.Hour), EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, }, { UserID: testUserID, Query: lblFoo.String(), StartTime: now.Add(-13 * time.Hour), EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, }, }, expectedResp: resp{ @@ -733,10 +800,108 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { End: now.Add(-8 * time.Hour), }, }, + expectedRequestsMarkedAsProcessed: []int{2, 3}, + }, + { + name: "Deletes beyond retention are marked as processed straight away without being batched for processing", + deletionMode: deletionmode.FilterAndDelete, + batchSize: 2, + deleteRequestsFromStore: []DeleteRequest{ + { + UserID: "different-user", + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-2 * time.Hour), + EndTime: now, + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-6 * time.Hour), + EndTime: now.Add(-5 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-10 * time.Hour), + EndTime: now.Add(-8 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-13 * time.Hour), + EndTime: now.Add(-11 * time.Hour), + Status: StatusReceived, + }, + }, + expectedResp: resp{ + isExpired: true, + expectedFilter: func(ts time.Time, s string, _ ...labels.Label) bool { + tsUnixNano := ts.UnixNano() + if (now.Add(-13*time.Hour).UnixNano() <= tsUnixNano && tsUnixNano <= now.Add(-11*time.Hour).UnixNano()) || + (now.Add(-10*time.Hour).UnixNano() <= tsUnixNano && tsUnixNano <= now.Add(-8*time.Hour).UnixNano()) { + return true + } + + return false + }, + }, + expectedDeletionRangeByUser: map[string]model.Interval{ + testUserID: { + Start: now.Add(-13 * time.Hour), + End: now.Add(-8 * time.Hour), + }, + }, + expectedRequestsMarkedAsProcessed: []int{0, 1, 4, 5}, + }, + { + name: "All deletes beyond retention", + deletionMode: deletionmode.FilterAndDelete, + batchSize: 2, + deleteRequestsFromStore: []DeleteRequest{ + { + UserID: "different-user", + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + { + UserID: testUserID, + Query: lblFoo.String(), + StartTime: now.Add(-14 * 24 * time.Hour), + EndTime: now.Add(-10 * 24 * time.Hour), + Status: StatusReceived, + }, + }, + expectedResp: resp{ + isExpired: false, + }, + expectedRequestsMarkedAsProcessed: []int{0, 1}, }, } { t.Run(tc.name, func(t *testing.T) { - mgr := NewDeleteRequestsManager(&mockDeleteRequestsStore{deleteRequests: 
tc.deleteRequestsFromStore}, time.Hour, tc.batchSize, &fakeLimits{mode: tc.deletionMode.String()}, nil) + mockDeleteRequestsStore := &mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore} + mgr := NewDeleteRequestsManager(mockDeleteRequestsStore, time.Hour, tc.batchSize, &fakeLimits{defaultLimit: limit{ + retentionPeriod: 7 * 24 * time.Hour, + deletionMode: tc.deletionMode.String(), + }}, nil) require.NoError(t, mgr.loadDeleteRequestsToProcess()) for _, deleteRequests := range mgr.deleteRequestsToProcess { @@ -749,28 +914,38 @@ func TestDeleteRequestsManager_Expired(t *testing.T) { require.Equal(t, tc.expectedResp.isExpired, isExpired) if tc.expectedResp.expectedFilter == nil { require.Nil(t, filterFunc) - return - } - require.NotNil(t, filterFunc) + } else { + require.NotNil(t, filterFunc) - for start := chunkEntry.From; start <= chunkEntry.Through; start = start.Add(time.Minute) { - line := "foo bar" - if start.Time().Minute()%2 == 1 { - line = "fizz buzz" + for start := chunkEntry.From; start <= chunkEntry.Through; start = start.Add(time.Minute) { + line := "foo bar" + if start.Time().Minute()%2 == 1 { + line = "fizz buzz" + } + // mix of empty, ding=dong and ping=pong as structured metadata + var structuredMetadata []labels.Label + if start.Time().Minute()%3 == 0 { + structuredMetadata = []labels.Label{{Name: lblPing, Value: lblPong}} + } else if start.Time().Minute()%2 == 0 { + structuredMetadata = []labels.Label{{Name: "ting", Value: "tong"}} + } + require.Equal(t, tc.expectedResp.expectedFilter(start.Time(), line, structuredMetadata...), filterFunc(start.Time(), line, structuredMetadata...), "line", line, "time", start.Time(), "now", now.Time()) } - // mix of empty, ding=dong and ping=pong as structured metadata - var structuredMetadata []labels.Label - if start.Time().Minute()%3 == 0 { - structuredMetadata = []labels.Label{{Name: lblPing, Value: lblPong}} - } else if start.Time().Minute()%2 == 0 { - structuredMetadata = []labels.Label{{Name: "ting", Value: "tong"}} + + require.Equal(t, len(tc.expectedDeletionRangeByUser), len(mgr.deleteRequestsToProcess)) + for userID, dr := range tc.expectedDeletionRangeByUser { + require.Equal(t, dr, mgr.deleteRequestsToProcess[userID].requestsInterval) } - require.Equal(t, tc.expectedResp.expectedFilter(start.Time(), line, structuredMetadata...), filterFunc(start.Time(), line, structuredMetadata...), "line", line, "time", start.Time(), "now", now.Time()) } - require.Equal(t, len(tc.expectedDeletionRangeByUser), len(mgr.deleteRequestsToProcess)) - for userID, dr := range tc.expectedDeletionRangeByUser { - require.Equal(t, dr, mgr.deleteRequestsToProcess[userID].requestsInterval) + mgr.MarkPhaseFinished() + + processedRequests, err := mockDeleteRequestsStore.GetDeleteRequestsByStatus(context.Background(), StatusProcessed) + require.NoError(t, err) + require.Len(t, processedRequests, len(tc.expectedRequestsMarkedAsProcessed)) + + for i, reqIdx := range tc.expectedRequestsMarkedAsProcessed { + require.True(t, requestsAreEqual(tc.deleteRequestsFromStore[reqIdx], processedRequests[i])) } }) } @@ -782,18 +957,18 @@ func TestDeleteRequestsManager_IntervalMayHaveExpiredChunks(t *testing.T) { hasChunks bool user string }{ - {[]DeleteRequest{{Query: `0`, UserID: "test-user", StartTime: 0, EndTime: 100}}, true, "test-user"}, - {[]DeleteRequest{{Query: `1`, UserID: "test-user", StartTime: 200, EndTime: 400}}, true, "test-user"}, - {[]DeleteRequest{{Query: `2`, UserID: "test-user", StartTime: 400, EndTime: 500}}, true, "test-user"}, - 
{[]DeleteRequest{{Query: `3`, UserID: "test-user", StartTime: 500, EndTime: 700}}, true, "test-user"}, - {[]DeleteRequest{{Query: `3`, UserID: "other-user", StartTime: 500, EndTime: 700}}, false, "test-user"}, - {[]DeleteRequest{{Query: `4`, UserID: "test-user", StartTime: 700, EndTime: 900}}, true, "test-user"}, - {[]DeleteRequest{{Query: `4`, UserID: "", StartTime: 700, EndTime: 900}}, true, ""}, + {[]DeleteRequest{{Query: `0`, UserID: "test-user", StartTime: 0, EndTime: 100, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `1`, UserID: "test-user", StartTime: 200, EndTime: 400, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `2`, UserID: "test-user", StartTime: 400, EndTime: 500, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `3`, UserID: "test-user", StartTime: 500, EndTime: 700, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `3`, UserID: "other-user", StartTime: 500, EndTime: 700, Status: StatusReceived}}, false, "test-user"}, + {[]DeleteRequest{{Query: `4`, UserID: "test-user", StartTime: 700, EndTime: 900, Status: StatusReceived}}, true, "test-user"}, + {[]DeleteRequest{{Query: `4`, UserID: "", StartTime: 700, EndTime: 900, Status: StatusReceived}}, true, ""}, {[]DeleteRequest{}, false, ""}, } for _, tc := range tt { - mgr := NewDeleteRequestsManager(&mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore}, time.Hour, 70, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}, nil) + mgr := NewDeleteRequestsManager(&mockDeleteRequestsStore{deleteRequests: tc.deleteRequestsFromStore}, time.Hour, 70, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}, nil) require.NoError(t, mgr.loadDeleteRequestsToProcess()) interval := model.Interval{Start: 300, End: 600} @@ -823,8 +998,14 @@ type mockDeleteRequestsStore struct { genNumber string } -func (m *mockDeleteRequestsStore) GetDeleteRequestsByStatus(_ context.Context, _ DeleteRequestStatus) ([]DeleteRequest, error) { - return m.deleteRequests, nil +func (m *mockDeleteRequestsStore) GetDeleteRequestsByStatus(_ context.Context, status DeleteRequestStatus) ([]DeleteRequest, error) { + reqs := make([]DeleteRequest, 0, len(m.deleteRequests)) + for i := range m.deleteRequests { + if m.deleteRequests[i].Status == status { + reqs = append(reqs, m.deleteRequests[i]) + } + } + return reqs, nil } func (m *mockDeleteRequestsStore) AddDeleteRequestGroup(_ context.Context, reqs []DeleteRequest) ([]DeleteRequest, error) { @@ -854,3 +1035,24 @@ func (m *mockDeleteRequestsStore) GetAllDeleteRequestsForUser(_ context.Context, func (m *mockDeleteRequestsStore) GetCacheGenerationNumber(_ context.Context, _ string) (string, error) { return m.genNumber, m.getErr } + +func (m *mockDeleteRequestsStore) UpdateStatus(_ context.Context, req DeleteRequest, newStatus DeleteRequestStatus) error { + for i := range m.deleteRequests { + if requestsAreEqual(m.deleteRequests[i], req) { + m.deleteRequests[i].Status = newStatus + } + } + + return nil +} + +func requestsAreEqual(req1, req2 DeleteRequest) bool { + if req1.UserID == req2.UserID && + req1.Query == req2.Query && + req1.StartTime == req2.StartTime && + req1.EndTime == req2.EndTime { + return true + } + + return false +} diff --git a/pkg/compactor/deletion/grpc_request_handler_test.go b/pkg/compactor/deletion/grpc_request_handler_test.go index f0b2002e8d590..c7171e6ac3406 100644 --- a/pkg/compactor/deletion/grpc_request_handler_test.go +++ 
b/pkg/compactor/deletion/grpc_request_handler_test.go @@ -74,7 +74,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { t.Run("it gets all the delete requests for the user", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.getAllResult = []DeleteRequest{{RequestID: "test-request-1", Status: StatusReceived}, {RequestID: "test-request-2", Status: StatusReceived}} - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -96,7 +96,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { {RequestID: "test-request-2", CreatedAt: now.Add(time.Minute), StartTime: now.Add(30 * time.Minute), EndTime: now.Add(90 * time.Minute)}, {RequestID: "test-request-1", CreatedAt: now, StartTime: now.Add(time.Hour), EndTime: now.Add(2 * time.Hour)}, } - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -124,7 +124,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { {RequestID: "test-request-2", CreatedAt: now.Add(time.Minute), Status: StatusProcessed}, {RequestID: "test-request-3", CreatedAt: now.Add(2 * time.Minute), Status: StatusReceived}, } - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -145,7 +145,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { t.Run("error getting from store", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.getAllErr = errors.New("something bad") - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -162,7 +162,7 @@ func TestGRPCGetDeleteRequests(t *testing.T) { t.Run("validation", func(t *testing.T) { t.Run("no org id", func(t *testing.T) { - h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -178,7 +178,7 @@ func TestGRPCGetCacheGenNumbers(t *testing.T) { t.Run("get gen number", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.genNumber = "123" - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -195,7 +195,7 @@ func TestGRPCGetCacheGenNumbers(t *testing.T) { t.Run("error getting from store", func(t *testing.T) { store := &mockDeleteRequestsStore{} store.getErr = errors.New("something bad") - h := NewGRPCRequestHandler(store, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(store, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) 
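// fakeLimits is restructured in this series of hunks: the flat `mode` string becomes a
// per-tenant `limit` value (deletionMode, retentionPeriod, streamRetention) with a
// defaultLimit fallback via getLimitForUser, matching the RetentionPeriod and
// StreamRetention methods added to the deletion Limits interface in
// tenant_delete_requests_client.go; these handler tests only set deletionMode.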
grpcClient, closer := server(t, h) t.Cleanup(closer) @@ -212,7 +212,7 @@ func TestGRPCGetCacheGenNumbers(t *testing.T) { t.Run("validation", func(t *testing.T) { t.Run("no org id", func(t *testing.T) { - h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{mode: deletionmode.FilterAndDelete.String()}) + h := NewGRPCRequestHandler(&mockDeleteRequestsStore{}, &fakeLimits{defaultLimit: limit{deletionMode: deletionmode.FilterAndDelete.String()}}) grpcClient, closer := server(t, h) t.Cleanup(closer) diff --git a/pkg/compactor/deletion/request_handler.go b/pkg/compactor/deletion/request_handler.go index db5a22a83d544..458279d3b8523 100644 --- a/pkg/compactor/deletion/request_handler.go +++ b/pkg/compactor/deletion/request_handler.go @@ -10,14 +10,13 @@ import ( "sort" "time" - "github.com/grafana/loki/pkg/util" - "github.com/go-kit/log/level" + "github.com/grafana/dskit/tenant" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/dskit/tenant" - + "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/pkg/util" util_log "github.com/grafana/loki/pkg/util/log" ) @@ -49,7 +48,7 @@ func (dm *DeleteRequestHandler) AddDeleteRequestHandler(w http.ResponseWriter, r } params := r.URL.Query() - query, err := query(params) + query, parsedExpr, err := query(params) if err != nil { http.Error(w, err.Error(), http.StatusBadRequest) return @@ -67,13 +66,19 @@ func (dm *DeleteRequestHandler) AddDeleteRequestHandler(w http.ResponseWriter, r return } - interval, err := dm.interval(params, startTime, endTime) - if err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return + var shardByInterval time.Duration + if parsedExpr.HasFilter() { + var err error + shardByInterval, err = dm.interval(params, startTime, endTime) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + } else { + shardByInterval = endTime.Sub(startTime) + time.Minute } - deleteRequests := shardDeleteRequestsByInterval(startTime, endTime, query, userID, interval) + deleteRequests := shardDeleteRequestsByInterval(startTime, endTime, query, userID, shardByInterval) createdDeleteRequests, err := dm.deleteRequestsStore.AddDeleteRequestGroup(ctx, deleteRequests) if err != nil { level.Error(util_log.Logger).Log("msg", "error adding delete request to the store", "err", err) @@ -92,7 +97,7 @@ func (dm *DeleteRequestHandler) AddDeleteRequestHandler(w http.ResponseWriter, r "delete_request_id", createdDeleteRequests[0].RequestID, "user", userID, "query", query, - "interval", interval.String(), + "interval", shardByInterval.String(), ) dm.metrics.deleteRequestsReceivedTotal.WithLabelValues(userID).Inc() @@ -315,17 +320,18 @@ func (dm *DeleteRequestHandler) GetCacheGenerationNumberHandler(w http.ResponseW } } -func query(params url.Values) (string, error) { +func query(params url.Values) (string, syntax.LogSelectorExpr, error) { query := params.Get("query") if len(query) == 0 { - return "", errors.New("query not set") + return "", nil, errors.New("query not set") } - if _, err := parseDeletionQuery(query); err != nil { - return "", err + parsedExpr, err := parseDeletionQuery(query) + if err != nil { + return "", nil, err } - return query, nil + return query, parsedExpr, nil } func startTime(params url.Values) (model.Time, error) { diff --git a/pkg/compactor/deletion/request_handler_test.go b/pkg/compactor/deletion/request_handler_test.go index 1aaf0b582b366..58e2ffd13c328 100644 --- a/pkg/compactor/deletion/request_handler_test.go 
+++ b/pkg/compactor/deletion/request_handler_test.go @@ -10,14 +10,12 @@ import ( "testing" "time" + "github.com/grafana/dskit/user" "github.com/pkg/errors" - "github.com/prometheus/common/model" "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/util" - - "github.com/grafana/dskit/user" ) func TestAddDeleteRequestHandler(t *testing.T) { @@ -50,14 +48,14 @@ func TestAddDeleteRequestHandler(t *testing.T) { require.Equal(t, w.Code, http.StatusInternalServerError) }) - t.Run("it shards deletes based on a query param", func(t *testing.T) { + t.Run("it only shards deletes with line filter based on a query param", func(t *testing.T) { store := &mockDeleteRequestsStore{} h := NewDeleteRequestHandler(store, 0, nil) from := model.TimeFromUnix(model.Now().Add(-3 * time.Hour).Unix()) to := model.TimeFromUnix(from.Add(3 * time.Hour).Unix()) - req := buildRequest("org-id", `{foo="bar"}`, unixString(from), unixString(to)) + req := buildRequest("org-id", `{foo="bar"} |= "foo"`, unixString(from), unixString(to)) params := req.URL.Query() params.Set("max_interval", "1h") req.URL.RawQuery = params.Encode() @@ -87,7 +85,7 @@ func TestAddDeleteRequestHandler(t *testing.T) { from := model.TimeFromUnix(model.Now().Add(-3 * time.Hour).Unix()) to := model.TimeFromUnix(from.Add(3 * time.Hour).Unix()) - req := buildRequest("org-id", `{foo="bar"}`, unixString(from), unixString(to)) + req := buildRequest("org-id", `{foo="bar"} |= "foo"`, unixString(from), unixString(to)) w := httptest.NewRecorder() h.AddDeleteRequestHandler(w, req) @@ -107,6 +105,27 @@ func TestAddDeleteRequestHandler(t *testing.T) { } }) + t.Run("it does not shard deletes without line filter", func(t *testing.T) { + store := &mockDeleteRequestsStore{} + h := NewDeleteRequestHandler(store, 0, nil) + + from := model.TimeFromUnix(model.Now().Add(-3 * time.Hour).Unix()) + to := model.TimeFromUnix(from.Add(3 * time.Hour).Unix()) + + req := buildRequest("org-id", `{foo="bar"}`, unixString(from), unixString(to)) + params := req.URL.Query() + params.Set("max_interval", "1h") + req.URL.RawQuery = params.Encode() + + w := httptest.NewRecorder() + h.AddDeleteRequestHandler(w, req) + + require.Equal(t, w.Code, http.StatusNoContent) + require.Len(t, store.addReqs, 1) + require.Equal(t, from, store.addReqs[0].StartTime) + require.Equal(t, to, store.addReqs[0].EndTime) + }) + t.Run("it works with RFC3339", func(t *testing.T) { store := &mockDeleteRequestsStore{} h := NewDeleteRequestHandler(store, 0, nil) @@ -166,11 +185,11 @@ func TestAddDeleteRequestHandler(t *testing.T) { {"org-id", `{foo="bar"}`, "0000000000", "0000000000001", "", "invalid end time: require unix seconds or RFC3339 format\n"}, {"org-id", `{foo="bar"}`, "0000000000", fmt.Sprint(time.Now().Add(time.Hour).Unix())[:10], "", "deletes in the future are not allowed\n"}, {"org-id", `{foo="bar"}`, "0000000001", "0000000000", "", "start time can't be greater than end time\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "not-a-duration", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "1ms", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "1h", "max_interval can't be greater than 1m0s\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000001", "30s", "max_interval can't be greater than the interval to be deleted (1s)\n"}, - {"org-id", `{foo="bar"}`, "0000000000", "0000000000", "", "difference between start time and end time must be at 
least one second\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "not-a-duration", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "1ms", "invalid max_interval: valid time units are 's', 'm', 'h'\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "1h", "max_interval can't be greater than 1m0s\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000001", "30s", "max_interval can't be greater than the interval to be deleted (1s)\n"}, + {"org-id", `{foo="bar"} |= "foo"`, "0000000000", "0000000000", "", "difference between start time and end time must be at least one second\n"}, } { t.Run(strings.TrimSpace(tc.error), func(t *testing.T) { req := buildRequest(tc.orgID, tc.query, tc.startTime, tc.endTime) diff --git a/pkg/compactor/deletion/tenant_delete_requests_client.go b/pkg/compactor/deletion/tenant_delete_requests_client.go index 29b6a56922868..d3ba3a9905a3b 100644 --- a/pkg/compactor/deletion/tenant_delete_requests_client.go +++ b/pkg/compactor/deletion/tenant_delete_requests_client.go @@ -2,12 +2,17 @@ package deletion import ( "context" + "time" + + "github.com/grafana/loki/pkg/validation" ) const deletionNotAvailableMsg = "deletion is not available for this tenant" type Limits interface { DeletionMode(userID string) string + RetentionPeriod(userID string) time.Duration + StreamRetention(userID string) []validation.StreamRetention } type perTenantDeleteRequestsClient struct { diff --git a/pkg/compactor/deletion/tenant_delete_requests_client_test.go b/pkg/compactor/deletion/tenant_delete_requests_client_test.go index ba063f713b844..20e97d463f4f8 100644 --- a/pkg/compactor/deletion/tenant_delete_requests_client_test.go +++ b/pkg/compactor/deletion/tenant_delete_requests_client_test.go @@ -3,6 +3,7 @@ package deletion import ( "context" "testing" + "time" "github.com/stretchr/testify/require" ) @@ -13,7 +14,7 @@ func TestTenantDeleteRequestsClient(t *testing.T) { RequestID: "test-request", }}, } - perTenantClient := NewPerTenantDeleteRequestsClient(fakeClient, limits) + perTenantClient := NewPerTenantDeleteRequestsClient(fakeClient, defaultLimits) t.Run("tenant enabled", func(t *testing.T) { reqs, err := perTenantClient.GetAllDeleteRequestsForUser(context.Background(), "1") @@ -39,10 +40,11 @@ func (c *fakeRequestsClient) GetAllDeleteRequestsForUser(_ context.Context, _ st } var ( - limits = &fakeLimits{ - limits: map[string]string{ - "1": "filter-only", - "2": "disabled", + defaultLimits = &fakeLimits{ + tenantLimits: map[string]limit{ + "1": {deletionMode: "filter-only"}, + "2": {deletionMode: "disabled"}, + "3": {retentionPeriod: time.Hour}, }, } ) diff --git a/pkg/compactor/deletion/tenant_request_handler_test.go b/pkg/compactor/deletion/tenant_request_handler_test.go index e979a5c8c4d00..c4a18543ccef2 100644 --- a/pkg/compactor/deletion/tenant_request_handler_test.go +++ b/pkg/compactor/deletion/tenant_request_handler_test.go @@ -4,16 +4,19 @@ import ( "net/http" "net/http/httptest" "testing" + "time" "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/validation" ) func TestDeleteRequestHandlerDeletionMiddleware(t *testing.T) { fl := &fakeLimits{ - limits: map[string]string{ - "1": "filter-only", - "2": "disabled", + tenantLimits: map[string]limit{ + "1": {deletionMode: "filter-only"}, + "2": {deletionMode: "disabled"}, }, } @@ -47,15 +50,34 @@ func TestDeleteRequestHandlerDeletionMiddleware(t *testing.T) { 
require.Equal(t, http.StatusBadRequest, res.Result().StatusCode) } +type limit struct { + deletionMode string + retentionPeriod time.Duration + streamRetention []validation.StreamRetention +} + type fakeLimits struct { - limits map[string]string - mode string + tenantLimits map[string]limit + defaultLimit limit } -func (f *fakeLimits) DeletionMode(userID string) string { - if f.mode != "" { - return f.mode +func (f *fakeLimits) getLimitForUser(userID string) limit { + limit := f.defaultLimit + if override, ok := f.tenantLimits[userID]; ok { + limit = override } - return f.limits[userID] + return limit +} + +func (f *fakeLimits) DeletionMode(userID string) string { + return f.getLimitForUser(userID).deletionMode +} + +func (f *fakeLimits) RetentionPeriod(userID string) time.Duration { + return f.getLimitForUser(userID).retentionPeriod +} + +func (f *fakeLimits) StreamRetention(userID string) []validation.StreamRetention { + return f.getLimitForUser(userID).streamRetention } From 5190dda64b8e83013ab02b60845b1d9056154d68 Mon Sep 17 00:00:00 2001 From: Shantanu Alshi Date: Mon, 1 Apr 2024 18:30:21 +0530 Subject: [PATCH 43/54] feat(detected_labels): Initial skeleton for the API (#12390) Co-authored-by: Cyril Tovena --- pkg/loghttp/labels.go | 13 + pkg/logproto/logproto.pb.go | 1161 ++++++++++++++++++---- pkg/logproto/logproto.proto | 20 + pkg/logql/metrics.go | 4 + pkg/loki/modules.go | 1 + pkg/querier/handler.go | 7 + pkg/querier/http.go | 10 + pkg/querier/multi_tenant_querier.go | 22 + pkg/querier/querier.go | 11 + pkg/querier/querier_mock_test.go | 12 + pkg/querier/queryrange/codec.go | 123 ++- pkg/querier/queryrange/extensions.go | 17 + pkg/querier/queryrange/marshal.go | 10 + pkg/querier/queryrange/queryrange.pb.go | 753 ++++++++++++-- pkg/querier/queryrange/queryrange.proto | 10 + pkg/querier/queryrange/roundtrip.go | 12 +- pkg/querier/queryrange/roundtrip_test.go | 1 + pkg/querier/queryrange/stats.go | 3 + pkg/util/marshal/marshal.go | 10 + 19 files changed, 1908 insertions(+), 292 deletions(-) diff --git a/pkg/loghttp/labels.go b/pkg/loghttp/labels.go index f239873323cfe..efa059fc9709c 100644 --- a/pkg/loghttp/labels.go +++ b/pkg/loghttp/labels.go @@ -87,6 +87,19 @@ func ParseLabelQuery(r *http.Request) (*logproto.LabelRequest, error) { return req, nil } +func ParseDetectedLabelsQuery(r *http.Request) (*logproto.DetectedLabelsRequest, error) { + start, end, err := bounds(r) + if err != nil { + return nil, err + } + + return &logproto.DetectedLabelsRequest{ + Start: &start, + End: &end, + Query: query(r), + }, nil +} + func ParseDetectedFieldsQuery(r *http.Request) (*logproto.DetectedFieldsRequest, error) { req := &logproto.DetectedFieldsRequest{} diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 2b794a5f899c2..11482676c0efb 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -2673,6 +2673,151 @@ func (m *DetectedField) GetCardinality() uint64 { return 0 } +type DetectedLabelsRequest struct { + Start *time.Time `protobuf:"bytes,1,opt,name=start,proto3,stdtime" json:"start,omitempty"` + End *time.Time `protobuf:"bytes,2,opt,name=end,proto3,stdtime" json:"end,omitempty"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` +} + +func (m *DetectedLabelsRequest) Reset() { *m = DetectedLabelsRequest{} } +func (*DetectedLabelsRequest) ProtoMessage() {} +func (*DetectedLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{48} +} +func (m *DetectedLabelsRequest) 
XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabelsRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabelsRequest.Merge(m, src) +} +func (m *DetectedLabelsRequest) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabelsRequest proto.InternalMessageInfo + +func (m *DetectedLabelsRequest) GetStart() *time.Time { + if m != nil { + return m.Start + } + return nil +} + +func (m *DetectedLabelsRequest) GetEnd() *time.Time { + if m != nil { + return m.End + } + return nil +} + +func (m *DetectedLabelsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +type DetectedLabelsResponse struct { + DetectedLabels []*DetectedLabel `protobuf:"bytes,1,rep,name=detectedLabels,proto3" json:"detectedLabels,omitempty"` +} + +func (m *DetectedLabelsResponse) Reset() { *m = DetectedLabelsResponse{} } +func (*DetectedLabelsResponse) ProtoMessage() {} +func (*DetectedLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{49} +} +func (m *DetectedLabelsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabelsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabelsResponse.Merge(m, src) +} +func (m *DetectedLabelsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabelsResponse proto.InternalMessageInfo + +func (m *DetectedLabelsResponse) GetDetectedLabels() []*DetectedLabel { + if m != nil { + return m.DetectedLabels + } + return nil +} + +type DetectedLabel struct { + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` +} + +func (m *DetectedLabel) Reset() { *m = DetectedLabel{} } +func (*DetectedLabel) ProtoMessage() {} +func (*DetectedLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_c28a5f14f1f4c79a, []int{50} +} +func (m *DetectedLabel) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabel.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabel.Merge(m, src) +} +func (m *DetectedLabel) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabel) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabel proto.InternalMessageInfo + +func (m *DetectedLabel) GetLabel() string { + if m != nil { + 
return m.Label + } + return "" +} + func init() { proto.RegisterEnum("logproto.Direction", Direction_name, Direction_value) proto.RegisterType((*StreamRatesRequest)(nil), "logproto.StreamRatesRequest") @@ -2724,162 +2869,167 @@ func init() { proto.RegisterType((*DetectedFieldsRequest)(nil), "logproto.DetectedFieldsRequest") proto.RegisterType((*DetectedFieldsResponse)(nil), "logproto.DetectedFieldsResponse") proto.RegisterType((*DetectedField)(nil), "logproto.DetectedField") + proto.RegisterType((*DetectedLabelsRequest)(nil), "logproto.DetectedLabelsRequest") + proto.RegisterType((*DetectedLabelsResponse)(nil), "logproto.DetectedLabelsResponse") + proto.RegisterType((*DetectedLabel)(nil), "logproto.DetectedLabel") } func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2395 bytes of a gzipped FileDescriptorProto + // 2431 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4b, 0x6f, 0x1b, 0xc7, - 0x99, 0x4b, 0x2e, 0x5f, 0x1f, 0x29, 0x59, 0x1e, 0xd1, 0x36, 0xc1, 0xd8, 0xa4, 0x3c, 0x48, 0x1d, - 0xd5, 0x71, 0xc8, 0x58, 0x6e, 0xdc, 0xd4, 0x6e, 0xd0, 0x9a, 0x52, 0xec, 0xc8, 0x96, 0x1f, 0x19, - 0xb9, 0x6e, 0x60, 0xb4, 0x35, 0x56, 0xe4, 0x90, 0x5a, 0x88, 0xdc, 0xa5, 0x77, 0x87, 0xb1, 0x09, + 0x99, 0x4b, 0x2e, 0x5f, 0x1f, 0x29, 0x59, 0x1e, 0xd1, 0x32, 0xc1, 0xd8, 0xa4, 0x3c, 0x48, 0x1c, + 0xd5, 0x71, 0xc4, 0x58, 0x6e, 0xdc, 0xd4, 0x6e, 0x90, 0x9a, 0x52, 0xec, 0xc8, 0x96, 0x1f, 0x19, + 0xb9, 0x6e, 0x6a, 0xb4, 0x35, 0x56, 0xe4, 0x88, 0x5a, 0x88, 0xdc, 0xa5, 0x77, 0x87, 0xb1, 0x05, 0xf4, 0xd0, 0x3f, 0x10, 0x34, 0xb7, 0xa2, 0x97, 0xa2, 0x05, 0x0a, 0xa4, 0x40, 0xd1, 0x4b, 0x7f, 0x40, 0x7b, 0xe9, 0xc1, 0xbd, 0x39, 0xb7, 0x20, 0x07, 0xb6, 0x96, 0x2f, 0x85, 0x4e, 0xb9, 0x15, - 0xe8, 0xa9, 0x98, 0xd7, 0x3e, 0x28, 0xca, 0x0d, 0x55, 0x17, 0x85, 0x2f, 0xdc, 0x99, 0x6f, 0xbe, - 0xf9, 0xe6, 0x7b, 0xcd, 0xf7, 0x18, 0xc2, 0x6b, 0x83, 0x9d, 0x6e, 0xa3, 0xe7, 0x76, 0x07, 0x9e, - 0xcb, 0xdc, 0x60, 0x50, 0x17, 0xbf, 0x28, 0xa7, 0xe7, 0x95, 0x52, 0xd7, 0xed, 0xba, 0x12, 0x87, - 0x8f, 0xe4, 0x7a, 0xa5, 0xd6, 0x75, 0xdd, 0x6e, 0x8f, 0x36, 0xc4, 0x6c, 0x6b, 0xd8, 0x69, 0x30, - 0xbb, 0x4f, 0x7d, 0x66, 0xf5, 0x07, 0x0a, 0x61, 0x49, 0x51, 0x7f, 0xd8, 0xeb, 0xbb, 0x6d, 0xda, - 0x6b, 0xf8, 0xcc, 0x62, 0xbe, 0xfc, 0x55, 0x18, 0x8b, 0x1c, 0x63, 0x30, 0xf4, 0xb7, 0xc5, 0x8f, - 0x04, 0xe2, 0x12, 0xa0, 0x4d, 0xe6, 0x51, 0xab, 0x4f, 0x2c, 0x46, 0x7d, 0x42, 0x1f, 0x0e, 0xa9, - 0xcf, 0xf0, 0x4d, 0x58, 0x8c, 0x41, 0xfd, 0x81, 0xeb, 0xf8, 0x14, 0x5d, 0x84, 0x82, 0x1f, 0x82, - 0xcb, 0xc6, 0x52, 0x6a, 0xb9, 0xb0, 0x52, 0xaa, 0x07, 0xa2, 0x84, 0x7b, 0x48, 0x14, 0x11, 0xff, - 0xca, 0x00, 0x08, 0xd7, 0x50, 0x15, 0x40, 0xae, 0x7e, 0x60, 0xf9, 0xdb, 0x65, 0x63, 0xc9, 0x58, - 0x36, 0x49, 0x04, 0x82, 0xce, 0xc1, 0xd1, 0x70, 0x76, 0xcb, 0xdd, 0xdc, 0xb6, 0xbc, 0x76, 0x39, - 0x29, 0xd0, 0xf6, 0x2f, 0x20, 0x04, 0xa6, 0x67, 0x31, 0x5a, 0x4e, 0x2d, 0x19, 0xcb, 0x29, 0x22, - 0xc6, 0xe8, 0x38, 0x64, 0x18, 0x75, 0x2c, 0x87, 0x95, 0xcd, 0x25, 0x63, 0x39, 0x4f, 0xd4, 0x8c, - 0xc3, 0xb9, 0xec, 0xd4, 0x2f, 0xa7, 0x97, 0x8c, 0xe5, 0x39, 0xa2, 0x66, 0xf8, 0xb3, 0x14, 0x14, - 0x3f, 0x1c, 0x52, 0x6f, 0xa4, 0x14, 0x80, 0xaa, 0x90, 0xf3, 0x69, 0x8f, 0xb6, 0x98, 0xeb, 0x09, - 0x06, 0xf3, 0xcd, 0x64, 0xd9, 0x20, 0x01, 0x0c, 0x95, 0x20, 0xdd, 0xb3, 0xfb, 0x36, 0x13, 0x6c, - 0xcd, 0x11, 0x39, 0x41, 0x97, 0x20, 0xed, 0x33, 0xcb, 0x63, 0x82, 0x97, 0xc2, 0x4a, 0xa5, 0x2e, - 0x8d, 0x56, 0xd7, 0x46, 0xab, 0xdf, 0xd5, 
0x46, 0x6b, 0xe6, 0x9e, 0x8c, 0x6b, 0x89, 0x4f, 0xff, - 0x56, 0x33, 0x88, 0xdc, 0x82, 0x2e, 0x42, 0x8a, 0x3a, 0x6d, 0xc1, 0xef, 0xd7, 0xdd, 0xc9, 0x37, - 0xa0, 0xf3, 0x90, 0x6f, 0xdb, 0x1e, 0x6d, 0x31, 0xdb, 0x75, 0x84, 0x54, 0xf3, 0x2b, 0x8b, 0xa1, - 0x45, 0xd6, 0xf4, 0x12, 0x09, 0xb1, 0xd0, 0x39, 0xc8, 0xf8, 0x5c, 0x75, 0x7e, 0x39, 0xbb, 0x94, - 0x5a, 0xce, 0x37, 0x4b, 0x7b, 0xe3, 0xda, 0x82, 0x84, 0x9c, 0x73, 0xfb, 0x36, 0xa3, 0xfd, 0x01, - 0x1b, 0x11, 0x85, 0x83, 0xce, 0x42, 0xb6, 0x4d, 0x7b, 0x94, 0x1b, 0x3c, 0x27, 0x0c, 0xbe, 0x10, - 0x21, 0x2f, 0x16, 0x88, 0x46, 0x40, 0xf7, 0xc1, 0x1c, 0xf4, 0x2c, 0xa7, 0x9c, 0x17, 0x52, 0xcc, - 0x87, 0x88, 0x77, 0x7a, 0x96, 0xd3, 0xbc, 0xf8, 0xe5, 0xb8, 0xb6, 0xd2, 0xb5, 0xd9, 0xf6, 0x70, - 0xab, 0xde, 0x72, 0xfb, 0x8d, 0xae, 0x67, 0x75, 0x2c, 0xc7, 0x6a, 0xf4, 0xdc, 0x1d, 0xbb, 0xc1, - 0x9d, 0xf3, 0xe1, 0x90, 0x7a, 0x36, 0xf5, 0x1a, 0x9c, 0x46, 0x5d, 0xd8, 0x83, 0xef, 0x23, 0x82, - 0xe6, 0x75, 0x33, 0x97, 0x59, 0xc8, 0xe2, 0x71, 0x12, 0xd0, 0xa6, 0xd5, 0x1f, 0xf4, 0xe8, 0x4c, - 0xf6, 0x0a, 0x2c, 0x93, 0x3c, 0xb4, 0x65, 0x52, 0xb3, 0x5a, 0x26, 0x54, 0xb3, 0x39, 0x9b, 0x9a, - 0xd3, 0x5f, 0x57, 0xcd, 0x99, 0x97, 0xaf, 0x66, 0x5c, 0x06, 0x93, 0xcf, 0xd0, 0x02, 0xa4, 0x3c, - 0xeb, 0x91, 0x50, 0x66, 0x91, 0xf0, 0x21, 0xde, 0x80, 0x8c, 0x64, 0x04, 0x55, 0x26, 0xb5, 0x1d, - 0xbf, 0x19, 0xa1, 0xa6, 0x53, 0x5a, 0x87, 0x0b, 0xa1, 0x0e, 0x53, 0x42, 0x3b, 0xf8, 0xd7, 0x06, - 0xcc, 0x29, 0x13, 0xaa, 0xe8, 0xb2, 0x05, 0x59, 0x79, 0xbb, 0x75, 0x64, 0x39, 0x31, 0x19, 0x59, - 0xae, 0xb4, 0xad, 0x01, 0xa3, 0x5e, 0xb3, 0xf1, 0x64, 0x5c, 0x33, 0xbe, 0x1c, 0xd7, 0xde, 0x78, - 0x91, 0x94, 0x22, 0xc8, 0xa9, 0xa8, 0xa3, 0x09, 0xa3, 0x37, 0x05, 0x77, 0xcc, 0x57, 0x7e, 0x70, - 0xa4, 0x2e, 0x03, 0xe4, 0xba, 0xd3, 0xa5, 0x3e, 0xa7, 0x6c, 0x72, 0x13, 0x12, 0x89, 0x83, 0x7f, - 0x0a, 0x8b, 0x31, 0x57, 0x53, 0x7c, 0xbe, 0x0b, 0x19, 0x9f, 0x2b, 0x50, 0xb3, 0x19, 0x31, 0xd4, - 0xa6, 0x80, 0x37, 0xe7, 0x15, 0x7f, 0x19, 0x39, 0x27, 0x0a, 0x7f, 0xb6, 0xd3, 0xff, 0x62, 0x40, - 0x71, 0xc3, 0xda, 0xa2, 0x3d, 0xed, 0xe3, 0x08, 0x4c, 0xc7, 0xea, 0x53, 0xa5, 0x71, 0x31, 0xe6, - 0x01, 0xed, 0x63, 0xab, 0x37, 0xa4, 0x92, 0x64, 0x8e, 0xa8, 0xd9, 0xac, 0x91, 0xc8, 0x38, 0x74, - 0x24, 0x32, 0x42, 0x7f, 0x2f, 0x41, 0x9a, 0x7b, 0xd6, 0x48, 0x44, 0xa1, 0x3c, 0x91, 0x13, 0xfc, - 0x06, 0xcc, 0x29, 0x29, 0x94, 0xfa, 0x42, 0x96, 0xb9, 0xfa, 0xf2, 0x9a, 0x65, 0xdc, 0x87, 0x8c, - 0xd4, 0x36, 0x7a, 0x1d, 0xf2, 0x41, 0x76, 0x13, 0xd2, 0xa6, 0x9a, 0x99, 0xbd, 0x71, 0x2d, 0xc9, - 0x7c, 0x12, 0x2e, 0xa0, 0x1a, 0xa4, 0xc5, 0x4e, 0x21, 0xb9, 0xd1, 0xcc, 0xef, 0x8d, 0x6b, 0x12, - 0x40, 0xe4, 0x07, 0x9d, 0x04, 0x73, 0x9b, 0x27, 0x18, 0xae, 0x02, 0xb3, 0x99, 0xdb, 0x1b, 0xd7, - 0xc4, 0x9c, 0x88, 0x5f, 0x7c, 0x0d, 0x8a, 0x1b, 0xb4, 0x6b, 0xb5, 0x46, 0xea, 0xd0, 0x92, 0x26, - 0xc7, 0x0f, 0x34, 0x34, 0x8d, 0xd3, 0x50, 0x0c, 0x4e, 0x7c, 0xd0, 0xf7, 0x95, 0x53, 0x17, 0x02, - 0xd8, 0x4d, 0x1f, 0xff, 0xd2, 0x00, 0x65, 0x67, 0x84, 0x21, 0xd3, 0xe3, 0xb2, 0xfa, 0x2a, 0x06, - 0xc1, 0xde, 0xb8, 0xa6, 0x20, 0x44, 0x7d, 0xd1, 0x65, 0xc8, 0xfa, 0xe2, 0x44, 0x4e, 0x6c, 0xd2, - 0x7d, 0xc4, 0x42, 0xf3, 0x08, 0x77, 0x83, 0xbd, 0x71, 0x4d, 0x23, 0x12, 0x3d, 0x40, 0xf5, 0x58, - 0xe6, 0x94, 0x82, 0xcd, 0xef, 0x8d, 0x6b, 0x11, 0x68, 0x34, 0x93, 0xe2, 0x7f, 0x1a, 0x50, 0xb8, - 0x6b, 0xd9, 0x81, 0x0b, 0x95, 0xb5, 0x89, 0xc2, 0x18, 0x29, 0x01, 0xfc, 0x4a, 0xb7, 0x69, 0xcf, - 0x1a, 0x5d, 0x75, 0x3d, 0x41, 0x77, 0x8e, 0x04, 0xf3, 0x30, 0xd9, 0x99, 0x53, 0x93, 0x5d, 0x7a, - 0xf6, 0x90, 0xfa, 0x3f, 0x0c, 0x60, 0xd7, 0xcd, 0x5c, 0x72, 0x21, 
0x85, 0xff, 0x60, 0x40, 0x51, - 0x4a, 0xae, 0xdc, 0xee, 0x47, 0x90, 0x91, 0x8a, 0x11, 0xb2, 0xbf, 0x20, 0xb8, 0xbc, 0x39, 0x4b, - 0x60, 0x51, 0x34, 0xd1, 0xf7, 0x60, 0xbe, 0xed, 0xb9, 0x83, 0x01, 0x6d, 0x6f, 0xaa, 0x10, 0x96, - 0x9c, 0x0c, 0x61, 0x6b, 0xd1, 0x75, 0x32, 0x81, 0x8e, 0xff, 0x6a, 0xc0, 0x9c, 0x8a, 0x16, 0xca, - 0x56, 0x81, 0x7e, 0x8d, 0x43, 0xa7, 0xac, 0xe4, 0xac, 0x29, 0xeb, 0x38, 0x64, 0xba, 0x9e, 0x3b, - 0x1c, 0xf8, 0xe5, 0x94, 0xbc, 0x9b, 0x72, 0x36, 0x5b, 0x2a, 0xc3, 0xd7, 0x61, 0x5e, 0x8b, 0x72, - 0x40, 0xc8, 0xac, 0x4c, 0x86, 0xcc, 0xf5, 0x36, 0x75, 0x98, 0xdd, 0xb1, 0x83, 0x20, 0xa8, 0xf0, - 0xf1, 0xcf, 0x0d, 0x58, 0x98, 0x44, 0x41, 0x6b, 0x91, 0x7b, 0xc6, 0xc9, 0x9d, 0x39, 0x98, 0x5c, - 0x5d, 0x04, 0x1f, 0xff, 0x7d, 0x87, 0x79, 0x23, 0x4d, 0x5a, 0xee, 0xad, 0xbc, 0x03, 0x85, 0xc8, - 0x22, 0x4f, 0x51, 0x3b, 0x54, 0xdd, 0x0c, 0xc2, 0x87, 0x61, 0x48, 0x48, 0xca, 0x80, 0x26, 0x26, - 0xf8, 0x17, 0x06, 0xcc, 0xc5, 0x6c, 0x89, 0xde, 0x05, 0xb3, 0xe3, 0xb9, 0xfd, 0x99, 0x0c, 0x25, - 0x76, 0xa0, 0x6f, 0x41, 0x92, 0xb9, 0x33, 0x99, 0x29, 0xc9, 0x5c, 0x6e, 0x25, 0x25, 0x7e, 0x4a, - 0x56, 0xb7, 0x72, 0x86, 0xdf, 0x81, 0xbc, 0x10, 0xe8, 0x8e, 0x65, 0x7b, 0x53, 0xb3, 0xc5, 0x74, - 0x81, 0x2e, 0xc3, 0x11, 0x19, 0x09, 0xa7, 0x6f, 0x2e, 0x4e, 0xdb, 0x5c, 0xd4, 0x9b, 0x5f, 0x83, - 0xf4, 0xea, 0xf6, 0xd0, 0xd9, 0xe1, 0x5b, 0xda, 0x16, 0xb3, 0xf4, 0x16, 0x3e, 0xc6, 0xc7, 0x60, - 0x91, 0xdf, 0x41, 0xea, 0xf9, 0xab, 0xee, 0xd0, 0x61, 0xba, 0xbb, 0x38, 0x07, 0xa5, 0x38, 0x58, - 0x79, 0x49, 0x09, 0xd2, 0x2d, 0x0e, 0x10, 0x34, 0xe6, 0x88, 0x9c, 0xe0, 0xdf, 0x1a, 0x80, 0xae, - 0x51, 0x26, 0x4e, 0x59, 0x5f, 0x0b, 0xae, 0x47, 0x05, 0x72, 0x7d, 0x8b, 0xb5, 0xb6, 0xa9, 0xe7, - 0xeb, 0x1a, 0x44, 0xcf, 0xff, 0x1f, 0xd5, 0x1e, 0x3e, 0x0f, 0x8b, 0x31, 0x2e, 0x95, 0x4c, 0x15, - 0xc8, 0xb5, 0x14, 0x4c, 0xe5, 0xbb, 0x60, 0x8e, 0xff, 0x98, 0x84, 0x9c, 0xd8, 0x40, 0x68, 0x07, - 0x9d, 0x87, 0x42, 0xc7, 0x76, 0xba, 0xd4, 0x1b, 0x78, 0xb6, 0x52, 0x81, 0xd9, 0x3c, 0xb2, 0x37, - 0xae, 0x45, 0xc1, 0x24, 0x3a, 0x41, 0x6f, 0x41, 0x76, 0xe8, 0x53, 0xef, 0x81, 0x2d, 0x6f, 0x7a, - 0xbe, 0x59, 0xda, 0x1d, 0xd7, 0x32, 0x3f, 0xf0, 0xa9, 0xb7, 0xbe, 0xc6, 0x33, 0xcf, 0x50, 0x8c, - 0x88, 0xfc, 0xb6, 0xd1, 0x0d, 0xe5, 0xa6, 0xa2, 0x08, 0x6b, 0x7e, 0x9b, 0xb3, 0x3f, 0x11, 0xea, - 0x06, 0x9e, 0xdb, 0xa7, 0x6c, 0x9b, 0x0e, 0xfd, 0x46, 0xcb, 0xed, 0xf7, 0x5d, 0xa7, 0x21, 0x7a, - 0x49, 0x21, 0x34, 0x4f, 0x9f, 0x7c, 0xbb, 0xf2, 0xdc, 0xbb, 0x90, 0x65, 0xdb, 0x9e, 0x3b, 0xec, - 0x6e, 0x8b, 0xac, 0x90, 0x6a, 0x5e, 0x9a, 0x9d, 0x9e, 0xa6, 0x40, 0xf4, 0x00, 0x9d, 0xe6, 0xda, - 0xa2, 0xad, 0x1d, 0x7f, 0xd8, 0x97, 0x1d, 0x5a, 0x33, 0xbd, 0x37, 0xae, 0x19, 0x6f, 0x91, 0x00, - 0x8c, 0x3f, 0x49, 0x42, 0x4d, 0x38, 0xea, 0x3d, 0x51, 0x36, 0x5c, 0x75, 0xbd, 0x9b, 0x94, 0x79, - 0x76, 0xeb, 0x96, 0xd5, 0xa7, 0xda, 0x37, 0x6a, 0x50, 0xe8, 0x0b, 0xe0, 0x83, 0xc8, 0x15, 0x80, - 0x7e, 0x80, 0x87, 0x4e, 0x01, 0x88, 0x3b, 0x23, 0xd7, 0xe5, 0x6d, 0xc8, 0x0b, 0x88, 0x58, 0x5e, - 0x8d, 0x69, 0xaa, 0x31, 0xa3, 0x64, 0x4a, 0x43, 0xeb, 0x93, 0x1a, 0x9a, 0x99, 0x4e, 0xa0, 0x96, - 0xa8, 0xaf, 0xa7, 0xe3, 0xbe, 0x8e, 0x3f, 0x37, 0xa0, 0xba, 0xa1, 0x39, 0x3f, 0xa4, 0x3a, 0xb4, - 0xbc, 0xc9, 0x97, 0x24, 0x6f, 0xea, 0xbf, 0x93, 0x17, 0x57, 0x01, 0x36, 0x6c, 0x87, 0x5e, 0xb5, - 0x7b, 0x8c, 0x7a, 0x53, 0x3a, 0x91, 0x4f, 0x52, 0x61, 0x48, 0x20, 0xb4, 0xa3, 0xe5, 0x5c, 0x8d, - 0xc4, 0xe1, 0x97, 0x21, 0x46, 0xf2, 0x25, 0x9a, 0x2d, 0x35, 0x11, 0xa2, 0x76, 0x20, 0xdb, 0x11, - 0xe2, 0xc9, 0x94, 0x1a, 0x7b, 0x46, 0x09, 0x65, 0x6f, 0x5e, 0x56, 0x87, 0x5f, 0x78, 0x51, 
0x41, - 0x22, 0x5e, 0x7d, 0x1a, 0xfe, 0xc8, 0x61, 0xd6, 0xe3, 0xc8, 0x66, 0xa2, 0x4f, 0x40, 0x3f, 0x51, - 0xe5, 0x56, 0x7a, 0x6a, 0xb9, 0xa5, 0x6f, 0xee, 0xe1, 0x7b, 0xc6, 0xf7, 0xc2, 0xd8, 0x27, 0xcc, - 0xa1, 0x62, 0xdf, 0x19, 0x30, 0x3d, 0xda, 0xd1, 0x49, 0x1a, 0x85, 0xc7, 0x06, 0x98, 0x62, 0x1d, - 0xff, 0xc9, 0x80, 0x85, 0x6b, 0x94, 0xc5, 0xcb, 0x9f, 0x57, 0xc8, 0x98, 0xf8, 0x03, 0x38, 0x1a, - 0xe1, 0x5f, 0x49, 0x7f, 0x61, 0xa2, 0xe6, 0x39, 0x16, 0xca, 0xbf, 0xee, 0xb4, 0xe9, 0x63, 0xd5, - 0x2b, 0xc6, 0xcb, 0x9d, 0x3b, 0x50, 0x88, 0x2c, 0xa2, 0x2b, 0x13, 0x85, 0x4e, 0xe4, 0x65, 0x27, - 0x48, 0xd6, 0xcd, 0x92, 0x92, 0x49, 0x76, 0x8b, 0xaa, 0x8c, 0x0d, 0x8a, 0x82, 0x4d, 0x40, 0xc2, - 0x5c, 0x82, 0x6c, 0x34, 0x2d, 0x09, 0xe8, 0x8d, 0xa0, 0xe2, 0x09, 0xe6, 0xe8, 0x34, 0x98, 0x9e, - 0xfb, 0x48, 0x57, 0xb0, 0x73, 0xe1, 0x91, 0xc4, 0x7d, 0x44, 0xc4, 0x12, 0xbe, 0x0c, 0x29, 0xe2, - 0x3e, 0x42, 0x55, 0x00, 0xcf, 0x72, 0xba, 0xf4, 0x5e, 0xd0, 0x38, 0x15, 0x49, 0x04, 0x72, 0x40, - 0xc9, 0xb0, 0x0a, 0x47, 0xa3, 0x1c, 0x49, 0x73, 0xd7, 0x21, 0xfb, 0xe1, 0x30, 0xaa, 0xae, 0xd2, - 0x84, 0xba, 0x64, 0x0f, 0xae, 0x91, 0xb8, 0xcf, 0x40, 0x08, 0x47, 0x27, 0x21, 0xcf, 0xac, 0xad, - 0x1e, 0xbd, 0x15, 0x06, 0xb8, 0x10, 0xc0, 0x57, 0x79, 0xcf, 0x77, 0x2f, 0x52, 0xfb, 0x84, 0x00, - 0x74, 0x16, 0x16, 0x42, 0x9e, 0xef, 0x78, 0xb4, 0x63, 0x3f, 0x16, 0x16, 0x2e, 0x92, 0x7d, 0x70, - 0xb4, 0x0c, 0x47, 0x42, 0xd8, 0xa6, 0xa8, 0x31, 0x4c, 0x81, 0x3a, 0x09, 0xe6, 0xba, 0x11, 0xe2, - 0xbe, 0xff, 0x70, 0x68, 0xf5, 0xc4, 0xcd, 0x2b, 0x92, 0x08, 0x04, 0xff, 0xd9, 0x80, 0xa3, 0xd2, - 0xd4, 0xbc, 0xdb, 0x7f, 0x15, 0xbd, 0xfe, 0x33, 0x03, 0x50, 0x54, 0x02, 0xe5, 0x5a, 0xdf, 0x88, - 0x3e, 0xe3, 0xf0, 0x22, 0xa6, 0x20, 0x5a, 0x59, 0x09, 0x0a, 0x5f, 0x62, 0x30, 0x64, 0x44, 0x21, - 0x24, 0x7b, 0x6a, 0x53, 0xf6, 0xca, 0x12, 0x42, 0xd4, 0x97, 0xb7, 0xf8, 0x5b, 0x23, 0x46, 0x7d, - 0xd5, 0xe9, 0x8a, 0x16, 0x5f, 0x00, 0x88, 0xfc, 0xf0, 0xb3, 0xa8, 0xc3, 0x84, 0xd7, 0x98, 0xe1, - 0x59, 0x0a, 0x44, 0xf4, 0x00, 0xff, 0x3e, 0x09, 0x73, 0xf7, 0xdc, 0xde, 0x30, 0x4c, 0x89, 0xaf, - 0x52, 0xaa, 0x88, 0xb5, 0xdf, 0x69, 0xdd, 0x7e, 0x23, 0x30, 0x7d, 0x46, 0x07, 0xc2, 0xb3, 0x52, - 0x44, 0x8c, 0x11, 0x86, 0x22, 0xb3, 0xbc, 0x2e, 0x65, 0xb2, 0xaf, 0x29, 0x67, 0x44, 0xc1, 0x19, - 0x83, 0xa1, 0x25, 0x28, 0x58, 0xdd, 0xae, 0x47, 0xbb, 0x16, 0xa3, 0xcd, 0x51, 0x39, 0x2b, 0x0e, - 0x8b, 0x82, 0xf0, 0x47, 0x30, 0xaf, 0x95, 0xa5, 0x4c, 0xfa, 0x36, 0x64, 0x3f, 0x16, 0x90, 0x29, - 0x4f, 0x5e, 0x12, 0x55, 0x85, 0x31, 0x8d, 0x16, 0x7f, 0x1f, 0xd7, 0x3c, 0xe3, 0xeb, 0x90, 0x91, - 0xe8, 0xe8, 0x64, 0xb4, 0x3b, 0x91, 0x6f, 0x33, 0x7c, 0xae, 0x5a, 0x0d, 0x0c, 0x19, 0x49, 0x48, - 0x19, 0x5e, 0xf8, 0x86, 0x84, 0x10, 0xf5, 0xc5, 0xbf, 0x31, 0xe0, 0xd8, 0x1a, 0x65, 0xb4, 0xc5, - 0x68, 0xfb, 0xaa, 0x4d, 0x7b, 0xed, 0xc3, 0x36, 0xce, 0xc6, 0xa1, 0x1b, 0xe7, 0x69, 0x6f, 0x5f, - 0xa9, 0xe8, 0xdb, 0xd7, 0x3a, 0x1c, 0x9f, 0x64, 0x51, 0x69, 0xb4, 0x01, 0x99, 0x8e, 0x80, 0xec, - 0x7f, 0xea, 0x8c, 0xed, 0x20, 0x0a, 0x0d, 0x7b, 0x30, 0x17, 0x5b, 0x10, 0x1a, 0xe6, 0x16, 0x55, - 0xd1, 0x4e, 0x4e, 0xd0, 0x37, 0xc1, 0x64, 0xa3, 0x81, 0x0a, 0x72, 0xcd, 0x63, 0xff, 0x1a, 0xd7, - 0x8e, 0xc6, 0xb6, 0xdd, 0x1d, 0x0d, 0x28, 0x11, 0x28, 0xdc, 0x11, 0x5a, 0x96, 0xd7, 0xb6, 0x1d, - 0xab, 0x67, 0x33, 0xc9, 0xb8, 0x49, 0xa2, 0xa0, 0xb3, 0x67, 0x20, 0x1f, 0xfc, 0x7f, 0x80, 0x0a, - 0x90, 0xbd, 0x7a, 0x9b, 0xfc, 0xf0, 0x0a, 0x59, 0x5b, 0x48, 0xa0, 0x22, 0xe4, 0x9a, 0x57, 0x56, - 0x6f, 0x88, 0x99, 0xb1, 0xf2, 0x79, 0x5a, 0x07, 0x6f, 0x0f, 0x7d, 0x17, 0xd2, 0x32, 0x22, 0x1f, - 0x0f, 0x25, 0x8a, 
0xbe, 0xd4, 0x57, 0x4e, 0xec, 0x83, 0x4b, 0x95, 0xe0, 0xc4, 0xdb, 0x06, 0xba, - 0x05, 0x05, 0x01, 0x54, 0x6f, 0x72, 0x27, 0x27, 0x9f, 0xc6, 0x62, 0x94, 0x4e, 0x1d, 0xb0, 0x1a, - 0xa1, 0x77, 0x09, 0xd2, 0xc2, 0xed, 0xa3, 0xdc, 0x44, 0xdf, 0x54, 0xa3, 0xdc, 0xc4, 0x5e, 0x29, - 0x71, 0x02, 0x7d, 0x07, 0x4c, 0xde, 0xa5, 0xa2, 0x48, 0xde, 0x8e, 0x3c, 0xa5, 0x55, 0x8e, 0x4f, - 0x82, 0x23, 0xc7, 0xbe, 0x17, 0xbc, 0x08, 0x9e, 0x98, 0x7c, 0x99, 0xd0, 0xdb, 0xcb, 0xfb, 0x17, - 0x82, 0x93, 0x6f, 0xcb, 0xa7, 0x2b, 0xdd, 0x1f, 0xa3, 0x53, 0xf1, 0xa3, 0x26, 0xda, 0xe9, 0x4a, - 0xf5, 0xa0, 0xe5, 0x80, 0xe0, 0x06, 0x14, 0x22, 0xbd, 0x69, 0x54, 0xad, 0xfb, 0x1b, 0xeb, 0xa8, - 0x5a, 0xa7, 0x34, 0xb4, 0x38, 0x81, 0xae, 0x41, 0x8e, 0x57, 0x3b, 0x3c, 0xe8, 0xa3, 0xd7, 0x26, - 0x8b, 0x9a, 0x48, 0x32, 0xab, 0x9c, 0x9c, 0xbe, 0x18, 0x10, 0xfa, 0x3e, 0xe4, 0xaf, 0x51, 0xa6, - 0x22, 0xc2, 0x89, 0xc9, 0x90, 0x32, 0x45, 0x53, 0xf1, 0xb0, 0x84, 0x13, 0xe8, 0x23, 0x51, 0x78, - 0xc5, 0xef, 0x18, 0xaa, 0x1d, 0x70, 0x97, 0x02, 0xbe, 0x96, 0x0e, 0x46, 0xd0, 0x94, 0x57, 0x7e, - 0xac, 0xff, 0xb1, 0x5c, 0xb3, 0x98, 0x85, 0x6e, 0xc3, 0xbc, 0x10, 0x39, 0xf8, 0x4b, 0x33, 0xe6, - 0x9a, 0xfb, 0xfe, 0x3f, 0x8d, 0xb9, 0xe6, 0xfe, 0xff, 0x51, 0x71, 0xa2, 0x79, 0xff, 0xe9, 0xb3, - 0x6a, 0xe2, 0x8b, 0x67, 0xd5, 0xc4, 0x57, 0xcf, 0xaa, 0xc6, 0xcf, 0x76, 0xab, 0xc6, 0xef, 0x76, - 0xab, 0xc6, 0x93, 0xdd, 0xaa, 0xf1, 0x74, 0xb7, 0x6a, 0xfc, 0x7d, 0xb7, 0x6a, 0xfc, 0x63, 0xb7, - 0x9a, 0xf8, 0x6a, 0xb7, 0x6a, 0x7c, 0xfa, 0xbc, 0x9a, 0x78, 0xfa, 0xbc, 0x9a, 0xf8, 0xe2, 0x79, - 0x35, 0x71, 0xff, 0xf5, 0xff, 0xd0, 0x05, 0xc8, 0x38, 0x95, 0x11, 0x9f, 0x0b, 0xff, 0x0e, 0x00, - 0x00, 0xff, 0xff, 0x46, 0xc6, 0x73, 0x88, 0x70, 0x1e, 0x00, 0x00, + 0xe8, 0xa9, 0x98, 0xd7, 0xbe, 0x44, 0xb9, 0xa1, 0xea, 0xa2, 0xf5, 0x85, 0x3b, 0xf3, 0xcd, 0x37, + 0xdf, 0x7c, 0xaf, 0xf9, 0x1e, 0x43, 0x78, 0x65, 0xb0, 0xdd, 0x6d, 0xf6, 0xdc, 0xee, 0xc0, 0x73, + 0x99, 0x1b, 0x0c, 0x16, 0xc5, 0x2f, 0x2a, 0xe8, 0x79, 0xad, 0xd2, 0x75, 0xbb, 0xae, 0xc4, 0xe1, + 0x23, 0xb9, 0x5e, 0x6b, 0x74, 0x5d, 0xb7, 0xdb, 0xa3, 0x4d, 0x31, 0xdb, 0x18, 0x6e, 0x36, 0x99, + 0xdd, 0xa7, 0x3e, 0xb3, 0xfa, 0x03, 0x85, 0x30, 0xaf, 0xa8, 0x3f, 0xe8, 0xf5, 0xdd, 0x0e, 0xed, + 0x35, 0x7d, 0x66, 0x31, 0x5f, 0xfe, 0x2a, 0x8c, 0x59, 0x8e, 0x31, 0x18, 0xfa, 0x5b, 0xe2, 0x47, + 0x02, 0x71, 0x05, 0xd0, 0x3a, 0xf3, 0xa8, 0xd5, 0x27, 0x16, 0xa3, 0x3e, 0xa1, 0x0f, 0x86, 0xd4, + 0x67, 0xf8, 0x06, 0xcc, 0xc6, 0xa0, 0xfe, 0xc0, 0x75, 0x7c, 0x8a, 0x2e, 0x40, 0xc9, 0x0f, 0xc1, + 0x55, 0x63, 0x3e, 0xb3, 0x50, 0x5a, 0xaa, 0x2c, 0x06, 0xa2, 0x84, 0x7b, 0x48, 0x14, 0x11, 0xff, + 0xd2, 0x00, 0x08, 0xd7, 0x50, 0x1d, 0x40, 0xae, 0x7e, 0x60, 0xf9, 0x5b, 0x55, 0x63, 0xde, 0x58, + 0x30, 0x49, 0x04, 0x82, 0xce, 0xc2, 0xd1, 0x70, 0x76, 0xd3, 0x5d, 0xdf, 0xb2, 0xbc, 0x4e, 0x35, + 0x2d, 0xd0, 0xf6, 0x2f, 0x20, 0x04, 0xa6, 0x67, 0x31, 0x5a, 0xcd, 0xcc, 0x1b, 0x0b, 0x19, 0x22, + 0xc6, 0x68, 0x0e, 0x72, 0x8c, 0x3a, 0x96, 0xc3, 0xaa, 0xe6, 0xbc, 0xb1, 0x50, 0x24, 0x6a, 0xc6, + 0xe1, 0x5c, 0x76, 0xea, 0x57, 0xb3, 0xf3, 0xc6, 0xc2, 0x14, 0x51, 0x33, 0xfc, 0x59, 0x06, 0xca, + 0x1f, 0x0e, 0xa9, 0xb7, 0xa3, 0x14, 0x80, 0xea, 0x50, 0xf0, 0x69, 0x8f, 0xb6, 0x99, 0xeb, 0x09, + 0x06, 0x8b, 0xad, 0x74, 0xd5, 0x20, 0x01, 0x0c, 0x55, 0x20, 0xdb, 0xb3, 0xfb, 0x36, 0x13, 0x6c, + 0x4d, 0x11, 0x39, 0x41, 0x17, 0x21, 0xeb, 0x33, 0xcb, 0x63, 0x82, 0x97, 0xd2, 0x52, 0x6d, 0x51, + 0x1a, 0x6d, 0x51, 0x1b, 0x6d, 0xf1, 0x8e, 0x36, 0x5a, 0xab, 0xf0, 0x78, 0xd4, 0x48, 0x7d, 0xfa, + 0xd7, 0x86, 0x41, 0xe4, 0x16, 0x74, 0x01, 0x32, 0xd4, 0xe9, 0x08, 0x7e, 
0xbf, 0xee, 0x4e, 0xbe, + 0x01, 0x9d, 0x83, 0x62, 0xc7, 0xf6, 0x68, 0x9b, 0xd9, 0xae, 0x23, 0xa4, 0x9a, 0x5e, 0x9a, 0x0d, + 0x2d, 0xb2, 0xa2, 0x97, 0x48, 0x88, 0x85, 0xce, 0x42, 0xce, 0xe7, 0xaa, 0xf3, 0xab, 0xf9, 0xf9, + 0xcc, 0x42, 0xb1, 0x55, 0xd9, 0x1b, 0x35, 0x66, 0x24, 0xe4, 0xac, 0xdb, 0xb7, 0x19, 0xed, 0x0f, + 0xd8, 0x0e, 0x51, 0x38, 0xe8, 0x0c, 0xe4, 0x3b, 0xb4, 0x47, 0xb9, 0xc1, 0x0b, 0xc2, 0xe0, 0x33, + 0x11, 0xf2, 0x62, 0x81, 0x68, 0x04, 0x74, 0x0f, 0xcc, 0x41, 0xcf, 0x72, 0xaa, 0x45, 0x21, 0xc5, + 0x74, 0x88, 0x78, 0xbb, 0x67, 0x39, 0xad, 0x0b, 0x5f, 0x8e, 0x1a, 0x4b, 0x5d, 0x9b, 0x6d, 0x0d, + 0x37, 0x16, 0xdb, 0x6e, 0xbf, 0xd9, 0xf5, 0xac, 0x4d, 0xcb, 0xb1, 0x9a, 0x3d, 0x77, 0xdb, 0x6e, + 0x72, 0xe7, 0x7c, 0x30, 0xa4, 0x9e, 0x4d, 0xbd, 0x26, 0xa7, 0xb1, 0x28, 0xec, 0xc1, 0xf7, 0x11, + 0x41, 0xf3, 0x9a, 0x59, 0xc8, 0xcd, 0xe4, 0xf1, 0x28, 0x0d, 0x68, 0xdd, 0xea, 0x0f, 0x7a, 0x74, + 0x22, 0x7b, 0x05, 0x96, 0x49, 0x1f, 0xda, 0x32, 0x99, 0x49, 0x2d, 0x13, 0xaa, 0xd9, 0x9c, 0x4c, + 0xcd, 0xd9, 0xaf, 0xab, 0xe6, 0xdc, 0x8b, 0x57, 0x33, 0xae, 0x82, 0xc9, 0x67, 0x68, 0x06, 0x32, + 0x9e, 0xf5, 0x50, 0x28, 0xb3, 0x4c, 0xf8, 0x10, 0xaf, 0x41, 0x4e, 0x32, 0x82, 0x6a, 0x49, 0x6d, + 0xc7, 0x6f, 0x46, 0xa8, 0xe9, 0x8c, 0xd6, 0xe1, 0x4c, 0xa8, 0xc3, 0x8c, 0xd0, 0x0e, 0xfe, 0x95, + 0x01, 0x53, 0xca, 0x84, 0x2a, 0xba, 0x6c, 0x40, 0x5e, 0xde, 0x6e, 0x1d, 0x59, 0x8e, 0x27, 0x23, + 0xcb, 0xe5, 0x8e, 0x35, 0x60, 0xd4, 0x6b, 0x35, 0x1f, 0x8f, 0x1a, 0xc6, 0x97, 0xa3, 0xc6, 0xeb, + 0xcf, 0x93, 0x52, 0x04, 0x39, 0x15, 0x75, 0x34, 0x61, 0xf4, 0x86, 0xe0, 0x8e, 0xf9, 0xca, 0x0f, + 0x8e, 0x2c, 0xca, 0x00, 0xb9, 0xea, 0x74, 0xa9, 0xcf, 0x29, 0x9b, 0xdc, 0x84, 0x44, 0xe2, 0xe0, + 0x9f, 0xc0, 0x6c, 0xcc, 0xd5, 0x14, 0x9f, 0xef, 0x40, 0xce, 0xe7, 0x0a, 0xd4, 0x6c, 0x46, 0x0c, + 0xb5, 0x2e, 0xe0, 0xad, 0x69, 0xc5, 0x5f, 0x4e, 0xce, 0x89, 0xc2, 0x9f, 0xec, 0xf4, 0x3f, 0x1b, + 0x50, 0x5e, 0xb3, 0x36, 0x68, 0x4f, 0xfb, 0x38, 0x02, 0xd3, 0xb1, 0xfa, 0x54, 0x69, 0x5c, 0x8c, + 0x79, 0x40, 0xfb, 0xd8, 0xea, 0x0d, 0xa9, 0x24, 0x59, 0x20, 0x6a, 0x36, 0x69, 0x24, 0x32, 0x0e, + 0x1d, 0x89, 0x8c, 0xd0, 0xdf, 0x2b, 0x90, 0xe5, 0x9e, 0xb5, 0x23, 0xa2, 0x50, 0x91, 0xc8, 0x09, + 0x7e, 0x1d, 0xa6, 0x94, 0x14, 0x4a, 0x7d, 0x21, 0xcb, 0x5c, 0x7d, 0x45, 0xcd, 0x32, 0xee, 0x43, + 0x4e, 0x6a, 0x1b, 0xbd, 0x0a, 0xc5, 0x20, 0xbb, 0x09, 0x69, 0x33, 0xad, 0xdc, 0xde, 0xa8, 0x91, + 0x66, 0x3e, 0x09, 0x17, 0x50, 0x03, 0xb2, 0x62, 0xa7, 0x90, 0xdc, 0x68, 0x15, 0xf7, 0x46, 0x0d, + 0x09, 0x20, 0xf2, 0x83, 0x4e, 0x80, 0xb9, 0xc5, 0x13, 0x0c, 0x57, 0x81, 0xd9, 0x2a, 0xec, 0x8d, + 0x1a, 0x62, 0x4e, 0xc4, 0x2f, 0xbe, 0x0a, 0xe5, 0x35, 0xda, 0xb5, 0xda, 0x3b, 0xea, 0xd0, 0x8a, + 0x26, 0xc7, 0x0f, 0x34, 0x34, 0x8d, 0x53, 0x50, 0x0e, 0x4e, 0xbc, 0xdf, 0xf7, 0x95, 0x53, 0x97, + 0x02, 0xd8, 0x0d, 0x1f, 0xff, 0xc2, 0x00, 0x65, 0x67, 0x84, 0x21, 0xd7, 0xe3, 0xb2, 0xfa, 0x2a, + 0x06, 0xc1, 0xde, 0xa8, 0xa1, 0x20, 0x44, 0x7d, 0xd1, 0x25, 0xc8, 0xfb, 0xe2, 0x44, 0x4e, 0x2c, + 0xe9, 0x3e, 0x62, 0xa1, 0x75, 0x84, 0xbb, 0xc1, 0xde, 0xa8, 0xa1, 0x11, 0x89, 0x1e, 0xa0, 0xc5, + 0x58, 0xe6, 0x94, 0x82, 0x4d, 0xef, 0x8d, 0x1a, 0x11, 0x68, 0x34, 0x93, 0xe2, 0x7f, 0x18, 0x50, + 0xba, 0x63, 0xd9, 0x81, 0x0b, 0x55, 0xb5, 0x89, 0xc2, 0x18, 0x29, 0x01, 0xfc, 0x4a, 0x77, 0x68, + 0xcf, 0xda, 0xb9, 0xe2, 0x7a, 0x82, 0xee, 0x14, 0x09, 0xe6, 0x61, 0xb2, 0x33, 0xc7, 0x26, 0xbb, + 0xec, 0xe4, 0x21, 0xf5, 0xbf, 0x18, 0xc0, 0xae, 0x99, 0x85, 0xf4, 0x4c, 0x06, 0xff, 0xde, 0x80, + 0xb2, 0x94, 0x5c, 0xb9, 0xdd, 0x0f, 0x21, 0x27, 0x15, 0x23, 0x64, 0x7f, 0x4e, 0x70, 0x79, 0x63, + 
0x92, 0xc0, 0xa2, 0x68, 0xa2, 0xf7, 0x60, 0xba, 0xe3, 0xb9, 0x83, 0x01, 0xed, 0xac, 0xab, 0x10, + 0x96, 0x4e, 0x86, 0xb0, 0x95, 0xe8, 0x3a, 0x49, 0xa0, 0xe3, 0xbf, 0x18, 0x30, 0xa5, 0xa2, 0x85, + 0xb2, 0x55, 0xa0, 0x5f, 0xe3, 0xd0, 0x29, 0x2b, 0x3d, 0x69, 0xca, 0x9a, 0x83, 0x5c, 0xd7, 0x73, + 0x87, 0x03, 0xbf, 0x9a, 0x91, 0x77, 0x53, 0xce, 0x26, 0x4b, 0x65, 0xf8, 0x1a, 0x4c, 0x6b, 0x51, + 0x0e, 0x08, 0x99, 0xb5, 0x64, 0xc8, 0x5c, 0xed, 0x50, 0x87, 0xd9, 0x9b, 0x76, 0x10, 0x04, 0x15, + 0x3e, 0xfe, 0x99, 0x01, 0x33, 0x49, 0x14, 0xb4, 0x12, 0xb9, 0x67, 0x9c, 0xdc, 0xe9, 0x83, 0xc9, + 0x2d, 0x8a, 0xe0, 0xe3, 0xbf, 0xef, 0x30, 0x6f, 0x47, 0x93, 0x96, 0x7b, 0x6b, 0x6f, 0x43, 0x29, + 0xb2, 0xc8, 0x53, 0xd4, 0x36, 0x55, 0x37, 0x83, 0xf0, 0x61, 0x18, 0x12, 0xd2, 0x32, 0xa0, 0x89, + 0x09, 0xfe, 0xb9, 0x01, 0x53, 0x31, 0x5b, 0xa2, 0x77, 0xc0, 0xdc, 0xf4, 0xdc, 0xfe, 0x44, 0x86, + 0x12, 0x3b, 0xd0, 0x37, 0x21, 0xcd, 0xdc, 0x89, 0xcc, 0x94, 0x66, 0x2e, 0xb7, 0x92, 0x12, 0x3f, + 0x23, 0xab, 0x5b, 0x39, 0xc3, 0x6f, 0x43, 0x51, 0x08, 0x74, 0xdb, 0xb2, 0xbd, 0xb1, 0xd9, 0x62, + 0xbc, 0x40, 0x97, 0xe0, 0x88, 0x8c, 0x84, 0xe3, 0x37, 0x97, 0xc7, 0x6d, 0x2e, 0xeb, 0xcd, 0xaf, + 0x40, 0x76, 0x79, 0x6b, 0xe8, 0x6c, 0xf3, 0x2d, 0x1d, 0x8b, 0x59, 0x7a, 0x0b, 0x1f, 0xe3, 0x63, + 0x30, 0xcb, 0xef, 0x20, 0xf5, 0xfc, 0x65, 0x77, 0xe8, 0x30, 0xdd, 0x5d, 0x9c, 0x85, 0x4a, 0x1c, + 0xac, 0xbc, 0xa4, 0x02, 0xd9, 0x36, 0x07, 0x08, 0x1a, 0x53, 0x44, 0x4e, 0xf0, 0x6f, 0x0c, 0x40, + 0x57, 0x29, 0x13, 0xa7, 0xac, 0xae, 0x04, 0xd7, 0xa3, 0x06, 0x85, 0xbe, 0xc5, 0xda, 0x5b, 0xd4, + 0xf3, 0x75, 0x0d, 0xa2, 0xe7, 0xff, 0x8b, 0x6a, 0x0f, 0x9f, 0x83, 0xd9, 0x18, 0x97, 0x4a, 0xa6, + 0x1a, 0x14, 0xda, 0x0a, 0xa6, 0xf2, 0x5d, 0x30, 0xc7, 0x7f, 0x48, 0x43, 0x41, 0x6c, 0x20, 0x74, + 0x13, 0x9d, 0x83, 0xd2, 0xa6, 0xed, 0x74, 0xa9, 0x37, 0xf0, 0x6c, 0xa5, 0x02, 0xb3, 0x75, 0x64, + 0x6f, 0xd4, 0x88, 0x82, 0x49, 0x74, 0x82, 0xde, 0x84, 0xfc, 0xd0, 0xa7, 0xde, 0x7d, 0x5b, 0xde, + 0xf4, 0x62, 0xab, 0xb2, 0x3b, 0x6a, 0xe4, 0xbe, 0xe7, 0x53, 0x6f, 0x75, 0x85, 0x67, 0x9e, 0xa1, + 0x18, 0x11, 0xf9, 0xed, 0xa0, 0xeb, 0xca, 0x4d, 0x45, 0x11, 0xd6, 0xfa, 0x16, 0x67, 0x3f, 0x11, + 0xea, 0x06, 0x9e, 0xdb, 0xa7, 0x6c, 0x8b, 0x0e, 0xfd, 0x66, 0xdb, 0xed, 0xf7, 0x5d, 0xa7, 0x29, + 0x7a, 0x49, 0x21, 0x34, 0x4f, 0x9f, 0x7c, 0xbb, 0xf2, 0xdc, 0x3b, 0x90, 0x67, 0x5b, 0x9e, 0x3b, + 0xec, 0x6e, 0x89, 0xac, 0x90, 0x69, 0x5d, 0x9c, 0x9c, 0x9e, 0xa6, 0x40, 0xf4, 0x00, 0x9d, 0xe2, + 0xda, 0xa2, 0xed, 0x6d, 0x7f, 0xd8, 0x97, 0x1d, 0x5a, 0x2b, 0xbb, 0x37, 0x6a, 0x18, 0x6f, 0x92, + 0x00, 0x8c, 0x3f, 0x49, 0x43, 0x43, 0x38, 0xea, 0x5d, 0x51, 0x36, 0x5c, 0x71, 0xbd, 0x1b, 0x94, + 0x79, 0x76, 0xfb, 0xa6, 0xd5, 0xa7, 0xda, 0x37, 0x1a, 0x50, 0xea, 0x0b, 0xe0, 0xfd, 0xc8, 0x15, + 0x80, 0x7e, 0x80, 0x87, 0x4e, 0x02, 0x88, 0x3b, 0x23, 0xd7, 0xe5, 0x6d, 0x28, 0x0a, 0x88, 0x58, + 0x5e, 0x8e, 0x69, 0xaa, 0x39, 0xa1, 0x64, 0x4a, 0x43, 0xab, 0x49, 0x0d, 0x4d, 0x4c, 0x27, 0x50, + 0x4b, 0xd4, 0xd7, 0xb3, 0x71, 0x5f, 0xc7, 0x9f, 0x1b, 0x50, 0x5f, 0xd3, 0x9c, 0x1f, 0x52, 0x1d, + 0x5a, 0xde, 0xf4, 0x0b, 0x92, 0x37, 0xf3, 0x9f, 0xc9, 0x8b, 0xeb, 0x00, 0x6b, 0xb6, 0x43, 0xaf, + 0xd8, 0x3d, 0x46, 0xbd, 0x31, 0x9d, 0xc8, 0x27, 0x99, 0x30, 0x24, 0x10, 0xba, 0xa9, 0xe5, 0x5c, + 0x8e, 0xc4, 0xe1, 0x17, 0x21, 0x46, 0xfa, 0x05, 0x9a, 0x2d, 0x93, 0x08, 0x51, 0xdb, 0x90, 0xdf, + 0x14, 0xe2, 0xc9, 0x94, 0x1a, 0x7b, 0x46, 0x09, 0x65, 0x6f, 0x5d, 0x52, 0x87, 0x9f, 0x7f, 0x5e, + 0x41, 0x22, 0x5e, 0x7d, 0x9a, 0xfe, 0x8e, 0xc3, 0xac, 0x47, 0x91, 0xcd, 0x44, 0x9f, 0x80, 0x7e, + 0xac, 0xca, 0xad, 0xec, 
0xd8, 0x72, 0x4b, 0xdf, 0xdc, 0xc3, 0xf7, 0x8c, 0xef, 0x86, 0xb1, 0x4f, + 0x98, 0x43, 0xc5, 0xbe, 0xd3, 0x60, 0x7a, 0x74, 0x53, 0x27, 0x69, 0x14, 0x1e, 0x1b, 0x60, 0x8a, + 0x75, 0xfc, 0x47, 0x03, 0x66, 0xae, 0x52, 0x16, 0x2f, 0x7f, 0x5e, 0x22, 0x63, 0xe2, 0x0f, 0xe0, + 0x68, 0x84, 0x7f, 0x25, 0xfd, 0xf9, 0x44, 0xcd, 0x73, 0x2c, 0x94, 0x7f, 0xd5, 0xe9, 0xd0, 0x47, + 0xaa, 0x57, 0x8c, 0x97, 0x3b, 0xb7, 0xa1, 0x14, 0x59, 0x44, 0x97, 0x13, 0x85, 0x4e, 0xe4, 0x65, + 0x27, 0x48, 0xd6, 0xad, 0x8a, 0x92, 0x49, 0x76, 0x8b, 0xaa, 0x8c, 0x0d, 0x8a, 0x82, 0x75, 0x40, + 0xc2, 0x5c, 0x82, 0x6c, 0x34, 0x2d, 0x09, 0xe8, 0xf5, 0xa0, 0xe2, 0x09, 0xe6, 0xe8, 0x14, 0x98, + 0x9e, 0xfb, 0x50, 0x57, 0xb0, 0x53, 0xe1, 0x91, 0xc4, 0x7d, 0x48, 0xc4, 0x12, 0xbe, 0x04, 0x19, + 0xe2, 0x3e, 0x44, 0x75, 0x00, 0xcf, 0x72, 0xba, 0xf4, 0x6e, 0xd0, 0x38, 0x95, 0x49, 0x04, 0x72, + 0x40, 0xc9, 0xb0, 0x0c, 0x47, 0xa3, 0x1c, 0x49, 0x73, 0x2f, 0x42, 0xfe, 0xc3, 0x61, 0x54, 0x5d, + 0x95, 0x84, 0xba, 0x64, 0x0f, 0xae, 0x91, 0xb8, 0xcf, 0x40, 0x08, 0x47, 0x27, 0xa0, 0xc8, 0xac, + 0x8d, 0x1e, 0xbd, 0x19, 0x06, 0xb8, 0x10, 0xc0, 0x57, 0x79, 0xcf, 0x77, 0x37, 0x52, 0xfb, 0x84, + 0x00, 0x74, 0x06, 0x66, 0x42, 0x9e, 0x6f, 0x7b, 0x74, 0xd3, 0x7e, 0x24, 0x2c, 0x5c, 0x26, 0xfb, + 0xe0, 0x68, 0x01, 0x8e, 0x84, 0xb0, 0x75, 0x51, 0x63, 0x98, 0x02, 0x35, 0x09, 0xe6, 0xba, 0x11, + 0xe2, 0xbe, 0xff, 0x60, 0x68, 0xf5, 0xc4, 0xcd, 0x2b, 0x93, 0x08, 0x04, 0xff, 0xc9, 0x80, 0xa3, + 0xd2, 0xd4, 0xbc, 0xdb, 0x7f, 0x19, 0xbd, 0xfe, 0x33, 0x03, 0x50, 0x54, 0x02, 0xe5, 0x5a, 0xaf, + 0x45, 0x9f, 0x71, 0x78, 0x11, 0x53, 0x12, 0xad, 0xac, 0x04, 0x85, 0x2f, 0x31, 0x18, 0x72, 0xa2, + 0x10, 0x92, 0x3d, 0xb5, 0x29, 0x7b, 0x65, 0x09, 0x21, 0xea, 0xcb, 0x5b, 0xfc, 0x8d, 0x1d, 0x46, + 0x7d, 0xd5, 0xe9, 0x8a, 0x16, 0x5f, 0x00, 0x88, 0xfc, 0xf0, 0xb3, 0xa8, 0xc3, 0x84, 0xd7, 0x98, + 0xe1, 0x59, 0x0a, 0x44, 0xf4, 0x00, 0xff, 0x2e, 0x0d, 0x53, 0x77, 0xdd, 0xde, 0x30, 0x4c, 0x89, + 0x2f, 0x53, 0xaa, 0x88, 0xb5, 0xdf, 0x59, 0xdd, 0x7e, 0x23, 0x30, 0x7d, 0x46, 0x07, 0xc2, 0xb3, + 0x32, 0x44, 0x8c, 0x11, 0x86, 0x32, 0xb3, 0xbc, 0x2e, 0x65, 0xb2, 0xaf, 0xa9, 0xe6, 0x44, 0xc1, + 0x19, 0x83, 0xa1, 0x79, 0x28, 0x59, 0xdd, 0xae, 0x47, 0xbb, 0x16, 0xa3, 0xad, 0x9d, 0x6a, 0x5e, + 0x1c, 0x16, 0x05, 0xe1, 0x8f, 0x60, 0x5a, 0x2b, 0x4b, 0x99, 0xf4, 0x2d, 0xc8, 0x7f, 0x2c, 0x20, + 0x63, 0x9e, 0xbc, 0x24, 0xaa, 0x0a, 0x63, 0x1a, 0x2d, 0xfe, 0x3e, 0xae, 0x79, 0xc6, 0xd7, 0x20, + 0x27, 0xd1, 0xd1, 0x89, 0x68, 0x77, 0x22, 0xdf, 0x66, 0xf8, 0x5c, 0xb5, 0x1a, 0x18, 0x72, 0x92, + 0x90, 0x32, 0xbc, 0xf0, 0x0d, 0x09, 0x21, 0xea, 0x8b, 0x7f, 0x6d, 0xc0, 0xb1, 0x15, 0xca, 0x68, + 0x9b, 0xd1, 0xce, 0x15, 0x9b, 0xf6, 0x3a, 0x87, 0x6d, 0x9c, 0x8d, 0x43, 0x37, 0xce, 0xe3, 0xde, + 0xbe, 0x32, 0xd1, 0xb7, 0xaf, 0x55, 0x98, 0x4b, 0xb2, 0xa8, 0x34, 0xda, 0x84, 0xdc, 0xa6, 0x80, + 0xec, 0x7f, 0xea, 0x8c, 0xed, 0x20, 0x0a, 0x0d, 0x7b, 0x30, 0x15, 0x5b, 0x10, 0x1a, 0xe6, 0x16, + 0x55, 0xd1, 0x4e, 0x4e, 0xd0, 0x37, 0xc0, 0x64, 0x3b, 0x03, 0x15, 0xe4, 0x5a, 0xc7, 0xfe, 0x39, + 0x6a, 0x1c, 0x8d, 0x6d, 0xbb, 0xb3, 0x33, 0xa0, 0x44, 0xa0, 0x70, 0x47, 0x68, 0x5b, 0x5e, 0xc7, + 0x76, 0xac, 0x9e, 0xcd, 0x24, 0xe3, 0x26, 0x89, 0x82, 0x62, 0x2a, 0x96, 0xde, 0xf3, 0xff, 0xa7, + 0xe2, 0x1f, 0x84, 0x2a, 0xd6, 0x2c, 0x2a, 0x15, 0xbf, 0x07, 0xd3, 0x9d, 0xd8, 0xca, 0xc1, 0xaa, + 0x96, 0x0f, 0x94, 0x09, 0x74, 0xfc, 0x5a, 0xa8, 0x72, 0x01, 0x19, 0xaf, 0xf2, 0x33, 0xa7, 0xa1, + 0x18, 0xfc, 0xcb, 0x82, 0x4a, 0x90, 0xbf, 0x72, 0x8b, 0x7c, 0xff, 0x32, 0x59, 0x99, 0x49, 0xa1, + 0x32, 0x14, 0x5a, 0x97, 0x97, 0xaf, 0x8b, 0x99, 
0xb1, 0xf4, 0x79, 0x56, 0xa7, 0x38, 0x0f, 0x7d, + 0x07, 0xb2, 0x32, 0x6f, 0xcd, 0x85, 0xcc, 0x44, 0xff, 0xcf, 0xa8, 0x1d, 0xdf, 0x07, 0x97, 0x52, + 0xe1, 0xd4, 0x5b, 0x06, 0xba, 0x09, 0x25, 0x01, 0x54, 0x2f, 0x97, 0x27, 0x92, 0x0f, 0x88, 0x31, + 0x4a, 0x27, 0x0f, 0x58, 0x8d, 0xd0, 0xbb, 0x08, 0x59, 0x29, 0xe0, 0x5c, 0xa2, 0xbc, 0x18, 0xc3, + 0x4d, 0xec, 0x2d, 0x17, 0xa7, 0xd0, 0xb7, 0xc1, 0xe4, 0xbd, 0x3c, 0x8a, 0x54, 0x37, 0x91, 0x07, + 0xc7, 0xda, 0x5c, 0x12, 0x1c, 0x39, 0xf6, 0xdd, 0xe0, 0xdd, 0xf4, 0x78, 0xf2, 0xfd, 0x46, 0x6f, + 0xaf, 0xee, 0x5f, 0x08, 0x4e, 0xbe, 0x25, 0x1f, 0xf8, 0xf4, 0x2b, 0x02, 0x3a, 0x19, 0x3f, 0x2a, + 0xf1, 0xe8, 0x50, 0xab, 0x1f, 0xb4, 0x1c, 0x10, 0x5c, 0x83, 0x52, 0xa4, 0x83, 0x8f, 0xaa, 0x75, + 0xff, 0xf3, 0x43, 0x54, 0xad, 0x63, 0xda, 0x7e, 0x9c, 0x42, 0x57, 0xa1, 0xc0, 0x6b, 0x42, 0x9e, + 0x1a, 0xd1, 0x2b, 0xc9, 0xd2, 0x2f, 0x92, 0xf2, 0x6b, 0x27, 0xc6, 0x2f, 0x06, 0x84, 0xbe, 0x0b, + 0xc5, 0xab, 0x94, 0xa9, 0xb8, 0x79, 0x3c, 0x19, 0x78, 0xc7, 0x68, 0x2a, 0x1e, 0xbc, 0x71, 0x0a, + 0x7d, 0x24, 0xca, 0xd3, 0x78, 0x24, 0x42, 0x8d, 0x03, 0x22, 0x4e, 0xc0, 0xd7, 0xfc, 0xc1, 0x08, + 0x9a, 0xf2, 0xd2, 0x8f, 0xf4, 0xff, 0xba, 0x2b, 0x16, 0xb3, 0xd0, 0x2d, 0x98, 0x16, 0x22, 0x07, + 0x7f, 0xfc, 0xc6, 0x5c, 0x73, 0xdf, 0xbf, 0xcc, 0x31, 0xd7, 0xdc, 0xff, 0x6f, 0x33, 0x4e, 0xb5, + 0xee, 0x3d, 0x79, 0x5a, 0x4f, 0x7d, 0xf1, 0xb4, 0x9e, 0xfa, 0xea, 0x69, 0xdd, 0xf8, 0xe9, 0x6e, + 0xdd, 0xf8, 0xed, 0x6e, 0xdd, 0x78, 0xbc, 0x5b, 0x37, 0x9e, 0xec, 0xd6, 0x8d, 0xbf, 0xed, 0xd6, + 0x8d, 0xbf, 0xef, 0xd6, 0x53, 0x5f, 0xed, 0xd6, 0x8d, 0x4f, 0x9f, 0xd5, 0x53, 0x4f, 0x9e, 0xd5, + 0x53, 0x5f, 0x3c, 0xab, 0xa7, 0xee, 0xbd, 0xfa, 0x6f, 0x7a, 0x25, 0x19, 0x6a, 0x72, 0xe2, 0x73, + 0xfe, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xa2, 0x75, 0x76, 0x03, 0x96, 0x1f, 0x00, 0x00, } func (x Direction) String() string { @@ -4460,6 +4610,97 @@ func (this *DetectedField) Equal(that interface{}) bool { } return true } +func (this *DetectedLabelsRequest) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabelsRequest) + if !ok { + that2, ok := that.(DetectedLabelsRequest) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Start == nil { + if this.Start != nil { + return false + } + } else if !this.Start.Equal(*that1.Start) { + return false + } + if that1.End == nil { + if this.End != nil { + return false + } + } else if !this.End.Equal(*that1.End) { + return false + } + if this.Query != that1.Query { + return false + } + return true +} +func (this *DetectedLabelsResponse) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabelsResponse) + if !ok { + that2, ok := that.(DetectedLabelsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if len(this.DetectedLabels) != len(that1.DetectedLabels) { + return false + } + for i := range this.DetectedLabels { + if !this.DetectedLabels[i].Equal(that1.DetectedLabels[i]) { + return false + } + } + return true +} +func (this *DetectedLabel) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabel) + if !ok { + that2, ok := that.(DetectedLabel) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + 
if this.Label != that1.Label { + return false + } + return true +} func (this *StreamRatesRequest) GoString() string { if this == nil { return "nil" @@ -5077,6 +5318,40 @@ func (this *DetectedField) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *DetectedLabelsRequest) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 7) + s = append(s, "&logproto.DetectedLabelsRequest{") + s = append(s, "Start: "+fmt.Sprintf("%#v", this.Start)+",\n") + s = append(s, "End: "+fmt.Sprintf("%#v", this.End)+",\n") + s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedLabelsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.DetectedLabelsResponse{") + if this.DetectedLabels != nil { + s = append(s, "DetectedLabels: "+fmt.Sprintf("%#v", this.DetectedLabels)+",\n") + } + s = append(s, "}") + return strings.Join(s, "") +} +func (this *DetectedLabel) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 5) + s = append(s, "&logproto.DetectedLabel{") + s = append(s, "Label: "+fmt.Sprintf("%#v", this.Label)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func valueToGoStringLogproto(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -7795,45 +8070,162 @@ func (m *DetectedField) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func encodeVarintLogproto(dAtA []byte, offset int, v uint64) int { - offset -= sovLogproto(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ +func (m *DetectedLabelsRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err } - dAtA[offset] = uint8(v) - return base + return dAtA[:n], nil } -func (m *StreamRatesRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - return n + +func (m *DetectedLabelsRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *StreamRatesResponse) Size() (n int) { - if m == nil { - return 0 - } +func (m *DetectedLabelsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i var l int _ = l - if len(m.StreamRates) > 0 { - for _, e := range m.StreamRates { - l = e.Size() - n += 1 + l + sovLogproto(uint64(l)) + if len(m.Query) > 0 { + i -= len(m.Query) + copy(dAtA[i:], m.Query) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Query))) + i-- + dAtA[i] = 0x1a + } + if m.End != nil { + n23, err23 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.End, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.End):]) + if err23 != nil { + return 0, err23 } + i -= n23 + i = encodeVarintLogproto(dAtA, i, uint64(n23)) + i-- + dAtA[i] = 0x12 } - return n + if m.Start != nil { + n24, err24 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Start, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start):]) + if err24 != nil { + return 0, err24 + } + i -= n24 + i = encodeVarintLogproto(dAtA, i, uint64(n24)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil } -func (m *StreamRate) Size() (n int) { - if m == nil { - return 0 - } +func (m *DetectedLabelsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := 
m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedLabelsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabelsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.DetectedLabels) > 0 { + for iNdEx := len(m.DetectedLabels) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.DetectedLabels[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintLogproto(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *DetectedLabel) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedLabel) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabel) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Label) > 0 { + i -= len(m.Label) + copy(dAtA[i:], m.Label) + i = encodeVarintLogproto(dAtA, i, uint64(len(m.Label))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func encodeVarintLogproto(dAtA []byte, offset int, v uint64) int { + offset -= sovLogproto(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *StreamRatesRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + return n +} + +func (m *StreamRatesResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.StreamRates) > 0 { + for _, e := range m.StreamRates { + l = e.Size() + n += 1 + l + sovLogproto(uint64(l)) + } + } + return n +} + +func (m *StreamRate) Size() (n int) { + if m == nil { + return 0 + } var l int _ = l if m.StreamHash != 0 { @@ -8744,6 +9136,55 @@ func (m *DetectedField) Size() (n int) { return n } +func (m *DetectedLabelsRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Start != nil { + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start) + n += 1 + l + sovLogproto(uint64(l)) + } + if m.End != nil { + l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.End) + n += 1 + l + sovLogproto(uint64(l)) + } + l = len(m.Query) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + return n +} + +func (m *DetectedLabelsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.DetectedLabels) > 0 { + for _, e := range m.DetectedLabels { + l = e.Size() + n += 1 + l + sovLogproto(uint64(l)) + } + } + return n +} + +func (m *DetectedLabel) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Label) + if l > 0 { + n += 1 + l + sovLogproto(uint64(l)) + } + return n +} + func sovLogproto(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -9386,6 +9827,43 @@ func (this *DetectedField) String() string { }, "") return s } +func (this *DetectedLabelsRequest) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedLabelsRequest{`, + `Start:` + strings.Replace(fmt.Sprintf("%v", this.Start), "Timestamp", "types.Timestamp", 1) + `,`, + `End:` + strings.Replace(fmt.Sprintf("%v", this.End), "Timestamp", "types.Timestamp", 1) + `,`, + `Query:` + 
fmt.Sprintf("%v", this.Query) + `,`, + `}`, + }, "") + return s +} +func (this *DetectedLabelsResponse) String() string { + if this == nil { + return "nil" + } + repeatedStringForDetectedLabels := "[]*DetectedLabel{" + for _, f := range this.DetectedLabels { + repeatedStringForDetectedLabels += strings.Replace(f.String(), "DetectedLabel", "DetectedLabel", 1) + "," + } + repeatedStringForDetectedLabels += "}" + s := strings.Join([]string{`&DetectedLabelsResponse{`, + `DetectedLabels:` + repeatedStringForDetectedLabels + `,`, + `}`, + }, "") + return s +} +func (this *DetectedLabel) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedLabel{`, + `Label:` + fmt.Sprintf("%v", this.Label) + `,`, + `}`, + }, "") + return s +} func valueToStringLogproto(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -15677,6 +16155,335 @@ func (m *DetectedField) Unmarshal(dAtA []byte) error { } return nil } +func (m *DetectedLabelsRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabelsRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabelsRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Start", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Start == nil { + m.Start = new(time.Time) + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.Start, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field End", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.End == nil { + m.End = new(time.Time) + } + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.End, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Query", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + 
} + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Query = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedLabelsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabelsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabelsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedLabels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.DetectedLabels = append(m.DetectedLabels, &DetectedLabel{}) + if err := m.DetectedLabels[len(m.DetectedLabels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *DetectedLabel) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabel: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabel: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Label", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + 
iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthLogproto + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthLogproto + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Label = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipLogproto(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthLogproto + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipLogproto(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index 136400a555f2e..9dd58a8b5f3a4 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -444,3 +444,23 @@ message DetectedField { string type = 2 [(gogoproto.casttype) = "DetectedFieldType"]; uint64 cardinality = 3; } + +message DetectedLabelsRequest { + google.protobuf.Timestamp start = 1 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = true + ]; + google.protobuf.Timestamp end = 2 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = true + ]; + string query = 3; +} + +message DetectedLabelsResponse { + repeated DetectedLabel detectedLabels = 1; +} + +message DetectedLabel { + string label = 1; +} diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index b4cc8632ff918..ee23dc8f3f5fa 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -578,3 +578,7 @@ func extractShard(shards []string) *astmapper.ShardAnnotation { return &shard } + +func RecordDetectedLabelsQueryMetrics(_ context.Context, _ log.Logger, _ time.Time, _ time.Time, _ string, _ string, _ logql_stats.Result) { + // TODO(shantanu) log metrics here +} diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index 2b438ad158ec1..cc616924c1f07 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -1040,6 +1040,7 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { t.Server.HTTP.Path("/loki/api/v1/label/{name}/values").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/series").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/detected_fields").Methods("GET", "POST").Handler(frontendHandler) + t.Server.HTTP.Path("/loki/api/v1/detected_labels").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/stats").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/shards").Methods("GET", "POST").Handler(frontendHandler) t.Server.HTTP.Path("/loki/api/v1/index/volume").Methods("GET", "POST").Handler(frontendHandler) diff --git a/pkg/querier/handler.go b/pkg/querier/handler.go index b2a7fd70cae44..f5415344ab0c1 100644 --- a/pkg/querier/handler.go +++ b/pkg/querier/handler.go @@ -120,6 +120,13 @@ func (h *Handler) Do(ctx context.Context, req queryrangebase.Request) (queryrang return &queryrange.DetectedFieldsResponse{ Response: result, }, nil + case *queryrange.DetectedLabelsRequest: + result, err := h.api.DetectedLabelsHandler(ctx, &concrete.DetectedLabelsRequest) + if err != nil { + return nil, err + } + + return &queryrange.DetectedLabelsResponse{Response: result}, nil default: return nil, fmt.Errorf("unsupported query type %T", req) } 
diff --git a/pkg/querier/http.go b/pkg/querier/http.go index c3b74428b5b1f..348de10d0e16d 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -409,6 +409,16 @@ func (q *QuerierAPI) validateMaxEntriesLimits(ctx context.Context, expr syntax.E return nil } +// DetectedLabelsHandler returns a response for detected labels +func (q *QuerierAPI) DetectedLabelsHandler(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + resp, err := q.querier.DetectedLabels(ctx, req) + + if err != nil { + return nil, err + } + return resp, nil +} + // WrapQuerySpanAndTimeout applies a context deadline and a span logger to a query call. // // The timeout is based on the per-tenant query timeout configuration. diff --git a/pkg/querier/multi_tenant_querier.go b/pkg/querier/multi_tenant_querier.go index 6338b51e978a6..0643caeb7b315 100644 --- a/pkg/querier/multi_tenant_querier.go +++ b/pkg/querier/multi_tenant_querier.go @@ -258,6 +258,28 @@ func (q *MultiTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeReq return merged, nil } +func (q *MultiTenantQuerier) DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + // TODO(shantanu) + tenantIDs, err := tenant.TenantID(ctx) + if err != nil { + return nil, err + } + + if len(tenantIDs) == 1 { + return q.Querier.DetectedLabels(ctx, req) + } + + //resp := make([]*logproto.DetectedLabels, len(tenantIDs)) + + return &logproto.DetectedLabelsResponse{ + DetectedLabels: []*logproto.DetectedLabel{ + {Label: "cluster"}, + {Label: "namespace"}, + {Label: "instance"}, + }, + }, nil +} + // removeTenantSelector filters the given tenant IDs based on any tenant ID filter the in passed selector. func removeTenantSelector(params logql.SelectSampleParams, tenantIDs []string) (map[string]struct{}, syntax.Expr, error) { expr, err := params.Expr() diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index c2e744a6f03d9..ff83302a0e288 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -96,6 +96,7 @@ type Querier interface { IndexShards(ctx context.Context, req *loghttp.RangeQuery, targetBytesPerShard uint64) (*logproto.ShardsResponse, error) Volume(ctx context.Context, req *logproto.VolumeRequest) (*logproto.VolumeResponse, error) DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) + DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) } type Limits querier_limits.Limits @@ -910,3 +911,13 @@ func (q *SingleTenantQuerier) DetectedFields(_ context.Context, _ *logproto.Dete }, }, nil } + +func (q *SingleTenantQuerier) DetectedLabels(_ context.Context, _ *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + return &logproto.DetectedLabelsResponse{ + DetectedLabels: []*logproto.DetectedLabel{ + {Label: "namespace"}, + {Label: "cluster"}, + {Label: "instance"}, + }, + }, nil +} diff --git a/pkg/querier/querier_mock_test.go b/pkg/querier/querier_mock_test.go index b94037898c554..7be2c0cefed0d 100644 --- a/pkg/querier/querier_mock_test.go +++ b/pkg/querier/querier_mock_test.go @@ -592,6 +592,18 @@ func (q *querierMock) DetectedFields(ctx context.Context, req *logproto.Detected return resp.(*logproto.DetectedFieldsResponse), err } +func (q *querierMock) DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { + args := 
q.MethodCalled("DetectedFields", ctx, req) + + resp := args.Get(0) + err := args.Error(1) + if resp == nil { + return nil, err + } + + return resp.(*logproto.DetectedLabelsResponse), err +} + type engineMock struct { util.ExtendedMock } diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index f56ef55c08e20..d850df4954560 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -6,12 +6,12 @@ import ( "context" "errors" "fmt" - io "io" + "io" "net/http" "net/url" "regexp" "sort" - strings "strings" + "strings" "time" "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" @@ -261,6 +261,80 @@ func (r *LabelRequest) Path() string { func (*LabelRequest) GetCachingOptions() (res queryrangebase.CachingOptions) { return } +type DetectedLabelsRequest struct { + path string + logproto.DetectedLabelsRequest +} + +// NewDetectedLabelsRequest creates a new request for detected labels +func NewDetectedLabelsRequest(start, end time.Time, query, path string) *DetectedLabelsRequest { + return &DetectedLabelsRequest{ + DetectedLabelsRequest: logproto.DetectedLabelsRequest{ + Start: &start, + End: &end, + Query: query, + }, + path: path, + } +} + +func (r *DetectedLabelsRequest) AsProto() *logproto.DetectedLabelsRequest { + return &r.DetectedLabelsRequest +} + +func (r *DetectedLabelsRequest) GetEnd() time.Time { + return *r.End +} + +func (r *DetectedLabelsRequest) GetEndTs() time.Time { + return *r.End +} + +func (r *DetectedLabelsRequest) GetStart() time.Time { + return *r.Start +} + +func (r *DetectedLabelsRequest) GetStartTs() time.Time { + return *r.Start +} + +func (r *DetectedLabelsRequest) GetStep() int64 { + return 0 +} + +func (r *DetectedLabelsRequest) WithStartEnd(s, e time.Time) queryrangebase.Request { + clone := *r + clone.Start = &s + clone.End = &e + return &clone +} + +// WithStartEndForCache implements resultscache.Request. 
+func (r *DetectedLabelsRequest) WithStartEndForCache(s time.Time, e time.Time) resultscache.Request { + return r.WithStartEnd(s, e).(resultscache.Request) +} + +func (r *DetectedLabelsRequest) WithQuery(query string) queryrangebase.Request { + clone := *r + clone.Query = query + return &clone +} + +func (r *DetectedLabelsRequest) LogToSpan(sp opentracing.Span) { + sp.LogFields( + otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + ) +} + +func (r *DetectedLabelsRequest) Path() string { + return r.path +} + +func (*DetectedLabelsRequest) GetCachingOptions() (res queryrangebase.CachingOptions) { + return +} + func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (queryrangebase.Request, error) { if err := r.ParseForm(); err != nil { return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) @@ -399,6 +473,16 @@ func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (quer DetectedFieldsRequest: *req, path: r.URL.Path, }, nil + case DetectedLabelsOp: + req, err := loghttp.ParseDetectedLabelsQuery(r) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + + return &DetectedLabelsRequest{ + DetectedLabelsRequest: *req, + path: r.URL.Path, + }, nil default: return nil, httpgrpc.Errorf(http.StatusNotFound, fmt.Sprintf("unknown request path: %s", r.URL.Path)) } @@ -602,6 +686,15 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) DetectedFieldsRequest: *req, path: httpReq.URL.Path, }, ctx, nil + case DetectedLabelsOp: + req, err := loghttp.ParseDetectedLabelsQuery(httpReq) + if err != nil { + return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + return &DetectedLabelsRequest{ + DetectedLabelsRequest: *req, + path: httpReq.URL.Path, + }, ctx, err default: return nil, ctx, httpgrpc.Errorf(http.StatusBadRequest, fmt.Sprintf("unknown request path in HTTP gRPC decode: %s", r.Url)) } @@ -878,6 +971,26 @@ func (c Codec) EncodeRequest(ctx context.Context, r queryrangebase.Request) (*ht Header: header, } + return req.WithContext(ctx), nil + case *DetectedLabelsRequest: + params := url.Values{ + "start": []string{fmt.Sprintf("%d", request.Start.UnixNano())}, + "end": []string{fmt.Sprintf("%d", request.End.UnixNano())}, + "query": []string{request.GetQuery()}, + } + + u := &url.URL{ + Path: "/loki/api/v1/detected_labels", + RawQuery: params.Encode(), + } + req := &http.Request{ + Method: "GET", + RequestURI: u.String(), // This is what the httpgrpc code looks at. 
+ URL: u, + Body: http.NoBody, + Header: header, + } + return req.WithContext(ctx), nil default: return nil, httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid request format, got (%T)", r)) @@ -906,6 +1019,8 @@ func (c Codec) Path(r queryrangebase.Request) string { return "/loki/api/v1/index/volume_range" case *DetectedFieldsRequest: return "/loki/api/v1/detected_fields" + case *DetectedLabelsRequest: + return "/loki/api/v1/detected_labels" } return "other" @@ -1234,6 +1349,10 @@ func encodeResponseJSONTo(version loghttp.Version, res queryrangebase.Response, if err := marshal.WriteDetectedFieldsResponseJSON(response.Response, w); err != nil { return err } + case *DetectedLabelsResponse: + if err := marshal.WriteDetectedLabelsResponseJSON(response.Response, w); err != nil { + return err + } default: return httpgrpc.Errorf(http.StatusInternalServerError, fmt.Sprintf("invalid response format, got (%T)", res)) } diff --git a/pkg/querier/queryrange/extensions.go b/pkg/querier/queryrange/extensions.go index b2924341eac72..40e5321e9db66 100644 --- a/pkg/querier/queryrange/extensions.go +++ b/pkg/querier/queryrange/extensions.go @@ -253,3 +253,20 @@ func (m *DetectedFieldsResponse) WithHeaders(h []queryrangebase.PrometheusRespon m.Headers = h return m } + +// GetHeaders returns the HTTP headers in the response. +func (m *DetectedLabelsResponse) GetHeaders() []*queryrangebase.PrometheusResponseHeader { + if m != nil { + return convertPrometheusResponseHeadersToPointers(m.Headers) + } + return nil +} + +func (m *DetectedLabelsResponse) SetHeader(name, value string) { + m.Headers = setHeader(m.Headers, name, value) +} + +func (m *DetectedLabelsResponse) WithHeaders(h []queryrangebase.PrometheusResponseHeader) queryrangebase.Response { + m.Headers = h + return m +} diff --git a/pkg/querier/queryrange/marshal.go b/pkg/querier/queryrange/marshal.go index 473b3714464e2..3640012f88a29 100644 --- a/pkg/querier/queryrange/marshal.go +++ b/pkg/querier/queryrange/marshal.go @@ -212,6 +212,8 @@ func QueryResponseUnwrap(res *QueryResponse) (queryrangebase.Response, error) { return concrete.TopkSketches, nil case *QueryResponse_QuantileSketches: return concrete.QuantileSketches, nil + case *QueryResponse_DetectedLabels: + return concrete.DetectedLabels, nil default: return nil, fmt.Errorf("unsupported QueryResponse response type, got (%T)", res.Response) } @@ -247,6 +249,8 @@ func QueryResponseWrap(res queryrangebase.Response) (*QueryResponse, error) { p.Response = &QueryResponse_QuantileSketches{response} case *ShardsResponse: p.Response = &QueryResponse_ShardsResponse{response} + case *DetectedLabelsResponse: + p.Response = &QueryResponse_DetectedLabels{response} default: return nil, fmt.Errorf("invalid response format, got (%T)", res) } @@ -335,6 +339,10 @@ func (Codec) QueryRequestUnwrap(ctx context.Context, req *QueryRequest) (queryra return &LabelRequest{ LabelRequest: *concrete.Labels, }, ctx, nil + case *QueryRequest_DetectedLabels: + return &DetectedLabelsRequest{ + DetectedLabelsRequest: *concrete.DetectedLabels, + }, ctx, nil default: return nil, ctx, fmt.Errorf("unsupported request type while unwrapping, got (%T)", req.Request) } @@ -361,6 +369,8 @@ func (Codec) QueryRequestWrap(ctx context.Context, r queryrangebase.Request) (*Q result.Request = &QueryRequest_Streams{Streams: req} case *logproto.ShardsRequest: result.Request = &QueryRequest_ShardsRequest{ShardsRequest: req} + case *DetectedLabelsRequest: + result.Request = &QueryRequest_DetectedLabels{DetectedLabels: 
&req.DetectedLabelsRequest} default: return nil, fmt.Errorf("unsupported request type while wrapping, got (%T)", r) } diff --git a/pkg/querier/queryrange/queryrange.pb.go b/pkg/querier/queryrange/queryrange.pb.go index 1eaee30c61eb8..2312afbd71e7c 100644 --- a/pkg/querier/queryrange/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrange.pb.go @@ -905,6 +905,43 @@ func (m *DetectedFieldsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_DetectedFieldsResponse proto.InternalMessageInfo +type DetectedLabelsResponse struct { + Response *github_com_grafana_loki_pkg_logproto.DetectedLabelsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.DetectedLabelsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` +} + +func (m *DetectedLabelsResponse) Reset() { *m = DetectedLabelsResponse{} } +func (*DetectedLabelsResponse) ProtoMessage() {} +func (*DetectedLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_51b9d53b40d11902, []int{15} +} +func (m *DetectedLabelsResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *DetectedLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_DetectedLabelsResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *DetectedLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLabelsResponse.Merge(m, src) +} +func (m *DetectedLabelsResponse) XXX_Size() int { + return m.Size() +} +func (m *DetectedLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLabelsResponse proto.InternalMessageInfo + type QueryResponse struct { Status *rpc.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` // Types that are valid to be assigned to Response: @@ -918,13 +955,14 @@ type QueryResponse struct { // *QueryResponse_QuantileSketches // *QueryResponse_ShardsResponse // *QueryResponse_DetectedFields + // *QueryResponse_DetectedLabels Response isQueryResponse_Response `protobuf_oneof:"response"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } func (*QueryResponse) ProtoMessage() {} func (*QueryResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{15} + return fileDescriptor_51b9d53b40d11902, []int{16} } func (m *QueryResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -990,6 +1028,9 @@ type QueryResponse_ShardsResponse struct { type QueryResponse_DetectedFields struct { DetectedFields *DetectedFieldsResponse `protobuf:"bytes,11,opt,name=detectedFields,proto3,oneof"` } +type QueryResponse_DetectedLabels struct { + DetectedLabels *DetectedLabelsResponse `protobuf:"bytes,12,opt,name=detectedLabels,proto3,oneof"` +} func (*QueryResponse_Series) isQueryResponse_Response() {} func (*QueryResponse_Labels) isQueryResponse_Response() {} @@ -1001,6 +1042,7 @@ func (*QueryResponse_TopkSketches) isQueryResponse_Response() {} func (*QueryResponse_QuantileSketches) isQueryResponse_Response() {} func (*QueryResponse_ShardsResponse) isQueryResponse_Response() {} func 
(*QueryResponse_DetectedFields) isQueryResponse_Response() {} +func (*QueryResponse_DetectedLabels) isQueryResponse_Response() {} func (m *QueryResponse) GetResponse() isQueryResponse_Response { if m != nil { @@ -1086,6 +1128,13 @@ func (m *QueryResponse) GetDetectedFields() *DetectedFieldsResponse { return nil } +func (m *QueryResponse) GetDetectedLabels() *DetectedLabelsResponse { + if x, ok := m.GetResponse().(*QueryResponse_DetectedLabels); ok { + return x.DetectedLabels + } + return nil +} + // XXX_OneofWrappers is for the internal use of the proto package. func (*QueryResponse) XXX_OneofWrappers() []interface{} { return []interface{}{ @@ -1099,6 +1148,7 @@ func (*QueryResponse) XXX_OneofWrappers() []interface{} { (*QueryResponse_QuantileSketches)(nil), (*QueryResponse_ShardsResponse)(nil), (*QueryResponse_DetectedFields)(nil), + (*QueryResponse_DetectedLabels)(nil), } } @@ -1112,6 +1162,7 @@ type QueryRequest struct { // *QueryRequest_Volume // *QueryRequest_ShardsRequest // *QueryRequest_DetectedFields + // *QueryRequest_DetectedLabels Request isQueryRequest_Request `protobuf_oneof:"request"` Metadata map[string]string `protobuf:"bytes,7,rep,name=metadata,proto3" json:"metadata" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } @@ -1119,7 +1170,7 @@ type QueryRequest struct { func (m *QueryRequest) Reset() { *m = QueryRequest{} } func (*QueryRequest) ProtoMessage() {} func (*QueryRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_51b9d53b40d11902, []int{16} + return fileDescriptor_51b9d53b40d11902, []int{17} } func (m *QueryRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1179,6 +1230,9 @@ type QueryRequest_ShardsRequest struct { type QueryRequest_DetectedFields struct { DetectedFields *logproto.DetectedFieldsRequest `protobuf:"bytes,9,opt,name=detectedFields,proto3,oneof"` } +type QueryRequest_DetectedLabels struct { + DetectedLabels *logproto.DetectedLabelsRequest `protobuf:"bytes,10,opt,name=detectedLabels,proto3,oneof"` +} func (*QueryRequest_Series) isQueryRequest_Request() {} func (*QueryRequest_Labels) isQueryRequest_Request() {} @@ -1188,6 +1242,7 @@ func (*QueryRequest_Streams) isQueryRequest_Request() {} func (*QueryRequest_Volume) isQueryRequest_Request() {} func (*QueryRequest_ShardsRequest) isQueryRequest_Request() {} func (*QueryRequest_DetectedFields) isQueryRequest_Request() {} +func (*QueryRequest_DetectedLabels) isQueryRequest_Request() {} func (m *QueryRequest) GetRequest() isQueryRequest_Request { if m != nil { @@ -1252,6 +1307,13 @@ func (m *QueryRequest) GetDetectedFields() *logproto.DetectedFieldsRequest { return nil } +func (m *QueryRequest) GetDetectedLabels() *logproto.DetectedLabelsRequest { + if x, ok := m.GetRequest().(*QueryRequest_DetectedLabels); ok { + return x.DetectedLabels + } + return nil +} + func (m *QueryRequest) GetMetadata() map[string]string { if m != nil { return m.Metadata @@ -1270,6 +1332,7 @@ func (*QueryRequest) XXX_OneofWrappers() []interface{} { (*QueryRequest_Volume)(nil), (*QueryRequest_ShardsRequest)(nil), (*QueryRequest_DetectedFields)(nil), + (*QueryRequest_DetectedLabels)(nil), } } @@ -1289,6 +1352,7 @@ func init() { proto.RegisterType((*QuantileSketchResponse)(nil), "queryrange.QuantileSketchResponse") proto.RegisterType((*ShardsResponse)(nil), "queryrange.ShardsResponse") proto.RegisterType((*DetectedFieldsResponse)(nil), "queryrange.DetectedFieldsResponse") + proto.RegisterType((*DetectedLabelsResponse)(nil), "queryrange.DetectedLabelsResponse") 
proto.RegisterType((*QueryResponse)(nil), "queryrange.QueryResponse") proto.RegisterType((*QueryRequest)(nil), "queryrange.QueryRequest") proto.RegisterMapType((map[string]string)(nil), "queryrange.QueryRequest.MetadataEntry") @@ -1299,112 +1363,114 @@ func init() { } var fileDescriptor_51b9d53b40d11902 = []byte{ - // 1665 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0xcd, 0x6f, 0x1b, 0xc5, - 0x1b, 0xf6, 0xfa, 0x33, 0x9e, 0x7c, 0xfc, 0xf2, 0x9b, 0x44, 0xe9, 0x92, 0xb6, 0x5e, 0xcb, 0x12, - 0x6d, 0x40, 0xb0, 0xa6, 0x49, 0xe9, 0x37, 0x1f, 0x5d, 0xd2, 0xca, 0x11, 0x2d, 0x6a, 0x37, 0x11, - 0x07, 0xc4, 0x65, 0x62, 0x4f, 0x9c, 0x25, 0x6b, 0xef, 0x66, 0x67, 0x9c, 0x36, 0x07, 0x24, 0xfe, - 0x00, 0x90, 0xfa, 0x57, 0x20, 0x24, 0xaa, 0x9e, 0x38, 0x71, 0x44, 0x02, 0x7a, 0xec, 0xb1, 0xb2, - 0xc4, 0x42, 0xd3, 0x0b, 0xca, 0xa9, 0x27, 0xce, 0x68, 0x3e, 0x76, 0x3d, 0x6b, 0x3b, 0xad, 0x53, - 0x84, 0xd4, 0x48, 0x5c, 0xec, 0x99, 0xd9, 0xf7, 0x99, 0x9d, 0x7d, 0xde, 0xe7, 0x7d, 0xe7, 0x9d, - 0x01, 0xa7, 0xfd, 0xad, 0x66, 0x75, 0xbb, 0x83, 0x03, 0x07, 0x07, 0xfc, 0x7f, 0x37, 0x40, 0xed, - 0x26, 0x56, 0x9a, 0xa6, 0x1f, 0x78, 0xd4, 0x83, 0xa0, 0x37, 0x32, 0xbf, 0xd8, 0x74, 0xe8, 0x66, - 0x67, 0xdd, 0xac, 0x7b, 0xad, 0x6a, 0xd3, 0x6b, 0x7a, 0xd5, 0xa6, 0xe7, 0x35, 0x5d, 0x8c, 0x7c, - 0x87, 0xc8, 0x66, 0x35, 0xf0, 0xeb, 0x55, 0x42, 0x11, 0xed, 0x10, 0x81, 0x9f, 0x9f, 0x65, 0x86, - 0xbc, 0xc9, 0x21, 0x72, 0xd4, 0x90, 0xe6, 0xbc, 0xb7, 0xde, 0xd9, 0xa8, 0x52, 0xa7, 0x85, 0x09, - 0x45, 0x2d, 0x3f, 0x32, 0x60, 0xeb, 0x73, 0xbd, 0xa6, 0x40, 0x3a, 0xed, 0x06, 0xbe, 0xdb, 0x44, - 0x14, 0xdf, 0x41, 0xbb, 0xd2, 0xe0, 0x78, 0xc2, 0x20, 0x6a, 0xc8, 0x87, 0xaf, 0x25, 0x1e, 0x92, - 0x2d, 0x4c, 0xeb, 0x9b, 0xf2, 0x51, 0x59, 0x3e, 0xda, 0x76, 0x5b, 0x5e, 0x03, 0xbb, 0x7c, 0xb1, - 0x44, 0xfc, 0x4a, 0x8b, 0x19, 0x66, 0xe1, 0x77, 0xc8, 0x26, 0xff, 0x91, 0x83, 0x1f, 0xbd, 0x90, - 0xaf, 0x75, 0x44, 0x70, 0xb5, 0x81, 0x37, 0x9c, 0xb6, 0x43, 0x1d, 0xaf, 0x4d, 0xd4, 0xb6, 0x9c, - 0xe4, 0xdc, 0x68, 0x93, 0xf4, 0xfb, 0xa0, 0xf2, 0x20, 0x03, 0xc6, 0x6f, 0x78, 0x5b, 0x8e, 0x8d, - 0xb7, 0x3b, 0x98, 0x50, 0x38, 0x0b, 0x72, 0xdc, 0x46, 0xd7, 0xca, 0xda, 0x42, 0xd1, 0x16, 0x1d, - 0x36, 0xea, 0x3a, 0x2d, 0x87, 0xea, 0xe9, 0xb2, 0xb6, 0x30, 0x69, 0x8b, 0x0e, 0x84, 0x20, 0x4b, - 0x28, 0xf6, 0xf5, 0x4c, 0x59, 0x5b, 0xc8, 0xd8, 0xbc, 0x0d, 0xe7, 0xc1, 0x98, 0xd3, 0xa6, 0x38, - 0xd8, 0x41, 0xae, 0x5e, 0xe4, 0xe3, 0x71, 0x1f, 0xbe, 0x0f, 0x0a, 0x84, 0xa2, 0x80, 0xae, 0x11, - 0x3d, 0x5b, 0xd6, 0x16, 0xc6, 0x17, 0xe7, 0x4d, 0xe1, 0x2b, 0x33, 0xf2, 0x95, 0xb9, 0x16, 0xf9, - 0xca, 0x1a, 0x7b, 0x18, 0x1a, 0xa9, 0x7b, 0xbf, 0x1b, 0x9a, 0x1d, 0x81, 0xe0, 0x25, 0x90, 0xc3, - 0xed, 0xc6, 0x1a, 0xd1, 0x73, 0x87, 0x40, 0x0b, 0x08, 0x3c, 0x03, 0x8a, 0x0d, 0x27, 0xc0, 0x75, - 0xc6, 0x99, 0x9e, 0x2f, 0x6b, 0x0b, 0x53, 0x8b, 0x33, 0x66, 0xec, 0xda, 0xe5, 0xe8, 0x91, 0xdd, - 0xb3, 0x62, 0x9f, 0xe7, 0x23, 0xba, 0xa9, 0x17, 0x38, 0x13, 0xbc, 0x0d, 0x2b, 0x20, 0x4f, 0x36, - 0x51, 0xd0, 0x20, 0xfa, 0x58, 0x39, 0xb3, 0x50, 0xb4, 0xc0, 0x7e, 0x68, 0xc8, 0x11, 0x5b, 0xfe, - 0xc3, 0xcf, 0x41, 0xd6, 0x77, 0x51, 0x5b, 0x07, 0x7c, 0x95, 0xd3, 0xa6, 0xc2, 0xf9, 0x2d, 0x17, - 0xb5, 0xad, 0x73, 0xdd, 0xd0, 0x48, 0xc8, 0x3d, 0x40, 0x1b, 0xa8, 0x8d, 0xaa, 0xae, 0xb7, 0xe5, - 0x54, 0x55, 0x37, 0xb2, 0x59, 0xcc, 0xdb, 0x0c, 0xcd, 0x70, 0x36, 0x9f, 0xb5, 0xf2, 0x6b, 0x1a, - 0x40, 0xe6, 0xb0, 0x95, 0x36, 0xa1, 0xa8, 0x4d, 0x5f, 0xc6, 0x6f, 0x57, 0x40, 0x9e, 0xc5, 0xc4, - 0x1a, 0xe1, 0x9e, 0x1b, 0x95, 0x48, 0x89, 0x49, 
0x32, 0x99, 0x3d, 0x14, 0x93, 0xb9, 0xa1, 0x4c, - 0xe6, 0x5f, 0xc8, 0x64, 0xe1, 0x5f, 0x61, 0x52, 0x07, 0x59, 0xd6, 0x83, 0xd3, 0x20, 0x13, 0xa0, - 0x3b, 0x9c, 0xb8, 0x09, 0x9b, 0x35, 0x2b, 0xdf, 0x67, 0xc1, 0x84, 0x08, 0x0a, 0xe2, 0x7b, 0x6d, - 0x82, 0xd9, 0x62, 0x57, 0x79, 0xe6, 0x11, 0xf4, 0xca, 0xc5, 0xf2, 0x11, 0x5b, 0x3e, 0x81, 0x1f, - 0x82, 0xec, 0x32, 0xa2, 0x88, 0x53, 0x3d, 0xbe, 0x38, 0xab, 0x2e, 0x96, 0xcd, 0xc5, 0x9e, 0x59, - 0x73, 0x8c, 0xcd, 0xfd, 0xd0, 0x98, 0x6a, 0x20, 0x8a, 0xde, 0xf2, 0x5a, 0x0e, 0xc5, 0x2d, 0x9f, - 0xee, 0xda, 0x1c, 0x09, 0xdf, 0x05, 0xc5, 0x6b, 0x41, 0xe0, 0x05, 0x6b, 0xbb, 0x3e, 0xe6, 0xae, - 0x29, 0x5a, 0xc7, 0xf6, 0x43, 0x63, 0x06, 0x47, 0x83, 0x0a, 0xa2, 0x67, 0x09, 0xdf, 0x00, 0x39, - 0xde, 0xe1, 0xce, 0x28, 0x5a, 0x33, 0xfb, 0xa1, 0xf1, 0x3f, 0x0e, 0x51, 0xcc, 0x85, 0x45, 0xd2, - 0x77, 0xb9, 0x91, 0x7c, 0x17, 0x4b, 0x28, 0xaf, 0x4a, 0x48, 0x07, 0x85, 0x1d, 0x1c, 0x10, 0x36, - 0x4d, 0x81, 0x8f, 0x47, 0x5d, 0x78, 0x15, 0x00, 0x46, 0x8c, 0x43, 0xa8, 0x53, 0x67, 0x51, 0xc2, - 0xc8, 0x98, 0x34, 0x45, 0x12, 0xb4, 0x31, 0xe9, 0xb8, 0xd4, 0x82, 0x92, 0x05, 0xc5, 0xd0, 0x56, - 0xda, 0xf0, 0xbe, 0x06, 0x0a, 0x35, 0x8c, 0x1a, 0x38, 0x20, 0x7a, 0xb1, 0x9c, 0x59, 0x18, 0x5f, - 0x7c, 0xdd, 0x54, 0x33, 0xde, 0xad, 0xc0, 0x6b, 0x61, 0xba, 0x89, 0x3b, 0x24, 0x72, 0x90, 0xb0, - 0xb6, 0xb6, 0xba, 0xa1, 0xb1, 0x3e, 0x8a, 0x1e, 0x46, 0xca, 0xb2, 0x07, 0xbe, 0x67, 0x3f, 0x34, - 0xb4, 0xb7, 0xed, 0x68, 0x89, 0x95, 0xdf, 0x34, 0xf0, 0x7f, 0xe6, 0xe1, 0x55, 0x36, 0x37, 0x51, - 0x02, 0xb2, 0x85, 0x68, 0x7d, 0x53, 0xd7, 0x98, 0xbc, 0x6d, 0xd1, 0x51, 0x53, 0x60, 0xfa, 0x1f, - 0xa5, 0xc0, 0xcc, 0xe1, 0x53, 0x60, 0x14, 0x85, 0xd9, 0xa1, 0x51, 0x98, 0x3b, 0x28, 0x0a, 0x2b, - 0x5f, 0x67, 0x44, 0xc6, 0x89, 0xbe, 0xef, 0x10, 0x31, 0x71, 0x3d, 0x8e, 0x89, 0x0c, 0x5f, 0x6d, - 0x2c, 0x35, 0x31, 0xd7, 0x4a, 0x03, 0xb7, 0xa9, 0xb3, 0xe1, 0xe0, 0xe0, 0x05, 0x91, 0xa1, 0xc8, - 0x2d, 0x93, 0x94, 0x9b, 0xaa, 0x95, 0xec, 0x2b, 0xaf, 0x95, 0xbe, 0xe8, 0xc8, 0xbd, 0x44, 0x74, - 0x54, 0x9e, 0xa5, 0xc1, 0x1c, 0x73, 0xc7, 0x0d, 0xb4, 0x8e, 0xdd, 0x4f, 0x50, 0xeb, 0x90, 0x2e, - 0x39, 0xa5, 0xb8, 0xa4, 0x68, 0xc1, 0xff, 0x28, 0x1f, 0x81, 0xf2, 0x6f, 0x35, 0x30, 0x16, 0xe5, - 0x70, 0x68, 0x02, 0x20, 0x60, 0x3c, 0x4d, 0x0b, 0xa2, 0xa7, 0x18, 0x38, 0x88, 0x47, 0x6d, 0xc5, - 0x02, 0x7e, 0x01, 0xf2, 0xa2, 0x27, 0xa3, 0xe0, 0x98, 0x12, 0x05, 0x34, 0xc0, 0xa8, 0x75, 0xb5, - 0x81, 0x7c, 0x8a, 0x03, 0xeb, 0x22, 0x5b, 0x45, 0x37, 0x34, 0x4e, 0x3f, 0x8f, 0x22, 0x5e, 0x37, - 0x0a, 0x1c, 0x73, 0xae, 0x78, 0xa7, 0x2d, 0xdf, 0x50, 0xf9, 0x46, 0x03, 0xd3, 0x6c, 0xa1, 0x8c, - 0x9a, 0x58, 0x15, 0xcb, 0x60, 0x2c, 0x90, 0x6d, 0xbe, 0xdc, 0xf1, 0xc5, 0x8a, 0x99, 0xa4, 0x75, - 0x08, 0x95, 0x56, 0xf6, 0x61, 0x68, 0x68, 0x76, 0x8c, 0x84, 0x4b, 0x09, 0x1a, 0xd3, 0xc3, 0x68, - 0x64, 0x90, 0x54, 0x82, 0xb8, 0x1f, 0xd3, 0x00, 0xae, 0xb0, 0x02, 0x9b, 0x89, 0xaf, 0xa7, 0xd3, - 0xce, 0xc0, 0x8a, 0x4e, 0xf4, 0x48, 0x19, 0xb4, 0xb7, 0x2e, 0x77, 0x43, 0xe3, 0xfc, 0xf3, 0x58, - 0x79, 0x0e, 0x58, 0xf9, 0x04, 0x55, 0xb8, 0xe9, 0x57, 0x7f, 0x5f, 0x79, 0x90, 0x06, 0x53, 0x9f, - 0x7a, 0x6e, 0xa7, 0x85, 0x63, 0xe2, 0x5a, 0x03, 0xc4, 0xe9, 0x3d, 0xe2, 0x92, 0xb6, 0xd6, 0xf9, - 0x6e, 0x68, 0x2c, 0x8d, 0x44, 0x5a, 0x12, 0x78, 0x74, 0x09, 0xbb, 0x9f, 0x06, 0xb3, 0x6b, 0x9e, - 0xff, 0xf1, 0x2a, 0x3f, 0x94, 0x29, 0x79, 0x11, 0x0f, 0xd0, 0x36, 0xdb, 0xa3, 0x8d, 0x21, 0x6e, - 0x22, 0x1a, 0x38, 0x77, 0xad, 0xa5, 0x6e, 0x68, 0x54, 0x47, 0xa2, 0xac, 0x07, 0x3a, 0xba, 0x74, - 0xfd, 0x94, 0x06, 0x73, 0xb7, 0x3b, 0xa8, 0x4d, 0x1d, 0x17, 0x0b, 0xca, 
0x62, 0xc2, 0x76, 0x07, - 0x08, 0x2b, 0xf5, 0x08, 0x4b, 0x62, 0x24, 0x75, 0xef, 0x75, 0x43, 0xe3, 0xe2, 0x48, 0xd4, 0x0d, - 0x83, 0x1f, 0x5d, 0x12, 0x7f, 0x48, 0x83, 0xa9, 0x55, 0x51, 0x2f, 0x45, 0x5f, 0x40, 0x86, 0x90, - 0xa7, 0xde, 0x32, 0xf8, 0xeb, 0x66, 0x12, 0x71, 0x88, 0x50, 0x4d, 0x02, 0x8f, 0x2e, 0x6d, 0xbf, - 0xa4, 0xc1, 0xdc, 0x32, 0xa6, 0xb8, 0x4e, 0x71, 0xe3, 0xba, 0x83, 0x5d, 0x85, 0xbe, 0x2f, 0x07, - 0xe8, 0x2b, 0x2b, 0x47, 0x94, 0xa1, 0x18, 0xeb, 0x83, 0x6e, 0x68, 0x5c, 0x1e, 0x89, 0xc0, 0xe1, - 0x13, 0x1c, 0x5d, 0x22, 0x7f, 0xce, 0x81, 0x49, 0x7e, 0xb0, 0x8d, 0xf9, 0x7b, 0x13, 0xc8, 0x92, - 0x4f, 0xb2, 0x07, 0xa3, 0x33, 0x42, 0xe0, 0xd7, 0xcd, 0x55, 0x59, 0x0c, 0x0a, 0x0b, 0x78, 0x01, - 0xe4, 0x09, 0xaf, 0xc4, 0xe5, 0x86, 0x5e, 0xea, 0x3f, 0xb5, 0x26, 0x6b, 0xfe, 0x5a, 0xca, 0x96, - 0xf6, 0xf0, 0x0a, 0xc8, 0xbb, 0xac, 0x00, 0x8d, 0x4e, 0x22, 0x95, 0x7e, 0xe4, 0x60, 0x79, 0xca, - 0xd0, 0x02, 0x03, 0xcf, 0x81, 0x1c, 0xaf, 0x1c, 0xe4, 0x3d, 0x50, 0xe2, 0xb5, 0x83, 0x5b, 0x78, - 0x2d, 0x65, 0x0b, 0x73, 0xb8, 0x08, 0xb2, 0x7e, 0xe0, 0xb5, 0x64, 0x15, 0x77, 0xa2, 0xff, 0x9d, - 0x6a, 0xd9, 0x53, 0x4b, 0xd9, 0xdc, 0x16, 0x9e, 0x65, 0x47, 0x2e, 0x56, 0x2f, 0x11, 0x7e, 0x84, - 0x65, 0x5b, 0x66, 0x1f, 0x4c, 0x81, 0x44, 0xa6, 0xf0, 0x2c, 0xc8, 0xef, 0xf0, 0x6d, 0x51, 0x5e, - 0x3e, 0xcc, 0xab, 0xa0, 0xe4, 0x86, 0xc9, 0xbe, 0x4b, 0xd8, 0xc2, 0xeb, 0x60, 0x82, 0x7a, 0xfe, - 0x56, 0xb4, 0x01, 0xc9, 0xe3, 0x6f, 0x59, 0xc5, 0x0e, 0xdb, 0xa0, 0x6a, 0x29, 0x3b, 0x81, 0x83, - 0xb7, 0xc0, 0xf4, 0x76, 0x22, 0x4d, 0x62, 0xc2, 0x6f, 0xd3, 0xfa, 0x78, 0x1e, 0x9e, 0xbd, 0x6b, - 0x29, 0x7b, 0x00, 0x0d, 0x97, 0xc1, 0x14, 0x49, 0xe4, 0x0e, 0x79, 0x3d, 0x95, 0xf8, 0xae, 0x64, - 0x76, 0xa9, 0xa5, 0xec, 0x3e, 0x0c, 0xbc, 0x01, 0xa6, 0x1a, 0x89, 0x00, 0xd2, 0xc7, 0x07, 0x57, - 0x35, 0x3c, 0xc4, 0xd8, 0x6c, 0x49, 0xac, 0x05, 0x7a, 0x91, 0x5e, 0xf9, 0x2b, 0x0b, 0x26, 0xa4, - 0x8e, 0xc5, 0xf9, 0xf9, 0x7c, 0x2c, 0x4d, 0x21, 0xe3, 0x93, 0x07, 0x49, 0x93, 0x9b, 0x2b, 0xca, - 0x7c, 0x27, 0x56, 0xa6, 0xd0, 0xf4, 0x5c, 0x2f, 0x7b, 0x70, 0x4d, 0x2a, 0x08, 0xa9, 0xc6, 0xa5, - 0x48, 0x8d, 0x42, 0xca, 0xc7, 0x87, 0xd7, 0xa2, 0x11, 0x4a, 0x4a, 0xf1, 0x12, 0x28, 0x38, 0xe2, - 0x0a, 0x6e, 0x98, 0x88, 0x07, 0x6f, 0xe8, 0x98, 0xb8, 0x24, 0x00, 0x2e, 0xf5, 0x24, 0x29, 0x94, - 0x7c, 0x6c, 0x50, 0x92, 0x31, 0x28, 0x52, 0xe4, 0x99, 0x58, 0x91, 0x79, 0x89, 0x19, 0x28, 0xe0, - 0xe2, 0x0f, 0x93, 0x72, 0xbc, 0x06, 0x26, 0x23, 0x07, 0xf2, 0x47, 0x52, 0x8f, 0x27, 0x0f, 0xda, - 0x8e, 0x22, 0x7c, 0x12, 0x05, 0x57, 0x06, 0xbc, 0x2e, 0xb4, 0x68, 0x1c, 0x9c, 0x97, 0xa3, 0x99, - 0xfa, 0x80, 0xb0, 0x06, 0xc6, 0x5a, 0x98, 0x22, 0x76, 0xe2, 0xd4, 0x0b, 0x3c, 0xbb, 0x9e, 0x4a, - 0x0a, 0xba, 0xa7, 0x00, 0xf3, 0xa6, 0x34, 0xbc, 0xd6, 0xa6, 0xc1, 0xae, 0x3c, 0x5c, 0xc4, 0xe8, - 0xf9, 0xcb, 0x60, 0x32, 0x61, 0x00, 0xa7, 0x41, 0x66, 0x0b, 0x47, 0xf7, 0x9f, 0xac, 0x09, 0x67, - 0x41, 0x6e, 0x07, 0xb9, 0x1d, 0xcc, 0x85, 0x50, 0xb4, 0x45, 0xe7, 0x52, 0xfa, 0x82, 0x66, 0x15, - 0x41, 0x21, 0x10, 0x6f, 0xb1, 0x1a, 0x8f, 0x9e, 0x94, 0x52, 0x8f, 0x9f, 0x94, 0x52, 0xcf, 0x9e, - 0x94, 0xb4, 0xaf, 0xf6, 0x4a, 0xda, 0x77, 0x7b, 0x25, 0xed, 0xe1, 0x5e, 0x49, 0x7b, 0xb4, 0x57, - 0xd2, 0xfe, 0xd8, 0x2b, 0x69, 0x7f, 0xee, 0x95, 0x52, 0xcf, 0xf6, 0x4a, 0xda, 0xbd, 0xa7, 0xa5, - 0xd4, 0xa3, 0xa7, 0xa5, 0xd4, 0xe3, 0xa7, 0xa5, 0xd4, 0x67, 0xe6, 0xe1, 0x12, 0xfd, 0x7a, 0x9e, - 0xd3, 0xb4, 0xf4, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x87, 0x26, 0x7a, 0x65, 0x1c, 0x19, 0x00, - 0x00, + // 1712 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x02, 0xff, 0xec, 0x58, 0x4d, 0x6f, 0xdb, 0x46, + 0x1a, 0x16, 0xf5, 0x69, 0x8d, 0x3f, 0xd6, 0x3b, 0x36, 0x1c, 0xae, 0x93, 0x88, 0x82, 0x80, 0x4d, + 0xbc, 0x8b, 0x5d, 0x6a, 0x63, 0x67, 0xf3, 0xbd, 0xdb, 0x86, 0x75, 0x02, 0x19, 0x4d, 0x8a, 0x84, + 0x36, 0x7a, 0x28, 0x7a, 0x19, 0x4b, 0x63, 0x99, 0x35, 0x25, 0xd2, 0x9c, 0x91, 0x13, 0x1f, 0x0a, + 0xf4, 0x07, 0xb4, 0x40, 0x80, 0xfe, 0x87, 0xa2, 0x40, 0x83, 0x9c, 0x7a, 0xea, 0xad, 0x3d, 0xb4, + 0x39, 0xe6, 0x18, 0x08, 0xa8, 0xda, 0x38, 0x97, 0xc2, 0xa7, 0xfc, 0x84, 0x62, 0x3e, 0x48, 0x0d, + 0x45, 0x39, 0x91, 0x52, 0x14, 0x88, 0x8b, 0x5e, 0xa4, 0xf9, 0x78, 0x9f, 0xe1, 0xf0, 0x79, 0x9f, + 0xf7, 0xe5, 0x3b, 0x03, 0xce, 0xfa, 0x3b, 0xcd, 0xea, 0x6e, 0x07, 0x07, 0x0e, 0x0e, 0xf8, 0xff, + 0x7e, 0x80, 0xda, 0x4d, 0xac, 0x34, 0x4d, 0x3f, 0xf0, 0xa8, 0x07, 0x41, 0x7f, 0x64, 0x71, 0xb9, + 0xe9, 0xd0, 0xed, 0xce, 0xa6, 0x59, 0xf7, 0x5a, 0xd5, 0xa6, 0xd7, 0xf4, 0xaa, 0x4d, 0xcf, 0x6b, + 0xba, 0x18, 0xf9, 0x0e, 0x91, 0xcd, 0x6a, 0xe0, 0xd7, 0xab, 0x84, 0x22, 0xda, 0x21, 0x02, 0xbf, + 0x38, 0xcf, 0x0c, 0x79, 0x93, 0x43, 0xe4, 0xa8, 0x21, 0xcd, 0x79, 0x6f, 0xb3, 0xb3, 0x55, 0xa5, + 0x4e, 0x0b, 0x13, 0x8a, 0x5a, 0x7e, 0x68, 0xc0, 0xf6, 0xe7, 0x7a, 0x4d, 0x81, 0x74, 0xda, 0x0d, + 0x7c, 0xbf, 0x89, 0x28, 0xbe, 0x87, 0xf6, 0xa5, 0xc1, 0xc9, 0x98, 0x41, 0xd8, 0x90, 0x93, 0x7f, + 0x8b, 0x4d, 0x92, 0x1d, 0x4c, 0xeb, 0xdb, 0x72, 0xaa, 0x2c, 0xa7, 0x76, 0xdd, 0x96, 0xd7, 0xc0, + 0x2e, 0xdf, 0x2c, 0x11, 0xbf, 0xd2, 0x62, 0x8e, 0x59, 0xf8, 0x1d, 0xb2, 0xcd, 0x7f, 0xe4, 0xe0, + 0x3b, 0xaf, 0xe4, 0x6b, 0x13, 0x11, 0x5c, 0x6d, 0xe0, 0x2d, 0xa7, 0xed, 0x50, 0xc7, 0x6b, 0x13, + 0xb5, 0x2d, 0x17, 0xb9, 0x30, 0xda, 0x22, 0x83, 0x3e, 0xa8, 0x3c, 0xca, 0x80, 0xc9, 0x5b, 0xde, + 0x8e, 0x63, 0xe3, 0xdd, 0x0e, 0x26, 0x14, 0xce, 0x83, 0x1c, 0xb7, 0xd1, 0xb5, 0xb2, 0xb6, 0x54, + 0xb4, 0x45, 0x87, 0x8d, 0xba, 0x4e, 0xcb, 0xa1, 0x7a, 0xba, 0xac, 0x2d, 0x4d, 0xdb, 0xa2, 0x03, + 0x21, 0xc8, 0x12, 0x8a, 0x7d, 0x3d, 0x53, 0xd6, 0x96, 0x32, 0x36, 0x6f, 0xc3, 0x45, 0x30, 0xe1, + 0xb4, 0x29, 0x0e, 0xf6, 0x90, 0xab, 0x17, 0xf9, 0x78, 0xd4, 0x87, 0xff, 0x07, 0x05, 0x42, 0x51, + 0x40, 0x37, 0x88, 0x9e, 0x2d, 0x6b, 0x4b, 0x93, 0xcb, 0x8b, 0xa6, 0xf0, 0x95, 0x19, 0xfa, 0xca, + 0xdc, 0x08, 0x7d, 0x65, 0x4d, 0x3c, 0xee, 0x19, 0xa9, 0x07, 0x3f, 0x19, 0x9a, 0x1d, 0x82, 0xe0, + 0x15, 0x90, 0xc3, 0xed, 0xc6, 0x06, 0xd1, 0x73, 0x63, 0xa0, 0x05, 0x04, 0x9e, 0x03, 0xc5, 0x86, + 0x13, 0xe0, 0x3a, 0xe3, 0x4c, 0xcf, 0x97, 0xb5, 0xa5, 0x99, 0xe5, 0x39, 0x33, 0x72, 0xed, 0x6a, + 0x38, 0x65, 0xf7, 0xad, 0xd8, 0xeb, 0xf9, 0x88, 0x6e, 0xeb, 0x05, 0xce, 0x04, 0x6f, 0xc3, 0x0a, + 0xc8, 0x93, 0x6d, 0x14, 0x34, 0x88, 0x3e, 0x51, 0xce, 0x2c, 0x15, 0x2d, 0x70, 0xd8, 0x33, 0xe4, + 0x88, 0x2d, 0xff, 0xe1, 0x87, 0x20, 0xeb, 0xbb, 0xa8, 0xad, 0x03, 0xbe, 0xcb, 0x59, 0x53, 0xe1, + 0xfc, 0x8e, 0x8b, 0xda, 0xd6, 0x85, 0x6e, 0xcf, 0x88, 0xc9, 0x3d, 0x40, 0x5b, 0xa8, 0x8d, 0xaa, + 0xae, 0xb7, 0xe3, 0x54, 0x55, 0x37, 0xb2, 0x55, 0xcc, 0xbb, 0x0c, 0xcd, 0x70, 0x36, 0x5f, 0xb5, + 0xf2, 0x43, 0x1a, 0x40, 0xe6, 0xb0, 0xb5, 0x36, 0xa1, 0xa8, 0x4d, 0x5f, 0xc7, 0x6f, 0xd7, 0x40, + 0x9e, 0xc5, 0xc4, 0x06, 0xe1, 0x9e, 0x1b, 0x95, 0x48, 0x89, 0x89, 0x33, 0x99, 0x1d, 0x8b, 0xc9, + 0xdc, 0x50, 0x26, 0xf3, 0xaf, 0x64, 0xb2, 0xf0, 0xbb, 0x30, 0xa9, 0x83, 0x2c, 0xeb, 0xc1, 0x59, + 0x90, 0x09, 0xd0, 0x3d, 0x4e, 0xdc, 0x94, 0xcd, 0x9a, 0x95, 0xaf, 0xb2, 0x60, 0x4a, 0x04, 0x05, + 0xf1, 0xbd, 0x36, 0xc1, 0x6c, 0xb3, 0xeb, 0x3c, 0xf3, 0x08, 0x7a, 0xe5, 0x66, 0xf9, 0x88, 0x2d, + 0x67, 0xe0, 0xdb, 0x20, 0xbb, 0x8a, 0x28, 0xe2, 0x54, 0x4f, 0x2e, 
0xcf, 0xab, 0x9b, 0x65, 0x6b, + 0xb1, 0x39, 0x6b, 0x81, 0xb1, 0x79, 0xd8, 0x33, 0x66, 0x1a, 0x88, 0xa2, 0x7f, 0x79, 0x2d, 0x87, + 0xe2, 0x96, 0x4f, 0xf7, 0x6d, 0x8e, 0x84, 0xff, 0x05, 0xc5, 0x1b, 0x41, 0xe0, 0x05, 0x1b, 0xfb, + 0x3e, 0xe6, 0xae, 0x29, 0x5a, 0x27, 0x0e, 0x7b, 0xc6, 0x1c, 0x0e, 0x07, 0x15, 0x44, 0xdf, 0x12, + 0xfe, 0x03, 0xe4, 0x78, 0x87, 0x3b, 0xa3, 0x68, 0xcd, 0x1d, 0xf6, 0x8c, 0xbf, 0x70, 0x88, 0x62, + 0x2e, 0x2c, 0xe2, 0xbe, 0xcb, 0x8d, 0xe4, 0xbb, 0x48, 0x42, 0x79, 0x55, 0x42, 0x3a, 0x28, 0xec, + 0xe1, 0x80, 0xb0, 0x65, 0x0a, 0x7c, 0x3c, 0xec, 0xc2, 0xeb, 0x00, 0x30, 0x62, 0x1c, 0x42, 0x9d, + 0x3a, 0x8b, 0x12, 0x46, 0xc6, 0xb4, 0x29, 0x92, 0xa0, 0x8d, 0x49, 0xc7, 0xa5, 0x16, 0x94, 0x2c, + 0x28, 0x86, 0xb6, 0xd2, 0x86, 0x0f, 0x35, 0x50, 0xa8, 0x61, 0xd4, 0xc0, 0x01, 0xd1, 0x8b, 0xe5, + 0xcc, 0xd2, 0xe4, 0xf2, 0xdf, 0x4d, 0x35, 0xe3, 0xdd, 0x09, 0xbc, 0x16, 0xa6, 0xdb, 0xb8, 0x43, + 0x42, 0x07, 0x09, 0x6b, 0x6b, 0xa7, 0xdb, 0x33, 0x36, 0x47, 0xd1, 0xc3, 0x48, 0x59, 0xf6, 0xc8, + 0xe7, 0x1c, 0xf6, 0x0c, 0xed, 0xdf, 0x76, 0xb8, 0xc5, 0xca, 0x8f, 0x1a, 0xf8, 0x2b, 0xf3, 0xf0, + 0x3a, 0x5b, 0x9b, 0x28, 0x01, 0xd9, 0x42, 0xb4, 0xbe, 0xad, 0x6b, 0x4c, 0xde, 0xb6, 0xe8, 0xa8, + 0x29, 0x30, 0xfd, 0x9b, 0x52, 0x60, 0x66, 0xfc, 0x14, 0x18, 0x46, 0x61, 0x76, 0x68, 0x14, 0xe6, + 0x8e, 0x8a, 0xc2, 0xca, 0xa7, 0x19, 0x91, 0x71, 0xc2, 0xf7, 0x1b, 0x23, 0x26, 0x6e, 0x46, 0x31, + 0x91, 0xe1, 0xbb, 0x8d, 0xa4, 0x26, 0xd6, 0x5a, 0x6b, 0xe0, 0x36, 0x75, 0xb6, 0x1c, 0x1c, 0xbc, + 0x22, 0x32, 0x14, 0xb9, 0x65, 0xe2, 0x72, 0x53, 0xb5, 0x92, 0x7d, 0xe3, 0xb5, 0x32, 0x10, 0x1d, + 0xb9, 0xd7, 0x88, 0x8e, 0xca, 0x8b, 0x34, 0x58, 0x60, 0xee, 0xb8, 0x85, 0x36, 0xb1, 0xfb, 0x1e, + 0x6a, 0x8d, 0xe9, 0x92, 0x33, 0x8a, 0x4b, 0x8a, 0x16, 0xfc, 0x93, 0xf2, 0x11, 0x28, 0xff, 0x42, + 0x03, 0x13, 0x61, 0x0e, 0x87, 0x26, 0x00, 0x02, 0xc6, 0xd3, 0xb4, 0x20, 0x7a, 0x86, 0x81, 0x83, + 0x68, 0xd4, 0x56, 0x2c, 0xe0, 0x47, 0x20, 0x2f, 0x7a, 0x32, 0x0a, 0x4e, 0x28, 0x51, 0x40, 0x03, + 0x8c, 0x5a, 0xd7, 0x1b, 0xc8, 0xa7, 0x38, 0xb0, 0x2e, 0xb3, 0x5d, 0x74, 0x7b, 0xc6, 0xd9, 0x97, + 0x51, 0xc4, 0xeb, 0x46, 0x81, 0x63, 0xce, 0x15, 0xcf, 0xb4, 0xe5, 0x13, 0x2a, 0x9f, 0x69, 0x60, + 0x96, 0x6d, 0x94, 0x51, 0x13, 0xa9, 0x62, 0x15, 0x4c, 0x04, 0xb2, 0xcd, 0xb7, 0x3b, 0xb9, 0x5c, + 0x31, 0xe3, 0xb4, 0x0e, 0xa1, 0xd2, 0xca, 0x3e, 0xee, 0x19, 0x9a, 0x1d, 0x21, 0xe1, 0x4a, 0x8c, + 0xc6, 0xf4, 0x30, 0x1a, 0x19, 0x24, 0x15, 0x23, 0xee, 0x9b, 0x34, 0x80, 0x6b, 0xac, 0xc0, 0x66, + 0xe2, 0xeb, 0xeb, 0xb4, 0x93, 0xd8, 0xd1, 0xa9, 0x3e, 0x29, 0x49, 0x7b, 0xeb, 0x6a, 0xb7, 0x67, + 0x5c, 0x7c, 0x19, 0x2b, 0x2f, 0x01, 0x2b, 0xaf, 0xa0, 0x0a, 0x37, 0xfd, 0xe6, 0x7f, 0x57, 0x1e, + 0xa5, 0xc1, 0xcc, 0xfb, 0x9e, 0xdb, 0x69, 0xe1, 0x88, 0xb8, 0x56, 0x82, 0x38, 0xbd, 0x4f, 0x5c, + 0xdc, 0xd6, 0xba, 0xd8, 0xed, 0x19, 0x2b, 0x23, 0x91, 0x16, 0x07, 0x1e, 0x5f, 0xc2, 0x1e, 0xa6, + 0xc1, 0xfc, 0x86, 0xe7, 0xbf, 0xbb, 0xce, 0x0f, 0x65, 0x4a, 0x5e, 0xc4, 0x09, 0xda, 0xe6, 0xfb, + 0xb4, 0x31, 0xc4, 0x6d, 0x44, 0x03, 0xe7, 0xbe, 0xb5, 0xd2, 0xed, 0x19, 0xd5, 0x91, 0x28, 0xeb, + 0x83, 0x8e, 0x2f, 0x5d, 0xdf, 0xa5, 0xc1, 0xc2, 0xdd, 0x0e, 0x6a, 0x53, 0xc7, 0xc5, 0x82, 0xb2, + 0x88, 0xb0, 0xfd, 0x04, 0x61, 0xa5, 0x3e, 0x61, 0x71, 0x8c, 0xa4, 0xee, 0x7f, 0xdd, 0x9e, 0x71, + 0x79, 0x24, 0xea, 0x86, 0xc1, 0x8f, 0x2f, 0x89, 0x5f, 0xa7, 0xc1, 0xcc, 0xba, 0xa8, 0x97, 0xc2, + 0x37, 0x20, 0x43, 0xc8, 0x53, 0x6f, 0x19, 0xfc, 0x4d, 0x33, 0x8e, 0x18, 0x23, 0x54, 0xe3, 0xc0, + 0xe3, 0x4b, 0xdb, 0xf7, 0x69, 0xb0, 0xb0, 0x8a, 0x29, 0xae, 0x53, 0xdc, 0xb8, 0xe9, 0x60, 
0x57, + 0xa1, 0xef, 0xe3, 0x04, 0x7d, 0x65, 0xe5, 0x88, 0x32, 0x14, 0x63, 0xbd, 0xd5, 0xed, 0x19, 0x57, + 0x47, 0x22, 0x70, 0xf8, 0x02, 0x7f, 0x0c, 0x22, 0x79, 0x45, 0x38, 0x26, 0x91, 0x71, 0xcc, 0x6b, + 0x10, 0x19, 0x5f, 0xe0, 0xf8, 0x12, 0xf9, 0x79, 0x1e, 0x4c, 0xf3, 0x1b, 0x82, 0x88, 0xbf, 0x7f, + 0x02, 0x59, 0x3b, 0x4b, 0xf6, 0x60, 0x78, 0xd8, 0x0a, 0xfc, 0xba, 0xb9, 0x2e, 0xab, 0x6a, 0x61, + 0x01, 0x2f, 0x81, 0x3c, 0xe1, 0x47, 0x1a, 0x59, 0x19, 0x95, 0x06, 0x8f, 0xff, 0xf1, 0xc3, 0x53, + 0x2d, 0x65, 0x4b, 0x7b, 0x78, 0x0d, 0xe4, 0x5d, 0x4e, 0xa1, 0x3c, 0xd2, 0x55, 0x06, 0x91, 0xc9, + 0x3a, 0x9f, 0xa1, 0x05, 0x06, 0x5e, 0x00, 0x39, 0x5e, 0x82, 0xc9, 0x0b, 0xb5, 0xd8, 0x63, 0x93, + 0xb5, 0x50, 0x2d, 0x65, 0x0b, 0x73, 0xb8, 0x0c, 0xb2, 0x7e, 0xe0, 0xb5, 0x64, 0x39, 0x7c, 0x6a, + 0xf0, 0x99, 0x6a, 0xfd, 0x58, 0x4b, 0xd9, 0xdc, 0x16, 0x9e, 0x67, 0x67, 0x57, 0x56, 0x78, 0x12, + 0x7e, 0x17, 0xc0, 0x6a, 0x8f, 0x01, 0x98, 0x02, 0x09, 0x4d, 0xe1, 0x79, 0x90, 0xdf, 0xe3, 0xf5, + 0x85, 0xbc, 0xc5, 0x59, 0x54, 0x41, 0xf1, 0xca, 0x83, 0xbd, 0x97, 0xb0, 0x85, 0x37, 0xc1, 0x14, + 0xf5, 0xfc, 0x9d, 0xf0, 0x4b, 0x2e, 0xef, 0x11, 0xca, 0x2a, 0x76, 0xd8, 0x97, 0xbe, 0x96, 0xb2, + 0x63, 0x38, 0x78, 0x07, 0xcc, 0xee, 0xc6, 0xbe, 0x37, 0x98, 0xf0, 0x6b, 0xc9, 0x01, 0x9e, 0x87, + 0x7f, 0x06, 0x6b, 0x29, 0x3b, 0x81, 0x86, 0xab, 0x60, 0x86, 0xc4, 0x92, 0xb0, 0xbc, 0xe7, 0x8b, + 0xbd, 0x57, 0x3c, 0x4d, 0xd7, 0x52, 0xf6, 0x00, 0x06, 0xde, 0x02, 0x33, 0x8d, 0x58, 0x26, 0xd2, + 0x27, 0x93, 0xbb, 0x1a, 0x9e, 0xab, 0xd8, 0x6a, 0x71, 0xac, 0xba, 0x9a, 0x08, 0x47, 0x7d, 0xea, + 0xe8, 0xd5, 0xe2, 0x01, 0xab, 0xae, 0x26, 0x66, 0x2c, 0xd0, 0xcf, 0x1b, 0x95, 0x6f, 0x73, 0x60, + 0x4a, 0x46, 0x85, 0xb8, 0xd6, 0xb8, 0x18, 0x09, 0x5d, 0x04, 0xc5, 0xe9, 0xa3, 0x84, 0xce, 0xcd, + 0x15, 0x9d, 0xff, 0x27, 0xd2, 0xb9, 0x88, 0x90, 0x85, 0x7e, 0x2e, 0xe2, 0xcf, 0x55, 0x10, 0x52, + 0xdb, 0x2b, 0xa1, 0xb6, 0x45, 0x60, 0x9c, 0x1c, 0x7e, 0x44, 0x08, 0x51, 0x52, 0xd8, 0x57, 0x40, + 0xc1, 0x11, 0x37, 0xa3, 0xc3, 0x42, 0x22, 0x79, 0x71, 0xca, 0xa4, 0x2a, 0x01, 0x70, 0xa5, 0x2f, + 0x70, 0x11, 0x17, 0x27, 0x92, 0x02, 0x8f, 0x40, 0xa1, 0xbe, 0xcf, 0x45, 0xfa, 0xce, 0x4b, 0x4c, + 0xa2, 0xae, 0x8e, 0x5e, 0x4c, 0x8a, 0xfb, 0x06, 0x98, 0x0e, 0xe5, 0xc0, 0xa7, 0xa4, 0xba, 0x4f, + 0x1f, 0x55, 0x25, 0x84, 0xf8, 0x38, 0x0a, 0xae, 0x25, 0x34, 0x24, 0x94, 0x6d, 0x1c, 0xfd, 0xb9, + 0x0c, 0x57, 0x1a, 0x14, 0xd0, 0x5a, 0x42, 0x40, 0xe0, 0xa8, 0xa5, 0x42, 0xf9, 0x24, 0x96, 0x12, + 0x13, 0xb0, 0x06, 0x26, 0x5a, 0x98, 0xa2, 0x06, 0xa2, 0x48, 0x2f, 0xf0, 0xb4, 0x7f, 0x26, 0x1e, + 0x69, 0x7d, 0x31, 0x99, 0xb7, 0xa5, 0xe1, 0x8d, 0x36, 0x0d, 0xf6, 0xe5, 0xf1, 0x31, 0x42, 0x2f, + 0x5e, 0x05, 0xd3, 0x31, 0x03, 0x38, 0x0b, 0x32, 0x3b, 0x38, 0xbc, 0xe1, 0x66, 0x4d, 0x38, 0x0f, + 0x72, 0x7b, 0xc8, 0xed, 0x60, 0xae, 0xa9, 0xa2, 0x2d, 0x3a, 0x57, 0xd2, 0x97, 0x34, 0xab, 0x08, + 0x0a, 0x81, 0x78, 0x8a, 0xd5, 0x78, 0xf2, 0xac, 0x94, 0x7a, 0xfa, 0xac, 0x94, 0x7a, 0xf1, 0xac, + 0xa4, 0x7d, 0x72, 0x50, 0xd2, 0xbe, 0x3c, 0x28, 0x69, 0x8f, 0x0f, 0x4a, 0xda, 0x93, 0x83, 0x92, + 0xf6, 0xf3, 0x41, 0x49, 0xfb, 0xe5, 0xa0, 0x94, 0x7a, 0x71, 0x50, 0xd2, 0x1e, 0x3c, 0x2f, 0xa5, + 0x9e, 0x3c, 0x2f, 0xa5, 0x9e, 0x3e, 0x2f, 0xa5, 0x3e, 0x30, 0xc7, 0xfb, 0x02, 0x6d, 0xe6, 0x39, + 0x4d, 0x2b, 0xbf, 0x06, 0x00, 0x00, 0xff, 0xff, 0x7c, 0x1e, 0xb8, 0x33, 0xfe, 0x1a, 0x00, 0x00, } func (this *LokiRequest) Equal(that interface{}) bool { @@ -2008,6 +2074,42 @@ func (this *DetectedFieldsResponse) Equal(that interface{}) bool { } return true } +func (this *DetectedLabelsResponse) Equal(that 
interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*DetectedLabelsResponse) + if !ok { + that2, ok := that.(DetectedLabelsResponse) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if that1.Response == nil { + if this.Response != nil { + return false + } + } else if !this.Response.Equal(*that1.Response) { + return false + } + if len(this.Headers) != len(that1.Headers) { + return false + } + for i := range this.Headers { + if !this.Headers[i].Equal(that1.Headers[i]) { + return false + } + } + return true +} func (this *QueryResponse) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2281,6 +2383,30 @@ func (this *QueryResponse_DetectedFields) Equal(that interface{}) bool { } return true } +func (this *QueryResponse_DetectedLabels) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryResponse_DetectedLabels) + if !ok { + that2, ok := that.(QueryResponse_DetectedLabels) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedLabels.Equal(that1.DetectedLabels) { + return false + } + return true +} func (this *QueryRequest) Equal(that interface{}) bool { if that == nil { return this == nil @@ -2511,6 +2637,30 @@ func (this *QueryRequest_DetectedFields) Equal(that interface{}) bool { } return true } +func (this *QueryRequest_DetectedLabels) Equal(that interface{}) bool { + if that == nil { + return this == nil + } + + that1, ok := that.(*QueryRequest_DetectedLabels) + if !ok { + that2, ok := that.(QueryRequest_DetectedLabels) + if ok { + that1 = &that2 + } else { + return false + } + } + if that1 == nil { + return this == nil + } else if this == nil { + return false + } + if !this.DetectedLabels.Equal(that1.DetectedLabels) { + return false + } + return true +} func (this *LokiRequest) GoString() string { if this == nil { return "nil" @@ -2712,11 +2862,22 @@ func (this *DetectedFieldsResponse) GoString() string { s = append(s, "}") return strings.Join(s, "") } +func (this *DetectedLabelsResponse) GoString() string { + if this == nil { + return "nil" + } + s := make([]string, 0, 6) + s = append(s, "&queryrange.DetectedLabelsResponse{") + s = append(s, "Response: "+fmt.Sprintf("%#v", this.Response)+",\n") + s = append(s, "Headers: "+fmt.Sprintf("%#v", this.Headers)+",\n") + s = append(s, "}") + return strings.Join(s, "") +} func (this *QueryResponse) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 15) + s := make([]string, 0, 16) s = append(s, "&queryrange.QueryResponse{") if this.Status != nil { s = append(s, "Status: "+fmt.Sprintf("%#v", this.Status)+",\n") @@ -2807,11 +2968,19 @@ func (this *QueryResponse_DetectedFields) GoString() string { `DetectedFields:` + fmt.Sprintf("%#v", this.DetectedFields) + `}`}, ", ") return s } +func (this *QueryResponse_DetectedLabels) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryResponse_DetectedLabels{` + + `DetectedLabels:` + fmt.Sprintf("%#v", this.DetectedLabels) + `}`}, ", ") + return s +} func (this *QueryRequest) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 13) + s := make([]string, 0, 14) s = append(s, "&queryrange.QueryRequest{") if this.Request != nil { s = append(s, "Request: "+fmt.Sprintf("%#v", this.Request)+",\n") @@ 
-2896,6 +3065,14 @@ func (this *QueryRequest_DetectedFields) GoString() string { `DetectedFields:` + fmt.Sprintf("%#v", this.DetectedFields) + `}`}, ", ") return s } +func (this *QueryRequest_DetectedLabels) GoString() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&queryrange.QueryRequest_DetectedLabels{` + + `DetectedLabels:` + fmt.Sprintf("%#v", this.DetectedLabels) + `}`}, ", ") + return s +} func valueToGoStringQueryrange(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -3785,6 +3962,55 @@ func (m *DetectedFieldsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) return len(dAtA) - i, nil } +func (m *DetectedLabelsResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DetectedLabelsResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DetectedLabelsResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Headers) > 0 { + for iNdEx := len(m.Headers) - 1; iNdEx >= 0; iNdEx-- { + { + size := m.Headers[iNdEx].Size() + i -= size + if _, err := m.Headers[iNdEx].MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.Response != nil { + { + size := m.Response.Size() + i -= size + if _, err := m.Response.MarshalTo(dAtA[i:]); err != nil { + return 0, err + } + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *QueryResponse) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -4029,6 +4255,26 @@ func (m *QueryResponse_DetectedFields) MarshalToSizedBuffer(dAtA []byte) (int, e } return len(dAtA) - i, nil } +func (m *QueryResponse_DetectedLabels) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryResponse_DetectedLabels) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedLabels != nil { + { + size, err := m.DetectedLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x62 + } + return len(dAtA) - i, nil +} func (m *QueryRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -4240,6 +4486,26 @@ func (m *QueryRequest_DetectedFields) MarshalToSizedBuffer(dAtA []byte) (int, er } return len(dAtA) - i, nil } +func (m *QueryRequest_DetectedLabels) MarshalTo(dAtA []byte) (int, error) { + return m.MarshalToSizedBuffer(dAtA[:m.Size()]) +} + +func (m *QueryRequest_DetectedLabels) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + if m.DetectedLabels != nil { + { + size, err := m.DetectedLabels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQueryrange(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } + return len(dAtA) - i, nil +} func encodeVarintQueryrange(dAtA []byte, offset int, v uint64) int { offset -= sovQueryrange(v) base := offset @@ -4619,6 +4885,25 @@ func (m *DetectedFieldsResponse) Size() (n int) { return n } +func (m *DetectedLabelsResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Response != nil { + l = 
m.Response.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + if len(m.Headers) > 0 { + for _, e := range m.Headers { + l = e.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + } + return n +} + func (m *QueryResponse) Size() (n int) { if m == nil { return 0 @@ -4755,6 +5040,18 @@ func (m *QueryResponse_DetectedFields) Size() (n int) { } return n } +func (m *QueryResponse_DetectedLabels) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedLabels != nil { + l = m.DetectedLabels.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func (m *QueryRequest) Size() (n int) { if m == nil { return 0 @@ -4871,6 +5168,18 @@ func (m *QueryRequest_DetectedFields) Size() (n int) { } return n } +func (m *QueryRequest_DetectedLabels) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.DetectedLabels != nil { + l = m.DetectedLabels.Size() + n += 1 + l + sovQueryrange(uint64(l)) + } + return n +} func sovQueryrange(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 @@ -5076,6 +5385,17 @@ func (this *DetectedFieldsResponse) String() string { }, "") return s } +func (this *DetectedLabelsResponse) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&DetectedLabelsResponse{`, + `Response:` + fmt.Sprintf("%v", this.Response) + `,`, + `Headers:` + fmt.Sprintf("%v", this.Headers) + `,`, + `}`, + }, "") + return s +} func (this *QueryResponse) String() string { if this == nil { return "nil" @@ -5187,6 +5507,16 @@ func (this *QueryResponse_DetectedFields) String() string { }, "") return s } +func (this *QueryResponse_DetectedLabels) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryResponse_DetectedLabels{`, + `DetectedLabels:` + strings.Replace(fmt.Sprintf("%v", this.DetectedLabels), "DetectedLabelsResponse", "DetectedLabelsResponse", 1) + `,`, + `}`, + }, "") + return s +} func (this *QueryRequest) String() string { if this == nil { return "nil" @@ -5288,6 +5618,16 @@ func (this *QueryRequest_DetectedFields) String() string { }, "") return s } +func (this *QueryRequest_DetectedLabels) String() string { + if this == nil { + return "nil" + } + s := strings.Join([]string{`&QueryRequest_DetectedLabels{`, + `DetectedLabels:` + strings.Replace(fmt.Sprintf("%v", this.DetectedLabels), "DetectedLabelsRequest", "logproto.DetectedLabelsRequest", 1) + `,`, + `}`, + }, "") + return s +} func valueToStringQueryrange(v interface{}) string { rv := reflect.ValueOf(v) if rv.IsNil() { @@ -7874,6 +8214,129 @@ func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { } return nil } +func (m *DetectedLabelsResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: DetectedLabelsResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: DetectedLabelsResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Response", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Response == nil { + m.Response = &github_com_grafana_loki_pkg_logproto.DetectedLabelsResponse{} + } + if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Headers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQueryrange(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthQueryrange + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *QueryResponse) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -8289,6 +8752,41 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { } m.Response = &QueryResponse_DetectedFields{v} iNdEx = postIndex + case 12: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedLabels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + v := &DetectedLabelsResponse{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Response = &QueryResponse_DetectedLabels{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) @@ -8749,6 +9247,41 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { } m.Request = &QueryRequest_DetectedFields{v} iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field DetectedLabels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQueryrange + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQueryrange + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQueryrange + } + if postIndex 
> l { + return io.ErrUnexpectedEOF + } + v := &logproto.DetectedLabelsRequest{} + if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + m.Request = &QueryRequest_DetectedLabels{v} + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipQueryrange(dAtA[iNdEx:]) diff --git a/pkg/querier/queryrange/queryrange.proto b/pkg/querier/queryrange/queryrange.proto index 33ae8e90357bc..2513d1debb5e8 100644 --- a/pkg/querier/queryrange/queryrange.proto +++ b/pkg/querier/queryrange/queryrange.proto @@ -183,6 +183,14 @@ message DetectedFieldsResponse { ]; } +message DetectedLabelsResponse { + logproto.DetectedLabelsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.DetectedLabelsResponse"]; + repeated definitions.PrometheusResponseHeader Headers = 2 [ + (gogoproto.jsontag) = "-", + (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + ]; +} + message QueryResponse { google.rpc.Status status = 1; oneof response { @@ -196,6 +204,7 @@ message QueryResponse { QuantileSketchResponse quantileSketches = 9; ShardsResponse shardsResponse = 10; DetectedFieldsResponse detectedFields = 11; + DetectedLabelsResponse detectedLabels = 12; } } @@ -209,6 +218,7 @@ message QueryRequest { logproto.VolumeRequest volume = 6; indexgatewaypb.ShardsRequest shardsRequest = 8; logproto.DetectedFieldsRequest detectedFields = 9; + logproto.DetectedLabelsRequest detectedLabels = 10; } map metadata = 7 [(gogoproto.nullable) = false]; } diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 3d1a5daf1afb4..fed95d0110aae 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -246,22 +246,23 @@ func NewMiddleware( statsRT = indexStatsTripperware.Wrap(next) seriesVolumeRT = seriesVolumeTripperware.Wrap(next) detectedFieldsRT = next //TODO(twhitney): add middlewares for detected fields + detectedLabelsRT = next // TODO(shantanu): add middlewares ) - return newRoundTripper(log, next, limitedRT, logFilterRT, metricRT, seriesRT, labelsRT, instantRT, statsRT, seriesVolumeRT, detectedFieldsRT, limits) + return newRoundTripper(log, next, limitedRT, logFilterRT, metricRT, seriesRT, labelsRT, instantRT, statsRT, seriesVolumeRT, detectedFieldsRT, detectedLabelsRT, limits) }), StopperWrapper{resultsCache, statsCache, volumeCache}, nil } type roundTripper struct { logger log.Logger - next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields base.Handler + next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields, detectedLabels base.Handler limits Limits } // newRoundTripper creates a new queryrange roundtripper -func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields base.Handler, limits Limits) roundTripper { +func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labels, instantMetric, indexStats, seriesVolume, detectedFields, detectedLabels base.Handler, limits Limits) roundTripper { return roundTripper{ logger: logger, limited: limited, @@ -274,6 +275,7 @@ func newRoundTripper(logger log.Logger, next, limited, log, metric, series, labe indexStats: indexStats, seriesVolume: seriesVolume, detectedFields: detectedFields, + detectedLabels: detectedLabels, next: next, } } @@ -378,6 +380,7 @@ func (r roundTripper) Do(ctx context.Context, req 
base.Request) (base.Response, ) return r.detectedFields.Do(ctx, req) + // TODO(shantanu): Add DetectedLabels default: return r.next.Do(ctx, req) } @@ -412,6 +415,7 @@ const ( VolumeRangeOp = "volume_range" IndexShardsOp = "index_shards" DetectedFieldsOp = "detected_fields" + DetectedLabelsOp = "detected_labels" ) func getOperation(path string) string { @@ -434,6 +438,8 @@ func getOperation(path string) string { return IndexShardsOp case path == "/loki/api/v1/detected_fields": return DetectedFieldsOp + case path == "/loki/api/v1/detected_labels": + return DetectedLabelsOp default: return "" } diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index cd287a6ac4ab0..9996790a51147 100644 --- a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -1004,6 +1004,7 @@ func TestPostQueries(t *testing.T) { handler, handler, handler, + handler, fakeLimits{}, ).Do(ctx, lreq) require.NoError(t, err) diff --git a/pkg/querier/queryrange/stats.go b/pkg/querier/queryrange/stats.go index 029c6df720fa7..7cc2943fecaf4 100644 --- a/pkg/querier/queryrange/stats.go +++ b/pkg/querier/queryrange/stats.go @@ -37,6 +37,7 @@ const ( queryTypeVolume = "volume" queryTypeShards = "shards" queryTypeDetectedFields = "detected_fields" + queryTypeDetectedLabels = "detected_labels" ) var ( @@ -64,6 +65,8 @@ func recordQueryMetrics(data *queryData) { logql.RecordVolumeQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.params.Limit(), data.params.Step(), data.status, *data.statistics) case queryTypeDetectedFields: logql.RecordDetectedFieldsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) + case queryTypeDetectedLabels: + logql.RecordDetectedLabelsQueryMetrics(data.ctx, logger, data.params.Start(), data.params.End(), data.params.QueryString(), data.status, *data.statistics) default: level.Error(logger).Log("msg", "failed to record query metrics", "err", fmt.Errorf("expected one of the *LokiRequest, *LokiInstantRequest, *LokiSeriesRequest, *LokiLabelNamesRequest, got %s", data.queryType)) } diff --git a/pkg/util/marshal/marshal.go b/pkg/util/marshal/marshal.go index 09a9d8ea8af6f..08c90a348c2b3 100644 --- a/pkg/util/marshal/marshal.go +++ b/pkg/util/marshal/marshal.go @@ -184,3 +184,13 @@ func WriteDetectedFieldsResponseJSON(r *logproto.DetectedFieldsResponse, w io.Wr s.WriteRaw("\n") return s.Flush() } + +// WriteDetectedLabelsResponseJSON marshals a logproto.DetectedLabelsResponse to JSON and then +// writes it to the provided io.Writer. 
+func WriteDetectedLabelsResponseJSON(r *logproto.DetectedLabelsResponse, w io.Writer) error { + s := jsoniter.ConfigFastest.BorrowStream(w) + defer jsoniter.ConfigFastest.ReturnStream(s) + s.WriteVal(r) + s.WriteRaw("\n") + return s.Flush() +} From 018856c1c1dc59e81834e9b8745a5a5250f97336 Mon Sep 17 00:00:00 2001 From: Callum Styan Date: Mon, 1 Apr 2024 06:40:16 -0700 Subject: [PATCH 44/54] fix: fix span logging based on changes to request types timestamps (#12393) Signed-off-by: Callum Styan --- pkg/logproto/compat_test.go | 52 ++++ pkg/querier/queryrange/codec.go | 12 +- pkg/querier/queryrange/codec_test.go | 92 ++++++ .../queryrangebase/query_range_test.go | 26 ++ .../mocktracer/mocklogrecord.go | 105 +++++++ .../opentracing-go/mocktracer/mockspan.go | 284 ++++++++++++++++++ .../opentracing-go/mocktracer/mocktracer.go | 105 +++++++ .../opentracing-go/mocktracer/propagation.go | 120 ++++++++ vendor/modules.txt | 1 + 9 files changed, 791 insertions(+), 6 deletions(-) create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go diff --git a/pkg/logproto/compat_test.go b/pkg/logproto/compat_test.go index d4de93638f827..c4e3307d03e39 100644 --- a/pkg/logproto/compat_test.go +++ b/pkg/logproto/compat_test.go @@ -5,10 +5,14 @@ import ( "fmt" "math" "testing" + "time" "unsafe" jsoniter "github.com/json-iterator/go" + "github.com/opentracing/opentracing-go/mocktracer" + "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/model/timestamp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -338,6 +342,54 @@ func TestFilterChunkRefRequestGetQuery(t *testing.T) { } } +func TestIndexStatsRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := IndexStatsRequest{ + From: model.Time(now.UnixMilli()), + Through: model.Time(end.UnixMilli()), + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + + } + } + } +} + +func TestVolumeRequest(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := VolumeRequest{ + From: model.Time(now.UnixMilli()), + Through: model.Time(end.UnixMilli()), + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + + } + } + } +} + func benchmarkMergeLabelResponses(b *testing.B, responses []*LabelResponse) { b.ReportAllocs() for n := 0; n < b.N; n++ { diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index d850df4954560..6bcfb03b33364 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -84,8 +84,8 @@ func (r *LokiRequest) WithShards(shards logql.Shards) *LokiRequest { func (r 
*LokiRequest) LogToSpan(sp opentracing.Span) { sp.LogFields( otlog.String("query", r.GetQuery()), - otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), - otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + otlog.String("start", timestamp.Time(r.GetStart().UnixMilli()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixMilli()).String()), otlog.Int64("step (ms)", r.GetStep()), otlog.Int64("interval (ms)", r.GetInterval()), otlog.Int64("limit", int64(r.GetLimit())), @@ -179,8 +179,8 @@ func (r *LokiSeriesRequest) GetStep() int64 { func (r *LokiSeriesRequest) LogToSpan(sp opentracing.Span) { sp.LogFields( otlog.String("matchers", strings.Join(r.GetMatch(), ",")), - otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), - otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + otlog.String("start", timestamp.Time(r.GetStart().UnixMilli()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixMilli()).String()), otlog.String("shards", strings.Join(r.GetShards(), ",")), ) } @@ -250,8 +250,8 @@ func (r *LabelRequest) WithQuery(query string) queryrangebase.Request { func (r *LabelRequest) LogToSpan(sp opentracing.Span) { sp.LogFields( - otlog.String("start", timestamp.Time(r.GetStart().UnixNano()).String()), - otlog.String("end", timestamp.Time(r.GetEnd().UnixNano()).String()), + otlog.String("start", timestamp.Time(r.GetStart().UnixMilli()).String()), + otlog.String("end", timestamp.Time(r.GetEnd().UnixMilli()).String()), ) } diff --git a/pkg/querier/queryrange/codec_test.go b/pkg/querier/queryrange/codec_test.go index 6a692c9180c94..cdc95865e12c7 100644 --- a/pkg/querier/queryrange/codec_test.go +++ b/pkg/querier/queryrange/codec_test.go @@ -16,8 +16,10 @@ import ( "github.com/gorilla/mux" "github.com/grafana/dskit/user" + "github.com/opentracing/opentracing-go/mocktracer" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/model/timestamp" "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" @@ -424,6 +426,96 @@ func Test_codec_DecodeResponse(t *testing.T) { } } +func TestLokiRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := LokiRequest{ + StartTs: now, + EndTs: end, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + } + } + } +} + +func TestLokiInstantRequestSpanLogging(t *testing.T) { + now := time.Now() + req := LokiInstantRequest{ + TimeTs: now, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "ts" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + } + } +} + +func TestLokiSeriesRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := LokiSeriesRequest{ + StartTs: now, + EndTs: end, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if 
field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + + } + } + } +} + +func TestLabelRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := LabelRequest{ + LabelRequest: logproto.LabelRequest{ + Start: &now, + End: &end, + }, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + } + } + } +} + func Test_codec_DecodeProtobufResponseParity(t *testing.T) { // test fixtures from pkg/util/marshal_test var queryTests = []struct { diff --git a/pkg/querier/queryrange/queryrangebase/query_range_test.go b/pkg/querier/queryrange/queryrangebase/query_range_test.go index 4a59b2977b649..ada34f5aba895 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range_test.go +++ b/pkg/querier/queryrange/queryrangebase/query_range_test.go @@ -7,8 +7,11 @@ import ( "net/http" "strconv" "testing" + "time" jsoniter "github.com/json-iterator/go" + "github.com/opentracing/opentracing-go/mocktracer" + "github.com/prometheus/prometheus/model/timestamp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -269,6 +272,29 @@ func TestMergeAPIResponses(t *testing.T) { } } +func TestPrometheusRequestSpanLogging(t *testing.T) { + now := time.Now() + end := now.Add(1000 * time.Second) + req := PrometheusRequest{ + Start: now, + End: end, + } + + span := mocktracer.MockSpan{} + req.LogToSpan(&span) + + for _, l := range span.Logs() { + for _, field := range l.Fields { + if field.Key == "start" { + require.Equal(t, timestamp.Time(now.UnixMilli()).String(), field.ValueString) + } + if field.Key == "end" { + require.Equal(t, timestamp.Time(end.UnixMilli()).String(), field.ValueString) + } + } + } +} + func mustParse(t *testing.T, response string) Response { var resp PrometheusResponse // Needed as goimports automatically add a json import otherwise. diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go new file mode 100644 index 0000000000000..2ce96d9d3887e --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go @@ -0,0 +1,105 @@ +package mocktracer + +import ( + "fmt" + "reflect" + "time" + + "github.com/opentracing/opentracing-go/log" +) + +// MockLogRecord represents data logged to a Span via Span.LogFields or +// Span.LogKV. +type MockLogRecord struct { + Timestamp time.Time + Fields []MockKeyValue +} + +// MockKeyValue represents a single key:value pair. +type MockKeyValue struct { + Key string + + // All MockLogRecord values are coerced to strings via fmt.Sprint(), though + // we retain their type separately. 
+ ValueKind reflect.Kind + ValueString string +} + +// EmitString belongs to the log.Encoder interface +func (m *MockKeyValue) EmitString(key, value string) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitBool belongs to the log.Encoder interface +func (m *MockKeyValue) EmitBool(key string, value bool) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt(key string, value int) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt32(key string, value int32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt64(key string, value int64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitUint32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitUint32(key string, value uint32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitUint64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitUint64(key string, value uint64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitFloat32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitFloat32(key string, value float32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitFloat64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitFloat64(key string, value float64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitObject belongs to the log.Encoder interface +func (m *MockKeyValue) EmitObject(key string, value interface{}) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitLazyLogger belongs to the log.Encoder interface +func (m *MockKeyValue) EmitLazyLogger(value log.LazyLogger) { + var meta MockKeyValue + value(&meta) + m.Key = meta.Key + m.ValueKind = meta.ValueKind + m.ValueString = meta.ValueString +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go new file mode 100644 index 0000000000000..8c7932ce65b30 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go @@ -0,0 +1,284 @@ +package mocktracer + +import ( + "fmt" + "sync" + "sync/atomic" + "time" + + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" +) + +// MockSpanContext is an opentracing.SpanContext implementation. +// +// It is entirely unsuitable for production use, but appropriate for tests +// that want to verify tracing behavior in other frameworks/applications. +// +// By default all spans have Sampled=true flag, unless {"sampling.priority": 0} +// tag is set. 
+type MockSpanContext struct { + TraceID int + SpanID int + Sampled bool + Baggage map[string]string +} + +var mockIDSource = uint32(42) + +func nextMockID() int { + return int(atomic.AddUint32(&mockIDSource, 1)) +} + +// ForeachBaggageItem belongs to the SpanContext interface +func (c MockSpanContext) ForeachBaggageItem(handler func(k, v string) bool) { + for k, v := range c.Baggage { + if !handler(k, v) { + break + } + } +} + +// WithBaggageItem creates a new context with an extra baggage item. +func (c MockSpanContext) WithBaggageItem(key, value string) MockSpanContext { + var newBaggage map[string]string + if c.Baggage == nil { + newBaggage = map[string]string{key: value} + } else { + newBaggage = make(map[string]string, len(c.Baggage)+1) + for k, v := range c.Baggage { + newBaggage[k] = v + } + newBaggage[key] = value + } + // Use positional parameters so the compiler will help catch new fields. + return MockSpanContext{c.TraceID, c.SpanID, c.Sampled, newBaggage} +} + +// MockSpan is an opentracing.Span implementation that exports its internal +// state for testing purposes. +type MockSpan struct { + sync.RWMutex + + ParentID int + + OperationName string + StartTime time.Time + FinishTime time.Time + + // All of the below are protected by the embedded RWMutex. + SpanContext MockSpanContext + tags map[string]interface{} + logs []MockLogRecord + tracer *MockTracer +} + +func newMockSpan(t *MockTracer, name string, opts opentracing.StartSpanOptions) *MockSpan { + tags := opts.Tags + if tags == nil { + tags = map[string]interface{}{} + } + traceID := nextMockID() + parentID := int(0) + var baggage map[string]string + sampled := true + if len(opts.References) > 0 { + traceID = opts.References[0].ReferencedContext.(MockSpanContext).TraceID + parentID = opts.References[0].ReferencedContext.(MockSpanContext).SpanID + sampled = opts.References[0].ReferencedContext.(MockSpanContext).Sampled + baggage = opts.References[0].ReferencedContext.(MockSpanContext).Baggage + } + spanContext := MockSpanContext{traceID, nextMockID(), sampled, baggage} + startTime := opts.StartTime + if startTime.IsZero() { + startTime = time.Now() + } + return &MockSpan{ + ParentID: parentID, + OperationName: name, + StartTime: startTime, + tags: tags, + logs: []MockLogRecord{}, + SpanContext: spanContext, + + tracer: t, + } +} + +// Tags returns a copy of tags accumulated by the span so far +func (s *MockSpan) Tags() map[string]interface{} { + s.RLock() + defer s.RUnlock() + tags := make(map[string]interface{}) + for k, v := range s.tags { + tags[k] = v + } + return tags +} + +// Tag returns a single tag +func (s *MockSpan) Tag(k string) interface{} { + s.RLock() + defer s.RUnlock() + return s.tags[k] +} + +// Logs returns a copy of logs accumulated in the span so far +func (s *MockSpan) Logs() []MockLogRecord { + s.RLock() + defer s.RUnlock() + logs := make([]MockLogRecord, len(s.logs)) + copy(logs, s.logs) + return logs +} + +// Context belongs to the Span interface +func (s *MockSpan) Context() opentracing.SpanContext { + s.Lock() + defer s.Unlock() + return s.SpanContext +} + +// SetTag belongs to the Span interface +func (s *MockSpan) SetTag(key string, value interface{}) opentracing.Span { + s.Lock() + defer s.Unlock() + if key == string(ext.SamplingPriority) { + if v, ok := value.(uint16); ok { + s.SpanContext.Sampled = v > 0 + return s + } + if v, ok := value.(int); ok { + s.SpanContext.Sampled = v > 0 + return s + } + } + s.tags[key] = value + return s +} + +// SetBaggageItem belongs to the Span interface +func 
(s *MockSpan) SetBaggageItem(key, val string) opentracing.Span { + s.Lock() + defer s.Unlock() + s.SpanContext = s.SpanContext.WithBaggageItem(key, val) + return s +} + +// BaggageItem belongs to the Span interface +func (s *MockSpan) BaggageItem(key string) string { + s.RLock() + defer s.RUnlock() + return s.SpanContext.Baggage[key] +} + +// Finish belongs to the Span interface +func (s *MockSpan) Finish() { + s.Lock() + s.FinishTime = time.Now() + s.Unlock() + s.tracer.recordSpan(s) +} + +// FinishWithOptions belongs to the Span interface +func (s *MockSpan) FinishWithOptions(opts opentracing.FinishOptions) { + s.Lock() + s.FinishTime = opts.FinishTime + s.Unlock() + + // Handle any late-bound LogRecords. + for _, lr := range opts.LogRecords { + s.logFieldsWithTimestamp(lr.Timestamp, lr.Fields...) + } + // Handle (deprecated) BulkLogData. + for _, ld := range opts.BulkLogData { + if ld.Payload != nil { + s.logFieldsWithTimestamp( + ld.Timestamp, + log.String("event", ld.Event), + log.Object("payload", ld.Payload)) + } else { + s.logFieldsWithTimestamp( + ld.Timestamp, + log.String("event", ld.Event)) + } + } + + s.tracer.recordSpan(s) +} + +// String allows printing span for debugging +func (s *MockSpan) String() string { + return fmt.Sprintf( + "traceId=%d, spanId=%d, parentId=%d, sampled=%t, name=%s", + s.SpanContext.TraceID, s.SpanContext.SpanID, s.ParentID, + s.SpanContext.Sampled, s.OperationName) +} + +// LogFields belongs to the Span interface +func (s *MockSpan) LogFields(fields ...log.Field) { + s.logFieldsWithTimestamp(time.Now(), fields...) +} + +// The caller MUST NOT hold s.Lock +func (s *MockSpan) logFieldsWithTimestamp(ts time.Time, fields ...log.Field) { + lr := MockLogRecord{ + Timestamp: ts, + Fields: make([]MockKeyValue, len(fields)), + } + for i, f := range fields { + outField := &(lr.Fields[i]) + f.Marshal(outField) + } + + s.Lock() + defer s.Unlock() + s.logs = append(s.logs, lr) +} + +// LogKV belongs to the Span interface. +// +// This implementations coerces all "values" to strings, though that is not +// something all implementations need to do. Indeed, a motivated person can and +// probably should have this do a typed switch on the values. +func (s *MockSpan) LogKV(keyValues ...interface{}) { + if len(keyValues)%2 != 0 { + s.LogFields(log.Error(fmt.Errorf("Non-even keyValues len: %v", len(keyValues)))) + return + } + fields, err := log.InterleavedKVToFields(keyValues...) + if err != nil { + s.LogFields(log.Error(err), log.String("function", "LogKV")) + return + } + s.LogFields(fields...) 
+} + +// LogEvent belongs to the Span interface +func (s *MockSpan) LogEvent(event string) { + s.LogFields(log.String("event", event)) +} + +// LogEventWithPayload belongs to the Span interface +func (s *MockSpan) LogEventWithPayload(event string, payload interface{}) { + s.LogFields(log.String("event", event), log.Object("payload", payload)) +} + +// Log belongs to the Span interface +func (s *MockSpan) Log(data opentracing.LogData) { + panic("MockSpan.Log() no longer supported") +} + +// SetOperationName belongs to the Span interface +func (s *MockSpan) SetOperationName(operationName string) opentracing.Span { + s.Lock() + defer s.Unlock() + s.OperationName = operationName + return s +} + +// Tracer belongs to the Span interface +func (s *MockSpan) Tracer() opentracing.Tracer { + return s.tracer +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go new file mode 100644 index 0000000000000..4533da7b1f782 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go @@ -0,0 +1,105 @@ +package mocktracer + +import ( + "sync" + + "github.com/opentracing/opentracing-go" +) + +// New returns a MockTracer opentracing.Tracer implementation that's intended +// to facilitate tests of OpenTracing instrumentation. +func New() *MockTracer { + t := &MockTracer{ + finishedSpans: []*MockSpan{}, + injectors: make(map[interface{}]Injector), + extractors: make(map[interface{}]Extractor), + } + + // register default injectors/extractors + textPropagator := new(TextMapPropagator) + t.RegisterInjector(opentracing.TextMap, textPropagator) + t.RegisterExtractor(opentracing.TextMap, textPropagator) + + httpPropagator := &TextMapPropagator{HTTPHeaders: true} + t.RegisterInjector(opentracing.HTTPHeaders, httpPropagator) + t.RegisterExtractor(opentracing.HTTPHeaders, httpPropagator) + + return t +} + +// MockTracer is only intended for testing OpenTracing instrumentation. +// +// It is entirely unsuitable for production use, but appropriate for tests +// that want to verify tracing behavior in other frameworks/applications. +type MockTracer struct { + sync.RWMutex + finishedSpans []*MockSpan + injectors map[interface{}]Injector + extractors map[interface{}]Extractor +} + +// FinishedSpans returns all spans that have been Finish()'ed since the +// MockTracer was constructed or since the last call to its Reset() method. +func (t *MockTracer) FinishedSpans() []*MockSpan { + t.RLock() + defer t.RUnlock() + spans := make([]*MockSpan, len(t.finishedSpans)) + copy(spans, t.finishedSpans) + return spans +} + +// Reset clears the internally accumulated finished spans. Note that any +// extant MockSpans will still append to finishedSpans when they Finish(), +// even after a call to Reset(). +func (t *MockTracer) Reset() { + t.Lock() + defer t.Unlock() + t.finishedSpans = []*MockSpan{} +} + +// StartSpan belongs to the Tracer interface. 
+func (t *MockTracer) StartSpan(operationName string, opts ...opentracing.StartSpanOption) opentracing.Span { + sso := opentracing.StartSpanOptions{} + for _, o := range opts { + o.Apply(&sso) + } + return newMockSpan(t, operationName, sso) +} + +// RegisterInjector registers injector for given format +func (t *MockTracer) RegisterInjector(format interface{}, injector Injector) { + t.injectors[format] = injector +} + +// RegisterExtractor registers extractor for given format +func (t *MockTracer) RegisterExtractor(format interface{}, extractor Extractor) { + t.extractors[format] = extractor +} + +// Inject belongs to the Tracer interface. +func (t *MockTracer) Inject(sm opentracing.SpanContext, format interface{}, carrier interface{}) error { + spanContext, ok := sm.(MockSpanContext) + if !ok { + return opentracing.ErrInvalidSpanContext + } + injector, ok := t.injectors[format] + if !ok { + return opentracing.ErrUnsupportedFormat + } + return injector.Inject(spanContext, carrier) +} + +// Extract belongs to the Tracer interface. +func (t *MockTracer) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) { + extractor, ok := t.extractors[format] + if !ok { + return nil, opentracing.ErrUnsupportedFormat + } + return extractor.Extract(carrier) +} + +func (t *MockTracer) recordSpan(span *MockSpan) { + t.Lock() + defer t.Unlock() + t.finishedSpans = append(t.finishedSpans, span) +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go new file mode 100644 index 0000000000000..8364f1d182528 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go @@ -0,0 +1,120 @@ +package mocktracer + +import ( + "fmt" + "net/url" + "strconv" + "strings" + + "github.com/opentracing/opentracing-go" +) + +const mockTextMapIdsPrefix = "mockpfx-ids-" +const mockTextMapBaggagePrefix = "mockpfx-baggage-" + +var emptyContext = MockSpanContext{} + +// Injector is responsible for injecting SpanContext instances in a manner suitable +// for propagation via a format-specific "carrier" object. Typically the +// injection will take place across an RPC boundary, but message queues and +// other IPC mechanisms are also reasonable places to use an Injector. +type Injector interface { + // Inject takes `SpanContext` and injects it into `carrier`. The actual type + // of `carrier` depends on the `format` passed to `Tracer.Inject()`. + // + // Implementations may return opentracing.ErrInvalidCarrier or any other + // implementation-specific error if injection fails. + Inject(ctx MockSpanContext, carrier interface{}) error +} + +// Extractor is responsible for extracting SpanContext instances from a +// format-specific "carrier" object. Typically the extraction will take place +// on the server side of an RPC boundary, but message queues and other IPC +// mechanisms are also reasonable places to use an Extractor. +type Extractor interface { + // Extract decodes a SpanContext instance from the given `carrier`, + // or (nil, opentracing.ErrSpanContextNotFound) if no context could + // be found in the `carrier`. + Extract(carrier interface{}) (MockSpanContext, error) +} + +// TextMapPropagator implements Injector/Extractor for TextMap and HTTPHeaders formats. 
+type TextMapPropagator struct { + HTTPHeaders bool +} + +// Inject implements the Injector interface +func (t *TextMapPropagator) Inject(spanContext MockSpanContext, carrier interface{}) error { + writer, ok := carrier.(opentracing.TextMapWriter) + if !ok { + return opentracing.ErrInvalidCarrier + } + // Ids: + writer.Set(mockTextMapIdsPrefix+"traceid", strconv.Itoa(spanContext.TraceID)) + writer.Set(mockTextMapIdsPrefix+"spanid", strconv.Itoa(spanContext.SpanID)) + writer.Set(mockTextMapIdsPrefix+"sampled", fmt.Sprint(spanContext.Sampled)) + // Baggage: + for baggageKey, baggageVal := range spanContext.Baggage { + safeVal := baggageVal + if t.HTTPHeaders { + safeVal = url.QueryEscape(baggageVal) + } + writer.Set(mockTextMapBaggagePrefix+baggageKey, safeVal) + } + return nil +} + +// Extract implements the Extractor interface +func (t *TextMapPropagator) Extract(carrier interface{}) (MockSpanContext, error) { + reader, ok := carrier.(opentracing.TextMapReader) + if !ok { + return emptyContext, opentracing.ErrInvalidCarrier + } + rval := MockSpanContext{0, 0, true, nil} + err := reader.ForeachKey(func(key, val string) error { + lowerKey := strings.ToLower(key) + switch { + case lowerKey == mockTextMapIdsPrefix+"traceid": + // Ids: + i, err := strconv.Atoi(val) + if err != nil { + return err + } + rval.TraceID = i + case lowerKey == mockTextMapIdsPrefix+"spanid": + // Ids: + i, err := strconv.Atoi(val) + if err != nil { + return err + } + rval.SpanID = i + case lowerKey == mockTextMapIdsPrefix+"sampled": + b, err := strconv.ParseBool(val) + if err != nil { + return err + } + rval.Sampled = b + case strings.HasPrefix(lowerKey, mockTextMapBaggagePrefix): + // Baggage: + if rval.Baggage == nil { + rval.Baggage = make(map[string]string) + } + safeVal := val + if t.HTTPHeaders { + // unescape errors are ignored, nothing can be done + if rawVal, err := url.QueryUnescape(val); err == nil { + safeVal = rawVal + } + } + rval.Baggage[lowerKey[len(mockTextMapBaggagePrefix):]] = safeVal + } + return nil + }) + if rval.TraceID == 0 || rval.SpanID == 0 { + return emptyContext, opentracing.ErrSpanContextNotFound + } + if err != nil { + return emptyContext, err + } + return rval, nil +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 141a17a31996b..9bbf3e0af8662 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1225,6 +1225,7 @@ github.com/opentracing-contrib/go-stdlib/nethttp github.com/opentracing/opentracing-go github.com/opentracing/opentracing-go/ext github.com/opentracing/opentracing-go/log +github.com/opentracing/opentracing-go/mocktracer # github.com/oschwald/geoip2-golang v1.9.0 ## explicit; go 1.19 github.com/oschwald/geoip2-golang From a207206641c6f6c82ee2cd9b230ee9605ae20f11 Mon Sep 17 00:00:00 2001 From: Sourav <43824012+sourav-py@users.noreply.github.com> Date: Tue, 2 Apr 2024 02:47:37 +0530 Subject: [PATCH 45/54] docs: Update _index.md (#12409) --- docs/sources/send-data/_index.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/sources/send-data/_index.md b/docs/sources/send-data/_index.md index 981d98fe1e12c..f3106edf2352c 100644 --- a/docs/sources/send-data/_index.md +++ b/docs/sources/send-data/_index.md @@ -59,6 +59,7 @@ These third-party clients also enable sending logs to Loki: - [promtail-client](https://github.com/afiskon/promtail-client) (Go) - [push-to-loki.py](https://github.com/sleleko/devops-kb/blob/master/python/push-to-loki.py) (Python 3) - [python-logging-loki](https://pypi.org/project/python-logging-loki/) (Python 3) +- 
[nextlog](https://pypi.org/project/nextlog/) (Python 3) - [Serilog-Sinks-Loki](https://github.com/JosephWoodward/Serilog-Sinks-Loki) (C#) - [Vector Loki Sink](https://vector.dev/docs/reference/configuration/sinks/loki/) - [winston-loki](https://github.com/JaniAnttonen/winston-loki) (JS) From 420169aaab30ab6142526ebf941d6bd45059a0e6 Mon Sep 17 00:00:00 2001 From: stayweek <165480133+stayweek@users.noreply.github.com> Date: Tue, 2 Apr 2024 05:18:06 +0800 Subject: [PATCH 46/54] fix: fix some comments (#12417) Signed-off-by: stayweek Co-authored-by: J Stickler --- clients/pkg/logentry/stages/extensions.go | 2 +- clients/pkg/logentry/stages/limit_test.go | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/clients/pkg/logentry/stages/extensions.go b/clients/pkg/logentry/stages/extensions.go index f25ffe02e8403..d2e788dcb9d1a 100644 --- a/clients/pkg/logentry/stages/extensions.go +++ b/clients/pkg/logentry/stages/extensions.go @@ -128,7 +128,7 @@ type CriConfig struct { MaxPartialLineSizeTruncate bool `mapstructure:"max_partial_line_size_truncate"` } -// validateDropConfig validates the DropConfig for the dropStage +// validateCriConfig validates the CriConfig for the cri stage func validateCriConfig(cfg *CriConfig) error { if cfg.MaxPartialLines == 0 { cfg.MaxPartialLines = MaxPartialLinesSize diff --git a/clients/pkg/logentry/stages/limit_test.go b/clients/pkg/logentry/stages/limit_test.go index b439db4908b2f..840db40d37cb2 100644 --- a/clients/pkg/logentry/stages/limit_test.go +++ b/clients/pkg/logentry/stages/limit_test.go @@ -60,7 +60,7 @@ var testNonAppLogLine = ` var plName = "testPipeline" -// TestLimitPipeline is used to verify we properly parse the yaml config and create a working pipeline +// TestLimitWaitPipeline is used to verify we properly parse the yaml config and create a working pipeline func TestLimitWaitPipeline(t *testing.T) { registry := prometheus.NewRegistry() pl, err := NewPipeline(util_log.Logger, loadConfig(testLimitWaitYaml), &plName, registry) @@ -78,7 +78,7 @@ func TestLimitWaitPipeline(t *testing.T) { assert.Equal(t, out[0].Line, testMatchLogLineApp1) } -// TestLimitPipeline is used to verify we properly parse the yaml config and create a working pipeline +// TestLimitDropPipeline is used to verify we properly parse the yaml config and create a working pipeline func TestLimitDropPipeline(t *testing.T) { registry := prometheus.NewRegistry() pl, err := NewPipeline(util_log.Logger, loadConfig(testLimitDropYaml), &plName, registry) From a331746c2f3832daedc2cced440101ff5e57ead8 Mon Sep 17 00:00:00 2001 From: Shantanu Alshi Date: Tue, 2 Apr 2024 12:15:29 +0530 Subject: [PATCH 47/54] feat(detected_labels): Add pod as a detected label to demo (#12422) --- pkg/querier/querier.go | 1 + 1 file changed, 1 insertion(+) diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index ff83302a0e288..9527980e9d5f0 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -918,6 +918,7 @@ func (q *SingleTenantQuerier) DetectedLabels(_ context.Context, _ *logproto.Dete {Label: "namespace"}, {Label: "cluster"}, {Label: "instance"}, + {Label: "pod"}, }, }, nil } From 36c703dae82615c8d67ca7ca88f1d7e11a5ec4e5 Mon Sep 17 00:00:00 2001 From: Anton Kolesnikov Date: Tue, 2 Apr 2024 14:47:21 +0800 Subject: [PATCH 48/54] feat: add pattern match line filter (#12398) --- CHANGELOG.md | 1 + pkg/logql/log/filter.go | 45 ++ pkg/logql/log/parser.go | 2 +- pkg/logql/log/pattern/ast.go | 33 +- pkg/logql/log/pattern/parser.go | 6 +- pkg/logql/log/pattern/pattern.go | 93 
++- pkg/logql/log/pattern/pattern_test.go | 91 ++- pkg/logql/syntax/ast.go | 2 +- pkg/logql/syntax/ast_test.go | 58 +- pkg/logql/syntax/expr.y | 4 +- pkg/logql/syntax/expr.y.go | 915 +++++++++++----------- pkg/logql/syntax/lex.go | 2 + pkg/logql/syntax/linefilter_test.go | 5 +- pkg/storage/bloom/v1/bloom_tester.go | 22 +- pkg/storage/bloom/v1/bloom_tester_test.go | 36 + 15 files changed, 827 insertions(+), 488 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fdc5c9c29ed06..289045cf2f6bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -63,6 +63,7 @@ * [11970](https://github.com/grafana/loki/pull/11897) **masslessparticle** Ksonnet: Introduces memory limits to the compactor configuration to avoid unbounded memory usage. * [12318](https://github.com/grafana/loki/pull/12318) **DylanGuedes** Memcached: Add mTLS support. * [12392](https://github.com/grafana/loki/pull/12392) **sandeepsukhani** Detect name of service emitting logs and add it as a label. +* [12398](https://github.com/grafana/loki/pull/12398) **kolesnikovae** LogQL: Introduces pattern match filter operators. ##### Fixes * [11074](https://github.com/grafana/loki/pull/11074) **hainenber** Fix panic in lambda-promtail due to mishandling of empty DROP_LABELS env var. diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index 164741f4c8c96..8a9a162828638 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -11,6 +11,7 @@ import ( "github.com/prometheus/prometheus/model/labels" + "github.com/grafana/loki/pkg/logql/log/pattern" "github.com/grafana/loki/pkg/util" ) @@ -23,6 +24,8 @@ const ( LineMatchNotEqual LineMatchRegexp LineMatchNotRegexp + LineMatchPattern + LineMatchNotPattern ) func (t LineMatchType) String() string { @@ -35,6 +38,10 @@ func (t LineMatchType) String() string { return "|~" case LineMatchNotRegexp: return "!~" + case LineMatchPattern: + return "|>" + case LineMatchNotPattern: + return "!>" default: return "" } @@ -553,6 +560,10 @@ func NewFilter(match string, mt LineMatchType) (Filterer, error) { return newContainsFilter([]byte(match), false), nil case LineMatchNotEqual: return NewNotFilter(newContainsFilter([]byte(match), false)), nil + case LineMatchPattern: + return newPatternFilterer([]byte(match), true) + case LineMatchNotPattern: + return newPatternFilterer([]byte(match), false) default: return nil, fmt.Errorf("unknown matcher: %v", match) } @@ -783,3 +794,37 @@ func (s *RegexSimplifier) simplifyConcatAlternate(reg *syntax.Regexp, literal [] } return nil, false } + +type patternFilter struct { + matcher *pattern.Matcher + pattern []byte +} + +func newPatternFilterer(p []byte, match bool) (MatcherFilterer, error) { + m, err := pattern.ParseLineFilter(p) + if err != nil { + return nil, err + } + filter := &patternFilter{ + matcher: m, + pattern: p, + } + if !match { + return NewNotFilter(filter), nil + } + return filter, nil +} + +func (f *patternFilter) Filter(line []byte) bool { return f.matcher.Test(line) } + +func (f *patternFilter) Matches(test Checker) bool { + return test.Test(f.pattern, false, false) +} + +func (f *patternFilter) ToStage() Stage { + return StageFunc{ + process: func(_ int64, line []byte, _ *LabelsBuilder) ([]byte, bool) { + return line, f.Filter(line) + }, + } +} diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index 90d4a4bebf8ab..afdc7f91a4f9d 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -373,7 +373,7 @@ func (l *LogfmtParser) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte func (l *LogfmtParser) 
RequiredLabelNames() []string { return []string{} } type PatternParser struct { - matcher pattern.Matcher + matcher *pattern.Matcher names []string } diff --git a/pkg/logql/log/pattern/ast.go b/pkg/logql/log/pattern/ast.go index b4cf8e813f968..677db07dcfbef 100644 --- a/pkg/logql/log/pattern/ast.go +++ b/pkg/logql/log/pattern/ast.go @@ -20,6 +20,21 @@ func (e expr) validate() error { return ErrNoCapture } // Consecutive captures are not allowed. + if err := e.validateNoConsecutiveCaptures(); err != nil { + return err + } + caps := e.captures() + uniq := map[string]struct{}{} + for _, c := range caps { + if _, ok := uniq[c]; ok { + return fmt.Errorf("duplicate capture name (%s): %w", c, ErrInvalidExpr) + } + uniq[c] = struct{}{} + } + return nil +} + +func (e expr) validateNoConsecutiveCaptures() error { for i, n := range e { if i+1 >= len(e) { break @@ -30,21 +45,21 @@ func (e expr) validate() error { } } } + return nil +} - caps := e.captures() - uniq := map[string]struct{}{} - for _, c := range caps { - if _, ok := uniq[c]; ok { - return fmt.Errorf("duplicate capture name (%s): %w", c, ErrInvalidExpr) +func (e expr) validateNoNamedCaptures() error { + for i, n := range e { + if c, ok := e[i].(capture); ok && !c.isUnnamed() { + return fmt.Errorf("%w: found '%s'", ErrCaptureNotAllowed, n.String()) } - uniq[c] = struct{}{} } return nil } func (e expr) captures() (captures []string) { for _, n := range e { - if c, ok := n.(capture); ok && !c.isUnamed() { + if c, ok := n.(capture); ok && !c.isUnnamed() { captures = append(captures, c.Name()) } } @@ -65,8 +80,8 @@ func (c capture) Name() string { return string(c) } -func (c capture) isUnamed() bool { - return string(c) == underscore +func (c capture) isUnnamed() bool { + return len(c) == 1 && c[0] == underscore[0] } type literals []byte diff --git a/pkg/logql/log/pattern/parser.go b/pkg/logql/log/pattern/parser.go index d1bc2515c9cb2..b2a868f7af76c 100644 --- a/pkg/logql/log/pattern/parser.go +++ b/pkg/logql/log/pattern/parser.go @@ -19,8 +19,12 @@ func init() { } func parseExpr(input string) (expr, error) { + return parseExprBytes([]byte(input)) +} + +func parseExprBytes(input []byte) (expr, error) { l := newLexer() - l.setData([]byte(input)) + l.setData(input) e := exprNewParser().Parse(l) if e != 0 || len(l.errs) > 0 { return nil, l.errs[0] diff --git a/pkg/logql/log/pattern/pattern.go b/pkg/logql/log/pattern/pattern.go index b08c91b610af9..83dc6a473a165 100644 --- a/pkg/logql/log/pattern/pattern.go +++ b/pkg/logql/log/pattern/pattern.go @@ -6,23 +6,19 @@ import ( ) var ( - ErrNoCapture = errors.New("at least one capture is required") - ErrInvalidExpr = errors.New("invalid expression") + ErrNoCapture = errors.New("at least one capture is required") + ErrCaptureNotAllowed = errors.New("named captures are not allowed") + ErrInvalidExpr = errors.New("invalid expression") ) -type Matcher interface { - Matches(in []byte) [][]byte - Names() []string -} - -type matcher struct { +type Matcher struct { e expr captures [][]byte names []string } -func New(in string) (Matcher, error) { +func New(in string) (*Matcher, error) { e, err := parseExpr(in) if err != nil { return nil, err @@ -30,16 +26,47 @@ func New(in string) (Matcher, error) { if err := e.validate(); err != nil { return nil, err } - return &matcher{ + return &Matcher{ e: e, captures: make([][]byte, 0, e.captureCount()), names: e.captures(), }, nil } +func ParseLineFilter(in []byte) (*Matcher, error) { + if len(in) == 0 { + return new(Matcher), nil + } + e, err := parseExprBytes(in) + if err 
!= nil { + return nil, err + } + if err = e.validateNoConsecutiveCaptures(); err != nil { + return nil, err + } + if err = e.validateNoNamedCaptures(); err != nil { + return nil, err + } + return &Matcher{e: e}, nil +} + +func ParseLiterals(in string) ([][]byte, error) { + e, err := parseExpr(in) + if err != nil { + return nil, err + } + lit := make([][]byte, 0, len(e)) + for _, n := range e { + if l, ok := n.(literals); ok { + lit = append(lit, l) + } + } + return lit, nil +} + // Matches matches the given line with the provided pattern. // Matches invalidates the previous returned captures array. -func (m *matcher) Matches(in []byte) [][]byte { +func (m *Matcher) Matches(in []byte) [][]byte { if len(in) == 0 { return nil } @@ -62,7 +89,7 @@ func (m *matcher) Matches(in []byte) [][]byte { // from now we have capture - literals - capture ... (literals)? for len(expr) != 0 { if len(expr) == 1 { // we're ending on a capture. - if !(expr[0].(capture)).isUnamed() { + if !(expr[0].(capture)).isUnnamed() { captures = append(captures, in) } return captures @@ -73,13 +100,13 @@ func (m *matcher) Matches(in []byte) [][]byte { i := bytes.Index(in, ls) if i == -1 { // if a capture is missed we return up to the end as the capture. - if !capt.isUnamed() { + if !capt.isUnnamed() { captures = append(captures, in) } return captures } - if capt.isUnamed() { + if capt.isUnnamed() { in = in[len(ls)+i:] continue } @@ -90,6 +117,42 @@ func (m *matcher) Matches(in []byte) [][]byte { return captures } -func (m *matcher) Names() []string { +func (m *Matcher) Names() []string { return m.names } + +func (m *Matcher) Test(in []byte) bool { + if len(in) == 0 || len(m.e) == 0 { + // An empty line can only match an empty pattern. + return len(in) == 0 && len(m.e) == 0 + } + var off int + for i := 0; i < len(m.e); i++ { + lit, ok := m.e[i].(literals) + if !ok { + continue + } + j := bytes.Index(in[off:], lit) + if j == -1 { + return false + } + if i != 0 && j == 0 { + // This means we either have repetitive literals, or an empty + // capture. Either way, the line does not match the pattern. + return false + } + off += j + len(lit) + } + // If we end up on a literal, we only consider the test successful if + // the remaining input is empty. Otherwise, if we end up on a capture, + // the remainder (the captured text) must not be empty. + // + // For example, "foo bar baz" does not match "<_> bar", but it matches + // "<_> baz" and "foo <_>". + // + // Empty captures are not allowed as well: " bar " does not match + // "<_> bar <_>", but matches "<_>bar<_>". 
+ _, reqRem := m.e[len(m.e)-1].(capture) + hasRem := off != len(in) + return reqRem == hasRem +} diff --git a/pkg/logql/log/pattern/pattern_test.go b/pkg/logql/log/pattern/pattern_test.go index da0c6a180527f..0d1c47f0bea29 100644 --- a/pkg/logql/log/pattern/pattern_test.go +++ b/pkg/logql/log/pattern/pattern_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -11,97 +12,144 @@ var fixtures = []struct { expr string in string expected []string + matches bool }{ { "foo bar", "foo buzz bar", []string{"buzz"}, + true, }, { "foo bar", "foo buzz bar", []string{"buzz", ""}, + false, + }, + { + "foo bar", + "foo buzz bar", + []string{"", "buzz"}, + false, }, { " bar", " bar", []string{"", ""}, + false, + }, + { + "bar", + " bar ", + []string{" ", " "}, + true, + }, + { + " bar", + " bar ", + []string{"", " "}, + false, + }, + { + "bar ", + " bar ", + []string{" ", ""}, + false, + }, + { + "", + " bar ", + []string{" bar "}, + true, }, { "?<_>", `/api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1`, []string{"/api/plugins/versioncheck"}, + true, }, { "?<_>", `/api/plugins/status`, []string{"/api/plugins/status"}, + false, }, { // Common Log Format ` [<_>] " <_>" `, `127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`, []string{"127.0.0.1", "user-identifier", "frank", "GET", "/apache_pb.gif", "200", "2326"}, + true, }, { // Combined Log Format ` - - [<_>] " <_>" `, `35.191.8.106 - - [19/May/2021:07:21:49 +0000] "GET /api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1 HTTP/1.1" 200 107 "-" "Go-http-client/2.0" "80.153.74.144, 34.120.177.193" "TLSv1.3" "DE" "DEBW"`, []string{"35.191.8.106", "GET", "/api/plugins/versioncheck?slugIn=snuids-trafficlights-panel,input,gel&grafanaVersion=7.0.0-beta1", "200", "107"}, + false, }, { // MySQL `<_> [] [] [] `, `2020-08-06T14:25:02.835618Z 0 [Note] [MY-012487] [InnoDB] DDL log recovery : begin`, []string{"0", "Note", "MY-012487", "InnoDB"}, + false, }, { // MySQL `<_> [] `, `2021-05-19T07:40:12.215792Z 42761518 [Note] Aborted connection 42761518 to db: 'hosted_grafana' user: 'hosted_grafana' host: '10.36.4.122' (Got an error reading communication packets)`, []string{"42761518", "Note"}, + false, }, { // Kubernetes api-server ` <_> <_> ] `, `W0519 07:46:47.647050 1 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {https://kubernetes-etcd-1.kubernetes-etcd:2379 0 }. Err :connection error: desc = "transport: Error while dialing dial tcp 10.32.85.85:2379: connect: connection refused". Reconnecting...`, []string{"W0519", "clientconn.go:1223"}, + false, }, { // Cassandra ` []<_> in .<_>`, `INFO [Service Thread] 2021-05-19 07:40:12,130 GCInspector.java:284 - ParNew GC in 248ms. 
CMS Old Gen: 5043436640 -> 5091062064; Par Eden Space: 671088640 -> 0; Par Survivor Space: 70188280 -> 60139760`, []string{"INFO", "Service Thread", "248ms"}, + true, }, { // Cortex & Loki distributor `<_> msg=" () "`, `level=debug ts=2021-05-19T07:54:26.864644382Z caller=logging.go:66 traceID=7fbb92fd0eb9c65d msg="POST /loki/api/v1/push (204) 1.238734ms"`, []string{"POST", "/loki/api/v1/push", "204", "1.238734ms"}, + true, }, { // Etcd `<_> <_> | : <_> peer <_> tcp :<_>`, `2021-05-19 08:16:50.181436 W | rafthttp: health check for peer fd8275e521cfb532 could not connect: dial tcp 10.32.85.85:2380: connect: connection refused`, []string{"W", "rafthttp", "fd8275e521cfb532", "10.32.85.85"}, + true, }, { // Kafka `<_>] [Log partition=, dir=] `, `[2021-05-19 08:35:28,681] INFO [Log partition=p-636-L-fs-117, dir=/data/kafka-logs] Deleting segment 455976081 (kafka.log.Log)`, []string{"INFO", "p-636-L-fs-117", "/data/kafka-logs"}, + false, }, { // Elastic `<_>][][] [] []`, `[2021-05-19T06:54:06,994][INFO ][o.e.c.m.MetaDataMappingService] [1f605d47-8454-4bfb-a67f-49f318bf837a] [usage-stats-2021.05.19/O2Je9IbmR8CqFyUvNpTttA] update_mapping [report]`, []string{"INFO ", "o.e.c.m.MetaDataMappingService", "1f605d47-8454-4bfb-a67f-49f318bf837a", "usage-stats-2021.05.19/O2Je9IbmR8CqFyUvNpTttA"}, + false, }, { // Envoy `<_> " <_>" <_> "" "" <_> <_> ""`, `[2016-04-15T20:17:00.310Z] "POST /api/v1/locations HTTP/2" 204 - 154 0 226 100 "10.0.35.28" "nsq2http" "cc21d9b0-cf5c-432b-8c7e-98aeb7988cd2" "locations" "tcp://10.0.2.1:80"`, []string{"POST", "/api/v1/locations", "204", "154", "0", "226", "100", "10.0.35.28", "nsq2http", "tcp://10.0.2.1:80"}, + true, }, } @@ -112,12 +160,14 @@ func Test_matcher_Matches(t *testing.T) { t.Parallel() m, err := New(tt.expr) require.NoError(t, err) - actual := m.Matches([]byte(tt.in)) + line := []byte(tt.in) + assert.Equal(t, tt.matches, m.Test(line)) + actual := m.Matches(line) var actualStrings []string for _, a := range actual { actualStrings = append(actualStrings, string(a)) } - require.Equal(t, tt.expected, actualStrings) + assert.Equal(t, tt.expected, actualStrings) }) } } @@ -162,3 +212,40 @@ func Test_Error(t *testing.T) { }) } } + +func Test_ParseLineFilter(t *testing.T) { + for _, tt := range []struct { + name string + err error + }{ + {"<_>", nil}, // Meaningless, but valid: matches everything. + {"", nil}, // Empty pattern matches empty lines. 
+ {"foo <_> bar <_>", nil}, + {" bar <_>", fmt.Errorf("%w: found ''", ErrCaptureNotAllowed)}, + {"", fmt.Errorf("%w: found ''", ErrCaptureNotAllowed)}, + } { + t.Run(tt.name, func(t *testing.T) { + _, err := ParseLineFilter([]byte(tt.name)) + require.Equal(t, tt.err, err) + }) + } +} + +func Test_ParseLiterals(t *testing.T) { + for _, tt := range []struct { + pattern string + lit [][]byte + err error + }{ + {"<_>", [][]byte{}, nil}, + {"", nil, newParseError("syntax error: unexpected $end, expecting IDENTIFIER or LITERAL", 1, 1)}, + {"foo <_> bar <_>", [][]byte{[]byte("foo "), []byte(" bar ")}, nil}, + {"", [][]byte{}, nil}, + } { + t.Run(tt.pattern, func(t *testing.T) { + lit, err := ParseLiterals(tt.pattern) + require.Equal(t, tt.err, err) + require.Equal(t, tt.lit, lit) + }) + } +} diff --git a/pkg/logql/syntax/ast.go b/pkg/logql/syntax/ast.go index e1b796c4fbedd..78b6330809e5d 100644 --- a/pkg/logql/syntax/ast.go +++ b/pkg/logql/syntax/ast.go @@ -355,7 +355,7 @@ func newLineFilterExpr(ty log.LineMatchType, op, match string) *LineFilterExpr { func newOrLineFilter(left, right *LineFilterExpr) *LineFilterExpr { right.Ty = left.Ty - if left.Ty == log.LineMatchEqual || left.Ty == log.LineMatchRegexp { + if left.Ty == log.LineMatchEqual || left.Ty == log.LineMatchRegexp || left.Ty == log.LineMatchPattern { left.Or = right right.IsOrChild = true return left diff --git a/pkg/logql/syntax/ast_test.go b/pkg/logql/syntax/ast_test.go index 95f654d2c647f..ce2dca62f9d43 100644 --- a/pkg/logql/syntax/ast_test.go +++ b/pkg/logql/syntax/ast_test.go @@ -24,7 +24,7 @@ func Test_logSelectorExpr_String(t *testing.T) { {`{foo="bar"}`, false}, {`{foo="bar", bar!="baz"}`, false}, {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop"`, true}, - {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, true}, + {`{foo="bar"} |= "baz" |~ "blip" |> "qux" !> "waldo" != "flip" !~ "flap"`, true}, {`{foo="bar", bar!="baz"} |= ""`, false}, {`{foo="bar", bar!="baz"} |= "" |= ip("::1")`, true}, {`{foo="bar", bar!="baz"} |= "" != ip("127.0.0.1")`, true}, @@ -32,7 +32,10 @@ func Test_logSelectorExpr_String(t *testing.T) { {`{foo="bar", bar!="baz"} |~ ".*"`, false}, {`{foo="bar", bar!="baz"} |= "" |= ""`, false}, {`{foo="bar", bar!="baz"} |~ "" |= "" |~ ".*"`, false}, - {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" | json`, true}, + {`{foo="bar", bar!="baz"} |> ""`, true}, + {`{foo="bar", bar!="baz"} |> "<_>"`, true}, + {`{foo="bar", bar!="baz"} |> "<_>" !> "<_> <_>"`, true}, + {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" |> "<_> bop <_>" | json`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt --strict`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt --strict --keep-empty`, true}, @@ -275,6 +278,7 @@ func Test_NilFilterDoesntPanic(t *testing.T) { `{namespace="dev", container_name="cart"} |= "bleep" |= "" |= "bloop"`, `{namespace="dev", container_name="cart"} |= "bleep" |= "" |= "bloop"`, `{namespace="dev", container_name="cart"} |= "bleep" |= "bloop" |= ""`, + `{namespace="dev", container_name="cart"} !> ""`, } { t.Run(tc, func(t *testing.T) { expr, err := ParseLogSelector(tc, true) @@ -355,6 +359,20 @@ func Test_FilterMatcher(t *testing.T) { }, []linecheck{{"foo", true}, {"bar", false}, {"foobar", true}}, }, + { + `{app="foo"} |> "foo <_>"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo bar", true}, {"foo", false}}, + }, + { + `{app="foo"} !> "foo <_>"`, + 
[]*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo bar", false}, {"foo", true}}, + }, { `{app="foo"} |~ "foo\\.bar\\.baz"`, []*labels.Matcher{ @@ -425,6 +443,20 @@ func Test_FilterMatcher(t *testing.T) { }, []linecheck{{"foo", false}, {"bar", true}, {"127.0.0.2", true}, {"127.0.0.1", false}}, }, + { + `{app="foo"} |> "foo" or "bar"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo", true}, {"bar", true}, {"none", false}}, + }, + { + `{app="foo"} !> "foo" or "bar"`, + []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "app", "foo"), + }, + []linecheck{{"foo", false}, {"bar", false}, {"none", true}}, + }, } { tt := tt t.Run(tt.q, func(t *testing.T) { @@ -455,6 +487,8 @@ func TestOrLineFilterTypes(t *testing.T) { {log.LineMatchNotEqual}, {log.LineMatchRegexp}, {log.LineMatchNotRegexp}, + {log.LineMatchPattern}, + {log.LineMatchNotPattern}, } { t.Run("right inherits left's type", func(t *testing.T) { left := &LineFilterExpr{LineFilter: LineFilter{Ty: tt.ty, Match: "something"}} @@ -523,6 +557,14 @@ func TestStringer(t *testing.T) { in: `{app="foo"} |~ ip("127.0.0.1") or "foo"`, out: `{app="foo"} |~ ip("127.0.0.1") or "foo"`, }, + { + in: `{app="foo"} |> "foo <_> baz" or "foo <_>"`, + out: `{app="foo"} |> "foo <_> baz" or "foo <_>"`, + }, + { + in: `{app="foo"} |> "foo <_> baz" or "foo <_>" |> "foo <_> baz"`, + out: `{app="foo"} |> "foo <_> baz" or "foo <_>" |> "foo <_> baz"`, + }, { // !(A || B) == !A && !B in: `{app="foo"} != "foo" or "bar"`, out: `{app="foo"} != "foo" != "bar"`, @@ -539,6 +581,10 @@ func TestStringer(t *testing.T) { in: `{app="foo"} !~ ip("127.0.0.1") or "foo"`, out: `{app="foo"} !~ ip("127.0.0.1") !~ "foo"`, }, + { + in: `{app="foo"} !> "<_> foo <_>" or "foo <_>" !> "foo <_> baz"`, + out: `{app="foo"} !> "<_> foo <_>" !> "foo <_>" !> "foo <_> baz"`, + }, } { t.Run(tc.in, func(t *testing.T) { expr, err := ParseExpr(tc.in) @@ -563,19 +609,19 @@ func BenchmarkContainsFilter(b *testing.B) { }{ { "AllMatches", - `{app="foo"} |= "foo" |= "hello" |= "world" |= "bar"`, + `{app="foo"} |= "foo" |= "hello" |= "world" |= "bar" |> "<_> world <_>"`, }, { "OneMatches", - `{app="foo"} |= "foo" |= "not" |= "in" |= "there"`, + `{app="foo"} |= "foo" |= "not" |= "in" |= "there" |> "yet"`, }, { "MixedFiltersTrue", - `{app="foo"} |= "foo" != "not" |~ "hello.*bar" != "there" |= "world"`, + `{app="foo"} |= "foo" != "not" |~ "hello.*bar" != "there" |= "world" |> "<_> more than one <_>"`, }, { "MixedFiltersFalse", - `{app="foo"} |= "baz" != "not" |~ "hello.*bar" != "there" |= "world"`, + `{app="foo"} |= "baz" != "not" |~ "hello.*bar" != "there" |= "world" !> "<_> more than one"`, }, { "GreedyRegex", diff --git a/pkg/logql/syntax/expr.y b/pkg/logql/syntax/expr.y index 043642d526ad1..0386406a87f81 100644 --- a/pkg/logql/syntax/expr.y +++ b/pkg/logql/syntax/expr.y @@ -134,7 +134,7 @@ import ( %token BYTES %token IDENTIFIER STRING NUMBER PARSER_FLAG %token DURATION RANGE -%token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT +%token MATCHERS LABELS EQ RE NRE NPA OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT PIPE_PATTERN OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE RATE_COUNTER SUM SORT SORT_DESC AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT UNWRAP AVG_OVER_TIME SUM_OVER_TIME MIN_OVER_TIME MAX_OVER_TIME STDVAR_OVER_TIME 
STDDEV_OVER_TIME QUANTILE_OVER_TIME BYTES_CONV DURATION_CONV DURATION_SECONDS_CONV @@ -241,8 +241,10 @@ labelReplaceExpr: filter: PIPE_MATCH { $$ = log.LineMatchRegexp } | PIPE_EXACT { $$ = log.LineMatchEqual } + | PIPE_PATTERN { $$ = log.LineMatchPattern } | NRE { $$ = log.LineMatchNotRegexp } | NEQ { $$ = log.LineMatchNotEqual } + | NPA { $$ = log.LineMatchNotPattern } ; selector: diff --git a/pkg/logql/syntax/expr.y.go b/pkg/logql/syntax/expr.y.go index 41da7466fbc20..48ba393ad5e2a 100644 --- a/pkg/logql/syntax/expr.y.go +++ b/pkg/logql/syntax/expr.y.go @@ -87,83 +87,85 @@ const LABELS = 57354 const EQ = 57355 const RE = 57356 const NRE = 57357 -const OPEN_BRACE = 57358 -const CLOSE_BRACE = 57359 -const OPEN_BRACKET = 57360 -const CLOSE_BRACKET = 57361 -const COMMA = 57362 -const DOT = 57363 -const PIPE_MATCH = 57364 -const PIPE_EXACT = 57365 -const OPEN_PARENTHESIS = 57366 -const CLOSE_PARENTHESIS = 57367 -const BY = 57368 -const WITHOUT = 57369 -const COUNT_OVER_TIME = 57370 -const RATE = 57371 -const RATE_COUNTER = 57372 -const SUM = 57373 -const SORT = 57374 -const SORT_DESC = 57375 -const AVG = 57376 -const MAX = 57377 -const MIN = 57378 -const COUNT = 57379 -const STDDEV = 57380 -const STDVAR = 57381 -const BOTTOMK = 57382 -const TOPK = 57383 -const BYTES_OVER_TIME = 57384 -const BYTES_RATE = 57385 -const BOOL = 57386 -const JSON = 57387 -const REGEXP = 57388 -const LOGFMT = 57389 -const PIPE = 57390 -const LINE_FMT = 57391 -const LABEL_FMT = 57392 -const UNWRAP = 57393 -const AVG_OVER_TIME = 57394 -const SUM_OVER_TIME = 57395 -const MIN_OVER_TIME = 57396 -const MAX_OVER_TIME = 57397 -const STDVAR_OVER_TIME = 57398 -const STDDEV_OVER_TIME = 57399 -const QUANTILE_OVER_TIME = 57400 -const BYTES_CONV = 57401 -const DURATION_CONV = 57402 -const DURATION_SECONDS_CONV = 57403 -const FIRST_OVER_TIME = 57404 -const LAST_OVER_TIME = 57405 -const ABSENT_OVER_TIME = 57406 -const VECTOR = 57407 -const LABEL_REPLACE = 57408 -const UNPACK = 57409 -const OFFSET = 57410 -const PATTERN = 57411 -const IP = 57412 -const ON = 57413 -const IGNORING = 57414 -const GROUP_LEFT = 57415 -const GROUP_RIGHT = 57416 -const DECOLORIZE = 57417 -const DROP = 57418 -const KEEP = 57419 -const OR = 57420 -const AND = 57421 -const UNLESS = 57422 -const CMP_EQ = 57423 -const NEQ = 57424 -const LT = 57425 -const LTE = 57426 -const GT = 57427 -const GTE = 57428 -const ADD = 57429 -const SUB = 57430 -const MUL = 57431 -const DIV = 57432 -const MOD = 57433 -const POW = 57434 +const NPA = 57358 +const OPEN_BRACE = 57359 +const CLOSE_BRACE = 57360 +const OPEN_BRACKET = 57361 +const CLOSE_BRACKET = 57362 +const COMMA = 57363 +const DOT = 57364 +const PIPE_MATCH = 57365 +const PIPE_EXACT = 57366 +const PIPE_PATTERN = 57367 +const OPEN_PARENTHESIS = 57368 +const CLOSE_PARENTHESIS = 57369 +const BY = 57370 +const WITHOUT = 57371 +const COUNT_OVER_TIME = 57372 +const RATE = 57373 +const RATE_COUNTER = 57374 +const SUM = 57375 +const SORT = 57376 +const SORT_DESC = 57377 +const AVG = 57378 +const MAX = 57379 +const MIN = 57380 +const COUNT = 57381 +const STDDEV = 57382 +const STDVAR = 57383 +const BOTTOMK = 57384 +const TOPK = 57385 +const BYTES_OVER_TIME = 57386 +const BYTES_RATE = 57387 +const BOOL = 57388 +const JSON = 57389 +const REGEXP = 57390 +const LOGFMT = 57391 +const PIPE = 57392 +const LINE_FMT = 57393 +const LABEL_FMT = 57394 +const UNWRAP = 57395 +const AVG_OVER_TIME = 57396 +const SUM_OVER_TIME = 57397 +const MIN_OVER_TIME = 57398 +const MAX_OVER_TIME = 57399 +const STDVAR_OVER_TIME = 57400 +const STDDEV_OVER_TIME 
= 57401 +const QUANTILE_OVER_TIME = 57402 +const BYTES_CONV = 57403 +const DURATION_CONV = 57404 +const DURATION_SECONDS_CONV = 57405 +const FIRST_OVER_TIME = 57406 +const LAST_OVER_TIME = 57407 +const ABSENT_OVER_TIME = 57408 +const VECTOR = 57409 +const LABEL_REPLACE = 57410 +const UNPACK = 57411 +const OFFSET = 57412 +const PATTERN = 57413 +const IP = 57414 +const ON = 57415 +const IGNORING = 57416 +const GROUP_LEFT = 57417 +const GROUP_RIGHT = 57418 +const DECOLORIZE = 57419 +const DROP = 57420 +const KEEP = 57421 +const OR = 57422 +const AND = 57423 +const UNLESS = 57424 +const CMP_EQ = 57425 +const NEQ = 57426 +const LT = 57427 +const LTE = 57428 +const GT = 57429 +const GTE = 57430 +const ADD = 57431 +const SUB = 57432 +const MUL = 57433 +const DIV = 57434 +const MOD = 57435 +const POW = 57436 var exprToknames = [...]string{ "$end", @@ -181,6 +183,7 @@ var exprToknames = [...]string{ "EQ", "RE", "NRE", + "NPA", "OPEN_BRACE", "CLOSE_BRACE", "OPEN_BRACKET", @@ -189,6 +192,7 @@ var exprToknames = [...]string{ "DOT", "PIPE_MATCH", "PIPE_EXACT", + "PIPE_PATTERN", "OPEN_PARENTHESIS", "CLOSE_PARENTHESIS", "BY", @@ -273,121 +277,122 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 592 +const exprLast = 608 var exprAct = [...]int{ - 287, 226, 82, 4, 212, 64, 180, 124, 202, 187, - 73, 198, 195, 63, 235, 5, 150, 185, 75, 2, - 56, 78, 48, 49, 50, 57, 58, 61, 62, 59, + 289, 228, 84, 4, 214, 64, 182, 126, 204, 189, + 75, 200, 197, 63, 237, 5, 152, 187, 77, 2, + 56, 80, 48, 49, 50, 57, 58, 61, 62, 59, 60, 51, 52, 53, 54, 55, 56, 49, 50, 57, 58, 61, 62, 59, 60, 51, 52, 53, 54, 55, 56, 57, 58, 61, 62, 59, 60, 51, 52, 53, - 54, 55, 56, 51, 52, 53, 54, 55, 56, 107, - 146, 148, 149, 113, 53, 54, 55, 56, 205, 148, - 149, 281, 215, 137, 290, 154, 164, 165, 213, 293, - 138, 159, 67, 293, 71, 214, 152, 295, 71, 162, - 163, 69, 70, 292, 345, 69, 70, 364, 161, 364, - 92, 384, 166, 167, 168, 169, 170, 171, 172, 173, - 174, 175, 176, 177, 178, 179, 71, 228, 379, 290, - 337, 228, 304, 69, 70, 192, 134, 354, 189, 147, - 200, 204, 83, 84, 372, 367, 211, 206, 209, 210, - 207, 208, 140, 217, 140, 128, 134, 291, 108, 228, - 233, 72, 371, 337, 139, 72, 227, 291, 292, 229, - 230, 182, 344, 238, 222, 128, 120, 121, 119, 225, - 129, 131, 295, 81, 71, 83, 84, 369, 246, 247, - 248, 69, 70, 72, 296, 292, 222, 329, 122, 225, - 123, 292, 250, 357, 71, 292, 130, 132, 133, 347, - 304, 69, 70, 304, 71, 353, 71, 228, 352, 299, - 283, 69, 70, 69, 70, 134, 285, 288, 328, 294, - 181, 297, 237, 107, 300, 113, 301, 228, 71, 289, - 152, 286, 237, 298, 128, 69, 70, 66, 304, 228, - 222, 72, 314, 351, 302, 134, 237, 308, 310, 313, - 315, 316, 312, 241, 200, 204, 323, 318, 322, 290, - 182, 72, 231, 223, 128, 253, 311, 134, 134, 304, - 338, 72, 237, 72, 306, 304, 326, 142, 141, 330, - 305, 332, 334, 182, 336, 107, 128, 128, 361, 335, - 346, 331, 309, 237, 107, 72, 264, 348, 219, 265, - 237, 263, 134, 325, 324, 282, 13, 120, 121, 119, - 245, 129, 131, 239, 153, 244, 382, 182, 183, 181, - 236, 128, 358, 359, 340, 341, 342, 107, 360, 122, - 243, 123, 242, 216, 362, 363, 158, 130, 132, 133, - 368, 183, 181, 151, 157, 260, 156, 218, 261, 16, - 259, 88, 13, 87, 374, 80, 375, 376, 13, 378, - 153, 255, 262, 350, 251, 303, 6, 257, 380, 256, - 21, 22, 23, 36, 45, 46, 37, 39, 40, 38, - 41, 42, 43, 44, 24, 25, 254, 240, 232, 224, - 252, 79, 234, 377, 26, 27, 28, 29, 30, 31, - 32, 13, 366, 77, 33, 34, 35, 47, 19, 6, - 365, 258, 343, 21, 22, 23, 36, 45, 46, 37, - 39, 40, 38, 41, 42, 43, 44, 24, 25, 17, - 18, 279, 333, 160, 280, 
155, 278, 26, 27, 28, - 29, 30, 31, 32, 13, 89, 86, 33, 34, 35, - 47, 19, 6, 320, 321, 383, 21, 22, 23, 36, - 45, 46, 37, 39, 40, 38, 41, 42, 43, 44, - 24, 25, 17, 18, 276, 85, 373, 277, 381, 275, - 26, 27, 28, 29, 30, 31, 32, 370, 356, 355, - 33, 34, 35, 47, 19, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 102, 103, 104, 105, 106, 144, - 188, 125, 327, 249, 273, 17, 18, 274, 270, 272, - 349, 271, 317, 269, 143, 267, 3, 145, 268, 188, - 266, 319, 186, 74, 196, 126, 307, 284, 221, 220, - 219, 218, 193, 191, 190, 203, 199, 188, 79, 196, - 111, 112, 194, 116, 201, 118, 197, 117, 115, 114, - 184, 65, 135, 127, 136, 109, 110, 91, 90, 11, - 10, 9, 20, 12, 15, 8, 339, 14, 7, 76, - 68, 1, + 54, 55, 56, 51, 52, 53, 54, 55, 56, 109, + 148, 150, 151, 115, 53, 54, 55, 56, 207, 150, + 151, 283, 217, 139, 166, 167, 292, 156, 215, 295, + 216, 72, 74, 161, 72, 74, 164, 165, 154, 69, + 70, 71, 69, 70, 71, 297, 347, 140, 67, 294, + 163, 366, 136, 366, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, 184, 230, + 339, 292, 94, 130, 255, 136, 339, 194, 85, 86, + 191, 149, 202, 206, 293, 136, 386, 369, 213, 208, + 211, 212, 209, 210, 142, 219, 130, 363, 306, 141, + 73, 184, 235, 73, 356, 381, 130, 239, 229, 374, + 294, 231, 232, 142, 110, 240, 294, 122, 123, 121, + 227, 131, 133, 297, 294, 72, 74, 185, 183, 316, + 248, 249, 250, 69, 70, 71, 373, 298, 371, 124, + 295, 125, 136, 359, 252, 72, 74, 132, 134, 135, + 293, 349, 239, 69, 70, 71, 72, 74, 184, 224, + 230, 183, 285, 130, 69, 70, 71, 346, 287, 290, + 330, 296, 136, 299, 314, 109, 302, 115, 303, 304, + 230, 291, 154, 288, 331, 300, 306, 72, 74, 243, + 294, 230, 355, 130, 73, 69, 70, 71, 306, 310, + 312, 315, 317, 318, 354, 233, 202, 206, 325, 320, + 324, 292, 340, 83, 73, 85, 86, 185, 183, 136, + 144, 266, 230, 221, 267, 73, 265, 262, 328, 220, + 263, 332, 261, 334, 336, 184, 338, 109, 239, 306, + 130, 337, 348, 333, 239, 353, 109, 224, 227, 350, + 327, 306, 306, 72, 74, 239, 73, 308, 307, 239, + 313, 69, 70, 71, 13, 224, 311, 143, 342, 343, + 344, 379, 301, 155, 360, 361, 384, 241, 326, 109, + 362, 238, 284, 153, 247, 246, 364, 365, 230, 264, + 225, 245, 370, 13, 244, 260, 218, 160, 159, 158, + 16, 90, 155, 89, 72, 74, 376, 82, 377, 378, + 13, 380, 69, 70, 71, 352, 253, 305, 259, 6, + 382, 258, 73, 21, 22, 23, 36, 45, 46, 37, + 39, 40, 38, 41, 42, 43, 44, 24, 25, 66, + 256, 242, 234, 226, 257, 254, 368, 26, 27, 28, + 29, 30, 31, 32, 81, 146, 367, 33, 34, 35, + 47, 19, 236, 281, 345, 335, 282, 79, 280, 322, + 323, 145, 13, 73, 147, 278, 162, 88, 279, 87, + 277, 6, 17, 18, 385, 21, 22, 23, 36, 45, + 46, 37, 39, 40, 38, 41, 42, 43, 44, 24, + 25, 275, 272, 127, 276, 273, 274, 271, 383, 26, + 27, 28, 29, 30, 31, 32, 372, 358, 357, 33, + 34, 35, 47, 19, 157, 269, 329, 375, 270, 190, + 268, 3, 251, 190, 13, 319, 188, 321, 76, 309, + 198, 128, 286, 6, 17, 18, 136, 21, 22, 23, + 36, 45, 46, 37, 39, 40, 38, 41, 42, 43, + 44, 24, 25, 223, 222, 221, 220, 130, 195, 193, + 192, 26, 27, 28, 29, 30, 31, 32, 91, 351, + 205, 33, 34, 35, 47, 19, 201, 190, 122, 123, + 121, 81, 131, 133, 198, 113, 114, 196, 118, 203, + 120, 199, 119, 117, 116, 186, 17, 18, 65, 137, + 124, 129, 125, 138, 111, 112, 93, 92, 132, 134, + 135, 11, 10, 9, 20, 12, 15, 8, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, + 107, 108, 341, 14, 7, 78, 68, 1, } var exprPact = [...]int{ - 352, -1000, -56, -1000, -1000, 199, 352, -1000, -1000, -1000, - -1000, -1000, -1000, 396, 341, 159, -1000, 478, 449, 339, - 337, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 
353, -1000, -58, -1000, -1000, 349, 353, -1000, -1000, -1000, + -1000, -1000, -1000, 409, 341, 247, -1000, 432, 430, 337, + 335, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 66, 66, - 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, - 66, 66, 66, 199, -1000, 223, 272, 5, 84, -1000, - -1000, -1000, -1000, 263, 262, -56, 517, -1000, -1000, 57, - 346, 438, 332, 330, 322, -1000, -1000, 352, 436, 352, - 28, 13, -1000, 352, 352, 352, 352, 352, 352, 352, - 352, 352, 352, 352, 352, 352, 352, -1000, -1000, -1000, - -1000, -1000, -1000, 273, -1000, -1000, -1000, -1000, -1000, 534, - 552, 548, -1000, 547, -1000, -1000, -1000, -1000, 220, 546, - -1000, 554, 551, 550, 65, -1000, -1000, 82, 4, 319, - -1000, -1000, -1000, -1000, -1000, 553, 545, 544, 543, 542, - 248, 379, 189, 300, 247, 378, 395, 305, 298, 377, - 238, -42, 318, 316, 301, 296, -30, -30, -15, -15, - -72, -72, -72, -72, -24, -24, -24, -24, -24, -24, - 273, 220, 220, 220, 515, 354, -1000, -1000, 387, 354, - -1000, -1000, 250, -1000, 376, -1000, 358, 359, -1000, 57, - -1000, 357, -1000, 57, -1000, 351, 302, 531, 524, 520, - 480, 437, -1000, 3, 291, 82, 541, -1000, -1000, -1000, - -1000, -1000, -1000, 116, 300, 201, 157, 83, 131, 169, - 194, 116, 352, 229, 355, 265, -1000, -1000, 259, -1000, - 540, -1000, 277, 251, 237, 227, 307, 273, 151, -1000, - 354, 552, 526, -1000, 539, 458, 551, 550, 290, -1000, - -1000, -1000, 289, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 86, 86, + 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, + 86, 86, 86, 349, -1000, 76, 501, 3, 101, -1000, + -1000, -1000, -1000, -1000, -1000, 300, 253, -58, 413, -1000, + -1000, 57, 336, 477, 333, 332, 331, -1000, -1000, 353, + 429, 353, 23, 9, -1000, 353, 353, 353, 353, 353, + 353, 353, 353, 353, 353, 353, 353, 353, 353, -1000, + -1000, -1000, -1000, -1000, -1000, 197, -1000, -1000, -1000, -1000, + -1000, 488, 542, 524, -1000, 523, -1000, -1000, -1000, -1000, + 227, 522, -1000, 549, 541, 535, 65, -1000, -1000, 82, + 2, 330, -1000, -1000, -1000, -1000, -1000, 546, 520, 519, + 518, 517, 323, 382, 298, 307, 238, 381, 415, 314, + 310, 380, 222, -44, 328, 325, 319, 318, -32, -32, + -17, -17, -74, -74, -74, -74, -26, -26, -26, -26, + -26, -26, 197, 227, 227, 227, 484, 355, -1000, -1000, + 392, 355, -1000, -1000, 107, -1000, 379, -1000, 391, 360, + -1000, 57, -1000, 357, -1000, 57, -1000, 283, 277, 481, + 458, 457, 431, 419, -1000, 1, 316, 82, 496, -1000, + -1000, -1000, -1000, -1000, -1000, 110, 307, 201, 134, 190, + 130, 170, 305, 110, 353, 212, 356, 291, -1000, -1000, + 290, -1000, 493, -1000, 299, 293, 207, 162, 274, 197, + 140, -1000, 355, 542, 489, -1000, 495, 424, 541, 535, + 312, -1000, -1000, -1000, 284, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 82, 516, -1000, 203, -1000, 172, 111, 55, 111, - 433, 16, 220, 16, 153, 275, 412, 147, 79, -1000, - -1000, 184, -1000, 352, 525, -1000, -1000, 353, 228, -1000, - 193, -1000, -1000, 190, -1000, 112, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, 493, 492, -1000, 178, -1000, 116, - 55, 111, 55, -1000, -1000, 273, -1000, 16, -1000, 274, - -1000, -1000, -1000, 61, 410, 402, 120, 116, 162, -1000, - 491, -1000, -1000, -1000, -1000, 137, 119, -1000, -1000, 55, - -1000, 481, 59, 55, 46, 16, 16, 393, -1000, -1000, - 349, -1000, -1000, 103, 55, -1000, -1000, 16, 482, -1000, - 
-1000, 306, 459, 86, -1000, + -1000, -1000, -1000, 82, 480, -1000, 203, -1000, 217, 232, + 59, 232, 416, 16, 227, 16, 126, 267, 414, 200, + 79, -1000, -1000, 184, -1000, 353, 534, -1000, -1000, 354, + 278, -1000, 237, -1000, -1000, 225, -1000, 137, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, 472, 471, -1000, 176, + -1000, 110, 59, 232, 59, -1000, -1000, 197, -1000, 16, + -1000, 131, -1000, -1000, -1000, 61, 406, 396, 120, 110, + 171, -1000, 470, -1000, -1000, -1000, -1000, 169, 142, -1000, + -1000, 59, -1000, 482, 63, 59, 52, 16, 16, 321, + -1000, -1000, 350, -1000, -1000, 138, 59, -1000, -1000, 16, + 462, -1000, -1000, 315, 438, 119, -1000, } var exprPgo = [...]int{ - 0, 591, 18, 590, 2, 14, 536, 3, 16, 7, - 589, 588, 587, 586, 15, 585, 584, 583, 582, 95, - 581, 580, 579, 455, 578, 577, 576, 575, 13, 5, - 574, 573, 572, 6, 571, 92, 4, 570, 569, 568, - 567, 566, 11, 565, 564, 8, 563, 12, 562, 9, - 17, 561, 560, 1, 545, 521, 0, + 0, 607, 18, 606, 2, 14, 491, 3, 16, 7, + 605, 604, 603, 602, 15, 587, 586, 585, 584, 90, + 583, 582, 581, 538, 577, 576, 575, 574, 13, 5, + 573, 571, 569, 6, 568, 108, 4, 565, 564, 563, + 562, 561, 11, 560, 559, 8, 558, 12, 557, 9, + 17, 556, 555, 1, 501, 463, 0, } var exprR1 = [...]int{ @@ -397,23 +402,23 @@ var exprR1 = [...]int{ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 53, 53, 53, 13, 13, 13, 11, 11, 11, 11, 15, 15, 15, 15, 15, 15, 22, 3, 3, 3, - 3, 14, 14, 14, 10, 10, 9, 9, 9, 9, - 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, - 29, 29, 29, 19, 36, 36, 36, 35, 35, 35, - 34, 34, 34, 37, 37, 27, 27, 26, 26, 26, - 26, 52, 51, 51, 38, 39, 47, 47, 48, 48, - 48, 46, 33, 33, 33, 33, 33, 33, 33, 33, - 33, 49, 49, 50, 50, 55, 55, 54, 54, 32, - 32, 32, 32, 32, 32, 32, 30, 30, 30, 30, - 30, 30, 30, 31, 31, 31, 31, 31, 31, 31, - 42, 42, 41, 41, 40, 45, 45, 44, 44, 43, - 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 20, 20, 24, 24, 25, 25, 25, - 25, 23, 23, 23, 23, 23, 23, 23, 23, 21, - 21, 21, 17, 18, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 12, 12, 12, 12, 12, + 3, 3, 3, 14, 14, 14, 10, 10, 9, 9, + 9, 9, 28, 28, 29, 29, 29, 29, 29, 29, + 29, 29, 29, 29, 29, 19, 36, 36, 36, 35, + 35, 35, 34, 34, 34, 37, 37, 27, 27, 26, + 26, 26, 26, 52, 51, 51, 38, 39, 47, 47, + 48, 48, 48, 46, 33, 33, 33, 33, 33, 33, + 33, 33, 33, 49, 49, 50, 50, 55, 55, 54, + 54, 32, 32, 32, 32, 32, 32, 32, 30, 30, + 30, 30, 30, 30, 30, 31, 31, 31, 31, 31, + 31, 31, 42, 42, 41, 41, 40, 45, 45, 44, + 44, 43, 20, 20, 20, 20, 20, 20, 20, 20, + 20, 20, 20, 20, 20, 20, 20, 24, 24, 25, + 25, 25, 25, 23, 23, 23, 23, 23, 23, 23, + 23, 21, 21, 21, 17, 18, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 16, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, - 56, 5, 5, 4, 4, 4, 4, + 12, 12, 56, 5, 5, 4, 4, 4, 4, } var exprR2 = [...]int{ @@ -423,107 +428,107 @@ var exprR2 = [...]int{ 4, 5, 6, 7, 3, 4, 4, 5, 3, 2, 3, 6, 3, 1, 1, 1, 4, 6, 5, 7, 4, 5, 5, 6, 7, 7, 12, 1, 1, 1, - 1, 3, 3, 2, 1, 3, 3, 3, 3, 3, - 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 1, 1, 4, 3, 2, 5, 4, - 1, 3, 2, 1, 2, 1, 2, 1, 2, 1, - 2, 2, 3, 2, 2, 1, 3, 3, 1, 3, - 3, 2, 1, 1, 1, 1, 3, 2, 3, 3, - 3, 3, 1, 1, 3, 6, 6, 1, 1, 3, + 1, 1, 1, 3, 3, 2, 1, 3, 3, 3, + 3, 3, 1, 2, 1, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 1, 1, 4, 3, 2, + 5, 4, 1, 3, 2, 1, 2, 1, 2, 1, + 2, 1, 2, 2, 3, 2, 2, 1, 3, 3, + 1, 3, 3, 2, 1, 1, 1, 1, 3, 2, + 3, 3, 3, 3, 1, 1, 3, 6, 6, 1, + 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 1, 1, 1, 3, 2, 1, 1, 1, 3, 2, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 0, 1, 5, 4, 5, - 4, 1, 1, 
2, 4, 5, 2, 4, 5, 1, - 2, 2, 4, 1, 1, 1, 1, 1, 1, 1, + 3, 3, 1, 1, 1, 3, 2, 1, 1, 1, + 3, 2, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 0, 1, 5, + 4, 5, 4, 1, 1, 2, 4, 5, 2, 4, + 5, 1, 2, 2, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 2, 1, 3, 4, 4, 3, 3, + 1, 1, 2, 1, 3, 4, 4, 3, 3, } var exprChk = [...]int{ - -1000, -1, -2, -6, -7, -14, 24, -11, -15, -20, - -21, -22, -17, 16, -12, -16, 7, 87, 88, 66, - -18, 28, 29, 30, 42, 43, 52, 53, 54, 55, - 56, 57, 58, 62, 63, 64, 31, 34, 37, 35, - 36, 38, 39, 40, 41, 32, 33, 65, 78, 79, - 80, 87, 88, 89, 90, 91, 92, 81, 82, 85, - 86, 83, 84, -28, -29, -34, 48, -35, -3, 22, - 23, 15, 82, -7, -6, -2, -10, 17, -9, 5, - 24, 24, -4, 26, 27, 7, 7, 24, 24, -23, - -24, -25, 44, -23, -23, -23, -23, -23, -23, -23, - -23, -23, -23, -23, -23, -23, -23, -29, -35, -27, - -26, -52, -51, -33, -38, -39, -46, -40, -43, 47, - 45, 46, 67, 69, -9, -55, -54, -31, 24, 49, - 75, 50, 76, 77, 5, -32, -30, 78, 6, -19, - 70, 25, 25, 17, 2, 20, 13, 82, 14, 15, - -8, 7, -14, 24, -7, 7, 24, 24, 24, -7, - 7, -2, 71, 72, 73, 74, -2, -2, -2, -2, + -1000, -1, -2, -6, -7, -14, 26, -11, -15, -20, + -21, -22, -17, 17, -12, -16, 7, 89, 90, 68, + -18, 30, 31, 32, 44, 45, 54, 55, 56, 57, + 58, 59, 60, 64, 65, 66, 33, 36, 39, 37, + 38, 40, 41, 42, 43, 34, 35, 67, 80, 81, + 82, 89, 90, 91, 92, 93, 94, 83, 84, 87, + 88, 85, 86, -28, -29, -34, 50, -35, -3, 23, + 24, 25, 15, 84, 16, -7, -6, -2, -10, 18, + -9, 5, 26, 26, -4, 28, 29, 7, 7, 26, + 26, -23, -24, -25, 46, -23, -23, -23, -23, -23, + -23, -23, -23, -23, -23, -23, -23, -23, -23, -29, + -35, -27, -26, -52, -51, -33, -38, -39, -46, -40, + -43, 49, 47, 48, 69, 71, -9, -55, -54, -31, + 26, 51, 77, 52, 78, 79, 5, -32, -30, 80, + 6, -19, 72, 27, 27, 18, 2, 21, 13, 84, + 14, 15, -8, 7, -14, 26, -7, 7, 26, 26, + 26, -7, 7, -2, 73, 74, 75, 76, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -33, 79, 20, 78, -37, -50, 8, -49, 5, -50, - 6, 6, -33, 6, -48, -47, 5, -41, -42, 5, - -9, -44, -45, 5, -9, 13, 82, 85, 86, 83, - 84, 81, -36, 6, -19, 78, 24, -9, 6, 6, - 6, 6, 2, 25, 20, 10, -53, -28, 48, -14, - -8, 25, 20, -7, 7, -5, 25, 5, -5, 25, - 20, 25, 24, 24, 24, 24, -33, -33, -33, 8, - -50, 20, 13, 25, 20, 13, 20, 20, 70, 9, - 4, 7, 70, 9, 4, 7, 9, 4, 7, 9, - 4, 7, 9, 4, 7, 9, 4, 7, 9, 4, - 7, 78, 24, -36, 6, -4, -8, -56, -53, -28, - 68, 10, 48, 10, -53, 51, 25, -53, -28, 25, - -4, -7, 25, 20, 20, 25, 25, 6, -5, 25, - -5, 25, 25, -5, 25, -5, -49, 6, -47, 2, - 5, 6, -42, -45, 24, 24, -36, 6, 25, 25, - -53, -28, -53, 9, -56, -33, -56, 10, 5, -13, - 59, 60, 61, 10, 25, 25, -53, 25, -7, 5, - 20, 25, 25, 25, 25, 6, 6, 25, -4, -53, - -56, 24, -56, -53, 48, 10, 10, 25, -4, 25, - 6, 25, 25, 5, -53, -56, -56, 10, 20, 25, - -56, 6, 20, 6, 25, + -2, -2, -33, 81, 21, 80, -37, -50, 8, -49, + 5, -50, 6, 6, -33, 6, -48, -47, 5, -41, + -42, 5, -9, -44, -45, 5, -9, 13, 84, 87, + 88, 85, 86, 83, -36, 6, -19, 80, 26, -9, + 6, 6, 6, 6, 2, 27, 21, 10, -53, -28, + 50, -14, -8, 27, 21, -7, 7, -5, 27, 5, + -5, 27, 21, 27, 26, 26, 26, 26, -33, -33, + -33, 8, -50, 21, 13, 27, 21, 13, 21, 21, + 72, 9, 4, 7, 72, 9, 4, 7, 9, 4, + 7, 9, 4, 7, 9, 4, 7, 9, 4, 7, + 9, 4, 7, 80, 26, -36, 6, -4, -8, -56, + -53, -28, 70, 10, 50, 10, -53, 53, 27, -53, + -28, 27, -4, -7, 27, 21, 21, 27, 27, 6, + -5, 27, -5, 27, 27, -5, 27, -5, -49, 6, + -47, 2, 5, 6, -42, -45, 26, 26, -36, 6, + 27, 27, -53, -28, -53, 9, -56, -33, -56, 10, + 5, -13, 61, 62, 63, 10, 27, 27, -53, 27, + -7, 5, 21, 27, 27, 27, 27, 6, 6, 27, + -4, -53, -56, 26, -56, -53, 50, 10, 10, 
27, + -4, 27, 6, 27, 27, 5, -53, -56, -56, 10, + 21, 27, -56, 6, 21, 6, 27, } var exprDef = [...]int{ 0, -2, 1, 2, 3, 11, 0, 4, 5, 6, - 7, 8, 9, 0, 0, 0, 189, 0, 0, 0, - 0, 205, 206, 207, 208, 209, 210, 211, 212, 213, - 214, 215, 216, 217, 218, 219, 194, 195, 196, 197, - 198, 199, 200, 201, 202, 203, 204, 193, 175, 175, - 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, - 175, 175, 175, 12, 70, 72, 0, 90, 0, 57, - 58, 59, 60, 3, 2, 0, 0, 63, 64, 0, - 0, 0, 0, 0, 0, 190, 191, 0, 0, 0, - 181, 182, 176, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 71, 92, 73, - 74, 75, 76, 77, 78, 79, 80, 81, 82, 95, - 97, 0, 99, 0, 112, 113, 114, 115, 0, 0, - 105, 0, 0, 0, 0, 127, 128, 0, 87, 0, - 83, 10, 13, 61, 62, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 3, 189, 0, 0, 0, 3, - 0, 160, 0, 0, 183, 186, 161, 162, 163, 164, + 7, 8, 9, 0, 0, 0, 191, 0, 0, 0, + 0, 207, 208, 209, 210, 211, 212, 213, 214, 215, + 216, 217, 218, 219, 220, 221, 196, 197, 198, 199, + 200, 201, 202, 203, 204, 205, 206, 195, 177, 177, + 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, + 177, 177, 177, 12, 72, 74, 0, 92, 0, 57, + 58, 59, 60, 61, 62, 3, 2, 0, 0, 65, + 66, 0, 0, 0, 0, 0, 0, 192, 193, 0, + 0, 0, 183, 184, 178, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 73, + 94, 75, 76, 77, 78, 79, 80, 81, 82, 83, + 84, 97, 99, 0, 101, 0, 114, 115, 116, 117, + 0, 0, 107, 0, 0, 0, 0, 129, 130, 0, + 89, 0, 85, 10, 13, 63, 64, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 3, 191, 0, 0, + 0, 3, 0, 162, 0, 0, 185, 188, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, - 117, 0, 0, 0, 96, 103, 93, 123, 122, 101, - 98, 100, 0, 104, 111, 108, 0, 154, 152, 150, - 151, 159, 157, 155, 156, 0, 0, 0, 0, 0, - 0, 0, 91, 84, 0, 0, 0, 65, 66, 67, - 68, 69, 39, 46, 0, 14, 0, 0, 0, 0, - 0, 50, 0, 3, 189, 0, 225, 221, 0, 226, - 0, 192, 0, 0, 0, 0, 118, 119, 120, 94, - 102, 0, 0, 116, 0, 0, 0, 0, 0, 134, - 141, 148, 0, 133, 140, 147, 129, 136, 143, 130, - 137, 144, 131, 138, 145, 132, 139, 146, 135, 142, - 149, 0, 0, 89, 0, 48, 0, 15, 18, 34, - 0, 22, 0, 26, 0, 0, 0, 0, 0, 38, - 52, 3, 51, 0, 0, 223, 224, 0, 0, 178, - 0, 180, 184, 0, 187, 0, 124, 121, 109, 110, - 106, 107, 153, 158, 0, 0, 86, 0, 88, 47, - 19, 35, 36, 220, 23, 42, 27, 30, 40, 0, - 43, 44, 45, 16, 0, 0, 0, 53, 3, 222, - 0, 177, 179, 185, 188, 0, 0, 85, 49, 37, - 31, 0, 17, 20, 0, 24, 28, 0, 54, 55, - 0, 125, 126, 0, 21, 25, 29, 32, 0, 41, - 33, 0, 0, 0, 56, + 175, 176, 119, 0, 0, 0, 98, 105, 95, 125, + 124, 103, 100, 102, 0, 106, 113, 110, 0, 156, + 154, 152, 153, 161, 159, 157, 158, 0, 0, 0, + 0, 0, 0, 0, 93, 86, 0, 0, 0, 67, + 68, 69, 70, 71, 39, 46, 0, 14, 0, 0, + 0, 0, 0, 50, 0, 3, 191, 0, 227, 223, + 0, 228, 0, 194, 0, 0, 0, 0, 120, 121, + 122, 96, 104, 0, 0, 118, 0, 0, 0, 0, + 0, 136, 143, 150, 0, 135, 142, 149, 131, 138, + 145, 132, 139, 146, 133, 140, 147, 134, 141, 148, + 137, 144, 151, 0, 0, 91, 0, 48, 0, 15, + 18, 34, 0, 22, 0, 26, 0, 0, 0, 0, + 0, 38, 52, 3, 51, 0, 0, 225, 226, 0, + 0, 180, 0, 182, 186, 0, 189, 0, 126, 123, + 111, 112, 108, 109, 155, 160, 0, 0, 88, 0, + 90, 47, 19, 35, 36, 222, 23, 42, 27, 30, + 40, 0, 43, 44, 45, 16, 0, 0, 0, 53, + 3, 224, 0, 179, 181, 187, 190, 0, 0, 87, + 49, 37, 31, 0, 17, 20, 0, 24, 28, 0, + 54, 55, 0, 127, 128, 0, 21, 25, 29, 32, + 0, 41, 33, 0, 0, 0, 56, } var exprTok1 = [...]int{ @@ -540,7 +545,7 @@ var exprTok2 = [...]int{ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, + 92, 93, 94, } var exprTok3 = [...]int{ 0, @@ -1169,845 +1174,855 @@ exprdefault: case 59: exprDollar = 
exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = log.LineMatchNotRegexp + exprVAL.Filter = log.LineMatchPattern } case 60: exprDollar = exprS[exprpt-1 : exprpt+1] { - exprVAL.Filter = log.LineMatchNotEqual + exprVAL.Filter = log.LineMatchNotRegexp } case 61: + exprDollar = exprS[exprpt-1 : exprpt+1] + { + exprVAL.Filter = log.LineMatchNotEqual + } + case 62: + exprDollar = exprS[exprpt-1 : exprpt+1] + { + exprVAL.Filter = log.LineMatchNotPattern + } + case 63: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Selector = exprDollar[2].Matchers } - case 62: + case 64: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Selector = exprDollar[2].Matchers } - case 63: + case 65: exprDollar = exprS[exprpt-2 : exprpt+1] { } - case 64: + case 66: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 65: + case 67: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 66: + case 68: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 67: + case 69: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 68: + case 70: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 69: + case 71: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 70: + case 72: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.PipelineExpr = MultiStageExpr{exprDollar[1].PipelineStage} } - case 71: + case 73: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } - case 72: + case 74: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.PipelineStage = exprDollar[1].LineFilters } - case 73: + case 75: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LogfmtParser } - case 74: + case 76: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LabelParser } - case 75: + case 77: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].JSONExpressionParser } - case 76: + case 78: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LogfmtExpressionParser } - case 77: + case 79: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = &LabelFilterExpr{LabelFilterer: exprDollar[2].LabelFilter} } - case 78: + case 80: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } - case 79: + case 81: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].DecolorizeExpr } - case 80: + case 82: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } - case 81: + case 83: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].DropLabelsExpr } - case 82: + case 84: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.PipelineStage = exprDollar[2].KeepLabelsExpr } - case 83: + case 85: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.FilterOp = OpFilterIP } - case 84: + case 86: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.OrFilter = newLineFilterExpr(log.LineMatchEqual, "", exprDollar[1].str) } - case 85: + case 87: exprDollar = exprS[exprpt-4 : 
exprpt+1] { exprVAL.OrFilter = newLineFilterExpr(log.LineMatchEqual, exprDollar[1].FilterOp, exprDollar[3].str) } - case 86: + case 88: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.OrFilter = newOrLineFilter(newLineFilterExpr(log.LineMatchEqual, "", exprDollar[1].str), exprDollar[3].OrFilter) } - case 87: + case 89: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LineFilter = newLineFilterExpr(exprDollar[1].Filter, "", exprDollar[2].str) } - case 88: + case 90: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.LineFilter = newLineFilterExpr(exprDollar[1].Filter, exprDollar[2].FilterOp, exprDollar[4].str) } - case 89: + case 91: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.LineFilter = newOrLineFilter(newLineFilterExpr(exprDollar[1].Filter, "", exprDollar[2].str), exprDollar[4].OrFilter) } - case 90: + case 92: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LineFilters = exprDollar[1].LineFilter } - case 91: + case 93: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LineFilters = newOrLineFilter(exprDollar[1].LineFilter, exprDollar[3].OrFilter) } - case 92: + case 94: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LineFilters = newNestedLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].LineFilter) } - case 93: + case 95: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.ParserFlags = []string{exprDollar[1].str} } - case 94: + case 96: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.ParserFlags = append(exprDollar[1].ParserFlags, exprDollar[2].str) } - case 95: + case 97: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LogfmtParser = newLogfmtParserExpr(nil) } - case 96: + case 98: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LogfmtParser = newLogfmtParserExpr(exprDollar[2].ParserFlags) } - case 97: + case 99: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 98: + case 100: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 99: + case 101: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeUnpack, "") } - case 100: + case 102: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelParser = newLabelParserExpr(OpParserTypePattern, exprDollar[2].str) } - case 101: + case 103: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.JSONExpressionParser = newJSONExpressionParser(exprDollar[2].LabelExtractionExpressionList) } - case 102: + case 104: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LogfmtExpressionParser = newLogfmtExpressionParser(exprDollar[3].LabelExtractionExpressionList, exprDollar[2].ParserFlags) } - case 103: + case 105: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LogfmtExpressionParser = newLogfmtExpressionParser(exprDollar[2].LabelExtractionExpressionList, nil) } - case 104: + case 106: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } - case 105: + case 107: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DecolorizeExpr = newDecolorizeExpr() } - case 106: + case 108: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFormat = log.NewRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 107: + case 109: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFormat = log.NewTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 108: + case 110: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelsFormat = []log.LabelFmt{exprDollar[1].LabelFormat} } - case 109: + case 111: 
exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } - case 111: + case 113: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 112: + case 114: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = log.NewStringLabelFilter(exprDollar[1].Matcher) } - case 113: + case 115: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = exprDollar[1].IPLabelFilter } - case 114: + case 116: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = exprDollar[1].UnitFilter } - case 115: + case 117: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 116: + case 118: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 117: + case 119: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 118: + case 120: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 119: + case 121: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 120: + case 122: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelFilter = log.NewOrLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 121: + case 123: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelExtractionExpression = log.NewLabelExtractionExpr(exprDollar[1].str, exprDollar[3].str) } - case 122: + case 124: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelExtractionExpression = log.NewLabelExtractionExpr(exprDollar[1].str, exprDollar[1].str) } - case 123: + case 125: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LabelExtractionExpressionList = []log.LabelExtractionExpr{exprDollar[1].LabelExtractionExpression} } - case 124: + case 126: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.LabelExtractionExpressionList = append(exprDollar[1].LabelExtractionExpressionList, exprDollar[3].LabelExtractionExpression) } - case 125: + case 127: exprDollar = exprS[exprpt-6 : exprpt+1] { exprVAL.IPLabelFilter = log.NewIPLabelFilter(exprDollar[5].str, exprDollar[1].str, log.LabelFilterEqual) } - case 126: + case 128: exprDollar = exprS[exprpt-6 : exprpt+1] { exprVAL.IPLabelFilter = log.NewIPLabelFilter(exprDollar[5].str, exprDollar[1].str, log.LabelFilterNotEqual) } - case 127: + case 129: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.UnitFilter = exprDollar[1].DurationFilter } - case 128: + case 130: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.UnitFilter = exprDollar[1].BytesFilter } - case 129: + case 131: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 130: + case 132: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 131: + case 133: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 132: + case 134: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = 
log.NewDurationLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 133: + case 135: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 134: + case 136: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 135: + case 137: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 136: + case 138: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) } - case 137: + case 139: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 138: + case 140: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) } - case 139: + case 141: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 140: + case 142: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 141: + case 143: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 142: + case 144: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].bytes) } - case 143: + case 145: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 144: + case 146: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 145: + case 147: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 146: + case 148: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 147: + case 149: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 148: + case 150: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 149: + case 151: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 150: + case 152: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DropLabel = log.NewDropLabel(nil, exprDollar[1].str) } - case 151: + case 153: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DropLabel = 
log.NewDropLabel(exprDollar[1].Matcher, "") } - case 152: + case 154: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.DropLabels = []log.DropLabel{exprDollar[1].DropLabel} } - case 153: + case 155: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.DropLabels = append(exprDollar[1].DropLabels, exprDollar[3].DropLabel) } - case 154: + case 156: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.DropLabelsExpr = newDropLabelsExpr(exprDollar[2].DropLabels) } - case 155: + case 157: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.KeepLabel = log.NewKeepLabel(nil, exprDollar[1].str) } - case 156: + case 158: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.KeepLabel = log.NewKeepLabel(exprDollar[1].Matcher, "") } - case 157: + case 159: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.KeepLabels = []log.KeepLabel{exprDollar[1].KeepLabel} } - case 158: + case 160: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.KeepLabels = append(exprDollar[1].KeepLabels, exprDollar[3].KeepLabel) } - case 159: + case 161: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.KeepLabelsExpr = newKeepLabelsExpr(exprDollar[2].KeepLabels) } - case 160: + case 162: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 161: + case 163: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 162: + case 164: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 163: + case 165: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 164: + case 166: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 165: + case 167: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 166: + case 168: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 167: + case 169: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 168: + case 170: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 169: + case 171: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 170: + case 172: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 171: + case 173: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 172: + case 174: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 173: + case 175: exprDollar = exprS[exprpt-4 : exprpt+1] { 
exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 174: + case 176: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 175: + case 177: exprDollar = exprS[exprpt-0 : exprpt+1] { exprVAL.BoolModifier = &BinOpOptions{VectorMatching: &VectorMatching{Card: CardOneToOne}} } - case 176: + case 178: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.BoolModifier = &BinOpOptions{VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: true} } - case 177: + case 179: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier exprVAL.OnOrIgnoringModifier.VectorMatching.On = true exprVAL.OnOrIgnoringModifier.VectorMatching.MatchingLabels = exprDollar[4].Labels } - case 178: + case 180: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier exprVAL.OnOrIgnoringModifier.VectorMatching.On = true } - case 179: + case 181: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier exprVAL.OnOrIgnoringModifier.VectorMatching.MatchingLabels = exprDollar[4].Labels } - case 180: + case 182: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.OnOrIgnoringModifier = exprDollar[1].BoolModifier } - case 181: + case 183: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].BoolModifier } - case 182: + case 184: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier } - case 183: + case 185: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardManyToOne } - case 184: + case 186: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardManyToOne } - case 185: + case 187: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardManyToOne exprVAL.BinOpModifier.VectorMatching.Include = exprDollar[4].Labels } - case 186: + case 188: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardOneToMany } - case 187: + case 189: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardOneToMany } - case 188: + case 190: exprDollar = exprS[exprpt-5 : exprpt+1] { exprVAL.BinOpModifier = exprDollar[1].OnOrIgnoringModifier exprVAL.BinOpModifier.VectorMatching.Card = CardOneToMany exprVAL.BinOpModifier.VectorMatching.Include = exprDollar[4].Labels } - case 189: + case 191: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 190: + case 192: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 191: + case 193: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 192: + case 194: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.VectorExpr = NewVectorExpr(exprDollar[3].str) } - case 193: + case 195: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.Vector = OpTypeVector } - case 194: + case 196: exprDollar 
= exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeSum } - case 195: + case 197: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeAvg } - case 196: + case 198: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeCount } - case 197: + case 199: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeMax } - case 198: + case 200: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeMin } - case 199: + case 201: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeStddev } - case 200: + case 202: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeStdvar } - case 201: + case 203: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeBottomK } - case 202: + case 204: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeTopK } - case 203: + case 205: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeSort } - case 204: + case 206: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.VectorOp = OpTypeSortDesc } - case 205: + case 207: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeCount } - case 206: + case 208: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeRate } - case 207: + case 209: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeRateCounter } - case 208: + case 210: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeBytes } - case 209: + case 211: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 210: + case 212: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeAvg } - case 211: + case 213: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeSum } - case 212: + case 214: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeMin } - case 213: + case 215: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeMax } - case 214: + case 216: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeStdvar } - case 215: + case 217: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeStddev } - case 216: + case 218: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeQuantile } - case 217: + case 219: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeFirst } - case 218: + case 220: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeLast } - case 219: + case 221: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.RangeOp = OpRangeTypeAbsent } - case 220: + case 222: exprDollar = exprS[exprpt-2 : exprpt+1] { exprVAL.OffsetExpr = newOffsetExpr(exprDollar[2].duration) } - case 221: + case 223: exprDollar = exprS[exprpt-1 : exprpt+1] { exprVAL.Labels = []string{exprDollar[1].str} } - case 222: + case 224: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 223: + case 225: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: false, Groups: exprDollar[3].Labels} } - case 224: + case 226: exprDollar = exprS[exprpt-4 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: true, Groups: exprDollar[3].Labels} } - case 225: + case 227: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: false, Groups: nil} } - case 226: + case 228: exprDollar = exprS[exprpt-3 : exprpt+1] { exprVAL.Grouping = &Grouping{Without: true, Groups: nil} diff --git a/pkg/logql/syntax/lex.go 
b/pkg/logql/syntax/lex.go index 14db797e248f1..dc806c921090e 100644 --- a/pkg/logql/syntax/lex.go +++ b/pkg/logql/syntax/lex.go @@ -23,8 +23,10 @@ var tokens = map[string]int{ OpTypeNEQ: NEQ, "=~": RE, "!~": NRE, + "!>": NPA, "|=": PIPE_EXACT, "|~": PIPE_MATCH, + "|>": PIPE_PATTERN, OpPipe: PIPE, OpUnwrap: UNWRAP, "(": OPEN_PARENTHESIS, diff --git a/pkg/logql/syntax/linefilter_test.go b/pkg/logql/syntax/linefilter_test.go index d0cc700ce4601..55fc0fc39179c 100644 --- a/pkg/logql/syntax/linefilter_test.go +++ b/pkg/logql/syntax/linefilter_test.go @@ -16,7 +16,10 @@ func TestLineFilterSerialization(t *testing.T) { {Ty: log.LineMatchEqual, Match: "match", Op: "OR"}, {Ty: log.LineMatchNotEqual, Match: "not match"}, {Ty: log.LineMatchNotEqual, Match: "not match", Op: "OR"}, - {Ty: log.LineMatchRegexp, Op: "OR"}, + {Ty: log.LineMatchPattern, Match: "match"}, + {Ty: log.LineMatchPattern, Match: "match", Op: "OR"}, + {Ty: log.LineMatchNotPattern, Match: "not match"}, + {Ty: log.LineMatchNotPattern, Match: "not match", Op: "OR"}, } { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { b := make([]byte, orig.Size()) diff --git a/pkg/storage/bloom/v1/bloom_tester.go b/pkg/storage/bloom/v1/bloom_tester.go index 5aa688bfc2657..f60166788e4d6 100644 --- a/pkg/storage/bloom/v1/bloom_tester.go +++ b/pkg/storage/bloom/v1/bloom_tester.go @@ -5,6 +5,7 @@ import ( regexpsyntax "github.com/grafana/regexp/syntax" "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/pkg/logql/log/pattern" "github.com/grafana/loki/pkg/logql/syntax" "github.com/grafana/loki/pkg/storage/bloom/v1/filter" ) @@ -89,7 +90,7 @@ func FiltersToBloomTest(b NGramBuilder, filters ...syntax.LineFilterExpr) BloomT func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest { switch filter.Ty { - case log.LineMatchNotEqual, log.LineMatchNotRegexp: + case log.LineMatchNotEqual, log.LineMatchNotRegexp, log.LineMatchNotPattern: // We cannot test _negated_ filters with a bloom filter since blooms are probabilistic // filters that can only tell us if a string _might_ exist. 
// For example, for `!= "foo"`, the bloom filter might tell us that the string "foo" might exist @@ -114,6 +115,8 @@ func simpleFilterToBloomTest(b NGramBuilder, filter syntax.LineFilter) BloomTest } return matcherFilterWrapper{filter: matcher} + case log.LineMatchPattern: + return newPatternTest(b, filter.Match) default: return MatchAll } @@ -275,3 +278,20 @@ func (o orTest) Matches(bloom filter.Checker) bool { func (o orTest) MatchesWithPrefixBuf(bloom filter.Checker, buf []byte, prefixLen int) bool { return o.left.MatchesWithPrefixBuf(bloom, buf, prefixLen) || o.right.MatchesWithPrefixBuf(bloom, buf, prefixLen) } + +func newPatternTest(b NGramBuilder, match string) BloomTest { + lit, err := pattern.ParseLiterals(match) + if err != nil { + return MatchAll + } + var test stringTest + for _, l := range lit { + it := b.Tokens(string(l)) + for it.Next() { + ngram := make([]byte, len(it.At())) + copy(ngram, it.At()) + test.ngrams = append(test.ngrams, ngram) + } + } + return test +} diff --git a/pkg/storage/bloom/v1/bloom_tester_test.go b/pkg/storage/bloom/v1/bloom_tester_test.go index 991ac092dee39..085d56c590063 100644 --- a/pkg/storage/bloom/v1/bloom_tester_test.go +++ b/pkg/storage/bloom/v1/bloom_tester_test.go @@ -166,6 +166,42 @@ func TestFiltersToBloomTests(t *testing.T) { bloom: fakeBloom{"foo"}, expectMatch: true, }, + { + name: "pattern match exists", + query: `{app="fake"} |> "<_>foo"`, + bloom: fakeBloom{"foo", "bar"}, + expectMatch: true, + }, + { + name: "pattern match does not exist", + query: `{app="fake"} |> "<_>foo"`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: false, + }, + { + name: "pattern not match exists", + query: `{app="fake"} !> "<_>foo"`, + bloom: fakeBloom{"foo", "bar"}, + expectMatch: true, + }, + { + name: "pattern not match does not exist", + query: `{app="fake"} !> "<_>foo"`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: true, + }, + { + name: "pattern all", + query: `{app="fake"} |> "<_>"`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: true, + }, + { + name: "pattern empty", + query: `{app="fake"} |> ""`, + bloom: fakeBloom{"bar", "baz"}, + expectMatch: true, + }, } { t.Run(tc.name, func(t *testing.T) { expr, err := syntax.ParseExpr(tc.query) From 71602eb10f7603c4007972f9bbfb3f66f39dd79e Mon Sep 17 00:00:00 2001 From: Christian Haudum Date: Tue, 2 Apr 2024 11:21:40 +0200 Subject: [PATCH 49/54] chore(blooms): Clean up bloom component configuration (#12387) * Remove unused setting and move download parallelism configuration one level up. * Update description of bloom_shipper config block Signed-off-by: Christian Haudum --- docs/sources/configure/_index.md | 15 +++++-------- integration/cluster/cluster.go | 2 -- pkg/bloomcompactor/retention_test.go | 6 ++--- pkg/bloomgateway/bloomgateway_test.go | 6 ++--- pkg/loki/modules_test.go | 6 ++--- pkg/storage/factory.go | 2 +- .../shipper/bloomshipper/config/config.go | 22 +++++-------------- .../stores/shipper/bloomshipper/store.go | 2 +- .../stores/shipper/bloomshipper/store_test.go | 6 ++--- 9 files changed, 21 insertions(+), 46 deletions(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index ab9ab42c18703..4c99c3b4c8d1a 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2351,7 +2351,8 @@ tsdb_shipper: [ingesterdbretainperiod: ] -# Configures Bloom Shipper. +# Configures the bloom shipper component, which contains the store abstraction +# to fetch bloom filters from and put them to object storage. 
bloom_shipper: # Working directory to store downloaded bloom blocks. Supports multiple # directories, separated by comma. @@ -2363,15 +2364,9 @@ bloom_shipper: # CLI flag: -bloom.max-query-page-size [max_query_page_size: | default = 64MiB] - blocks_downloading_queue: - # The count of parallel workers that download Bloom Blocks. - # CLI flag: -bloom.shipper.blocks-downloading-queue.workers-count - [workers_count: | default = 16] - - # Maximum number of task in queue per tenant per bloom-gateway. Enqueuing - # the tasks above this limit will fail an error. - # CLI flag: -bloom.shipper.blocks-downloading-queue.max_tasks_enqueued_per_tenant - [max_tasks_enqueued_per_tenant: | default = 10000] + # The maximum number of concurrent bloom block downloads. + # CLI flag: -bloom.download-parallelism + [download_parallelism: | default = 16] blocks_cache: # Cache for bloom blocks. Soft limit of the cache in bytes. Exceeding this diff --git a/integration/cluster/cluster.go b/integration/cluster/cluster.go index 5e29413a68c62..79dc7ce2809ff 100644 --- a/integration/cluster/cluster.go +++ b/integration/cluster/cluster.go @@ -82,8 +82,6 @@ storage_config: cache_location: {{.dataPath}}/tsdb-cache bloom_shipper: working_directory: {{.dataPath}}/bloom-shipper - blocks_downloading_queue: - workers_count: 1 bloom_gateway: enabled: false diff --git a/pkg/bloomcompactor/retention_test.go b/pkg/bloomcompactor/retention_test.go index 6c3c82c426c3e..0f880a2bd7e2a 100644 --- a/pkg/bloomcompactor/retention_test.go +++ b/pkg/bloomcompactor/retention_test.go @@ -802,10 +802,8 @@ func NewMockBloomStoreWithWorkDir(t *testing.T, workDir string) (*bloomshipper.B Directory: workDir, }, BloomShipperConfig: config.Config{ - WorkingDirectory: []string{workDir}, - BlocksDownloadingQueue: config.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{workDir}, + DownloadParallelism: 1, BlocksCache: config.BlocksCacheConfig{ SoftLimit: 1 << 20, HardLimit: 2 << 20, diff --git a/pkg/bloomgateway/bloomgateway_test.go b/pkg/bloomgateway/bloomgateway_test.go index f705bb5eb0919..59f37974a4ce1 100644 --- a/pkg/bloomgateway/bloomgateway_test.go +++ b/pkg/bloomgateway/bloomgateway_test.go @@ -72,10 +72,8 @@ func setupBloomStore(t *testing.T) *bloomshipper.BloomStore { } storageCfg := storage.Config{ BloomShipperConfig: bloomshipperconfig.Config{ - WorkingDirectory: []string{t.TempDir()}, - BlocksDownloadingQueue: bloomshipperconfig.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{t.TempDir()}, + DownloadParallelism: 1, BlocksCache: bloomshipperconfig.BlocksCacheConfig{ SoftLimit: flagext.Bytes(10 << 20), HardLimit: flagext.Bytes(20 << 20), diff --git a/pkg/loki/modules_test.go b/pkg/loki/modules_test.go index 1c8945b51a37e..4529eb7c23c88 100644 --- a/pkg/loki/modules_test.go +++ b/pkg/loki/modules_test.go @@ -367,10 +367,8 @@ func minimalWorkingConfig(t *testing.T, dir, target string, cfgTransformers ...f cfg.StorageConfig = storage.Config{ FSConfig: local.FSConfig{Directory: dir}, BloomShipperConfig: bloomshipperconfig.Config{ - WorkingDirectory: []string{filepath.Join(dir, "blooms")}, - BlocksDownloadingQueue: bloomshipperconfig.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{filepath.Join(dir, "blooms")}, + DownloadParallelism: 1, }, BoltDBShipperConfig: boltdb.IndexCfg{ Config: indexshipper.Config{ diff --git a/pkg/storage/factory.go b/pkg/storage/factory.go index da687c5ea9c7b..b619d978a7564 100644 --- a/pkg/storage/factory.go +++ b/pkg/storage/factory.go
@@ -336,7 +336,7 @@ type Config struct { MaxChunkBatchSize int `yaml:"max_chunk_batch_size"` BoltDBShipperConfig boltdb.IndexCfg `yaml:"boltdb_shipper" doc:"description=Configures storing index in an Object Store (GCS/S3/Azure/Swift/COS/Filesystem) in the form of boltdb files. Required fields only required when boltdb-shipper is defined in config."` TSDBShipperConfig indexshipper.Config `yaml:"tsdb_shipper" doc:"description=Configures storing index in an Object Store (GCS/S3/Azure/Swift/COS/Filesystem) in a prometheus TSDB-like format. Required fields only required when TSDB is defined in config."` - BloomShipperConfig bloomshipperconfig.Config `yaml:"bloom_shipper" doc:"description=Configures Bloom Shipper."` + BloomShipperConfig bloomshipperconfig.Config `yaml:"bloom_shipper" doc:"description=Configures the bloom shipper component, which contains the store abstraction to fetch bloom filters from and put them to object storage."` // Config for using AsyncStore when using async index stores like `boltdb-shipper`. // It is required for getting chunk ids of recently flushed chunks from the ingesters. diff --git a/pkg/storage/stores/shipper/bloomshipper/config/config.go b/pkg/storage/stores/shipper/bloomshipper/config/config.go index 3aef86cabdf22..89a2f30e2dd33 100644 --- a/pkg/storage/stores/shipper/bloomshipper/config/config.go +++ b/pkg/storage/stores/shipper/bloomshipper/config/config.go @@ -12,21 +12,11 @@ import ( ) type Config struct { - WorkingDirectory flagext.StringSliceCSV `yaml:"working_directory"` - MaxQueryPageSize flagext.Bytes `yaml:"max_query_page_size"` - BlocksDownloadingQueue DownloadingQueueConfig `yaml:"blocks_downloading_queue"` - BlocksCache BlocksCacheConfig `yaml:"blocks_cache"` - MetasCache cache.Config `yaml:"metas_cache"` -} - -type DownloadingQueueConfig struct { - WorkersCount int `yaml:"workers_count"` - MaxTasksEnqueuedPerTenant int `yaml:"max_tasks_enqueued_per_tenant"` -} - -func (cfg *DownloadingQueueConfig) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { - f.IntVar(&cfg.WorkersCount, prefix+"workers-count", 16, "The count of parallel workers that download Bloom Blocks.") - f.IntVar(&cfg.MaxTasksEnqueuedPerTenant, prefix+"max_tasks_enqueued_per_tenant", 10_000, "Maximum number of task in queue per tenant per bloom-gateway. Enqueuing the tasks above this limit will fail an error.") + WorkingDirectory flagext.StringSliceCSV `yaml:"working_directory"` + MaxQueryPageSize flagext.Bytes `yaml:"max_query_page_size"` + DownloadParallelism int `yaml:"download_parallelism"` + BlocksCache BlocksCacheConfig `yaml:"blocks_cache"` + MetasCache cache.Config `yaml:"metas_cache"` } func (c *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { @@ -34,7 +24,7 @@ func (c *Config) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet) { f.Var(&c.WorkingDirectory, prefix+"shipper.working-directory", "Working directory to store downloaded bloom blocks. Supports multiple directories, separated by comma.") _ = c.MaxQueryPageSize.Set("64MiB") // default should match the one set in pkg/storage/bloom/v1/bloom.go f.Var(&c.MaxQueryPageSize, prefix+"max-query-page-size", "Maximum size of bloom pages that should be queried. 
Larger pages than this limit are skipped when querying blooms to limit memory usage.") - c.BlocksDownloadingQueue.RegisterFlagsWithPrefix(prefix+"shipper.blocks-downloading-queue.", f) + f.IntVar(&c.DownloadParallelism, prefix+"download-parallelism", 16, "The maximum number of concurrent bloom block downloads.") c.BlocksCache.RegisterFlagsWithPrefixAndDefaults(prefix+"blocks-cache.", "Cache for bloom blocks. ", f, 24*time.Hour) c.MetasCache.RegisterFlagsWithPrefix(prefix+"metas-cache.", "Cache for bloom metas. ", f) } diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index ce15d4cc2663a..83d0db9e4296b 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -278,7 +278,7 @@ func NewBloomStore( // TODO(chaudum): Remove wrapper cfg := bloomStoreConfig{ workingDirs: storageConfig.BloomShipperConfig.WorkingDirectory, - numWorkers: storageConfig.BloomShipperConfig.BlocksDownloadingQueue.WorkersCount, + numWorkers: storageConfig.BloomShipperConfig.DownloadParallelism, maxBloomPageSize: int(storageConfig.BloomShipperConfig.MaxQueryPageSize), } diff --git a/pkg/storage/stores/shipper/bloomshipper/store_test.go b/pkg/storage/stores/shipper/bloomshipper/store_test.go index 6c206161839a6..3ba7b8d2b5dee 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/store_test.go @@ -58,10 +58,8 @@ func newMockBloomStoreWithWorkDir(t *testing.T, workDir, storeDir string) (*Bloo Directory: storeDir, }, BloomShipperConfig: config.Config{ - WorkingDirectory: []string{workDir}, - BlocksDownloadingQueue: config.DownloadingQueueConfig{ - WorkersCount: 1, - }, + WorkingDirectory: []string{workDir}, + DownloadParallelism: 1, BlocksCache: config.BlocksCacheConfig{ SoftLimit: 1 << 20, HardLimit: 2 << 20, From b66c3433831fbd2c257427fa9a0d6ea52f3ededd Mon Sep 17 00:00:00 2001 From: Sandeep Sukhani Date: Tue, 2 Apr 2024 16:25:27 +0530 Subject: [PATCH 50/54] feat: add support for discovering and adding log levels as structured metadata (#12428) --- docs/sources/configure/_index.md | 6 ++ pkg/distributor/distributor.go | 70 +++++++++++++ pkg/distributor/distributor_test.go | 146 ++++++++++++++++++++++++++++ pkg/distributor/limits.go | 1 + pkg/distributor/validator.go | 2 + pkg/loghttp/push/otlp.go | 4 +- pkg/validation/limits.go | 6 ++ 7 files changed, 234 insertions(+), 1 deletion(-) diff --git a/docs/sources/configure/_index.md b/docs/sources/configure/_index.md index 4c99c3b4c8d1a..cce92e24eb2f1 100644 --- a/docs/sources/configure/_index.md +++ b/docs/sources/configure/_index.md @@ -2830,6 +2830,12 @@ The `limits_config` block configures global and per-tenant limits in Loki. # CLI flag: -validation.discover-service-name [discover_service_name: | default = [service app application name app_kubernetes_io_name container container_name component workload job]] +# Discover and add log levels during ingestion, if not present already. Levels +# would be added to Structured Metadata with name 'level' and one of the values +# from 'debug', 'info', 'warn', 'error', 'critical', 'fatal'. +# CLI flag: -validation.discover-log-levels +[discover_log_levels: | default = false] + # Maximum number of active streams per user, per ingester. 0 to disable.
# CLI flag: -ingester.max-streams-per-user [max_streams_per_user: | default = 0] diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index 9b34913d42a19..01f77e320b5a6 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -15,6 +15,7 @@ import ( "github.com/go-kit/log/level" "github.com/gogo/status" "github.com/prometheus/prometheus/model/labels" + "go.opentelemetry.io/collector/pdata/plog" "google.golang.org/grpc/codes" "github.com/grafana/dskit/httpgrpc" @@ -57,6 +58,13 @@ const ( labelServiceName = "service_name" serviceUnknown = "unknown_service" + labelLevel = "level" + logLevelDebug = "debug" + logLevelInfo = "info" + logLevelWarn = "warn" + logLevelError = "error" + logLevelFatal = "fatal" + logLevelCritical = "critical" ) var ( @@ -367,6 +375,7 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log n := 0 pushSize := 0 prevTs := stream.Entries[0].Timestamp + addLogLevel := validationContext.allowStructuredMetadata && validationContext.discoverLogLevels && !lbs.Has(labelLevel) for _, entry := range stream.Entries { if err := d.validator.ValidateEntry(ctx, validationContext, lbs, entry); err != nil { d.writeFailuresManager.Log(tenantID, err) @@ -374,6 +383,14 @@ func (d *Distributor) Push(ctx context.Context, req *logproto.PushRequest) (*log continue } + structuredMetadata := logproto.FromLabelAdaptersToLabels(entry.StructuredMetadata) + if addLogLevel && !structuredMetadata.Has(labelLevel) { + logLevel := detectLogLevelFromLogEntry(entry, structuredMetadata) + entry.StructuredMetadata = append(entry.StructuredMetadata, logproto.LabelAdapter{ + Name: labelLevel, + Value: logLevel, + }) + } stream.Entries[n] = entry // If configured for this tenant, increment duplicate timestamps. Note, this is imperfect @@ -838,3 +855,56 @@ func newRingAndLifecycler(cfg RingConfig, instanceCount *atomic.Uint32, logger l func (d *Distributor) HealthyInstancesCount() int { return int(d.healthyInstancesCount.Load()) } + +func detectLogLevelFromLogEntry(entry logproto.Entry, structuredMetadata labels.Labels) string { + // otlp logs have a severity number, using which we are defining the log levels. 
+ // Significance of severity number is explained in otel docs here https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber + if otlpSeverityNumberTxt := structuredMetadata.Get(push.OTLPSeverityNumber); otlpSeverityNumberTxt != "" { + otlpSeverityNumber, err := strconv.Atoi(otlpSeverityNumberTxt) + if err != nil { + return logLevelInfo + } + if otlpSeverityNumber <= int(plog.SeverityNumberDebug4) { + return logLevelDebug + } else if otlpSeverityNumber <= int(plog.SeverityNumberInfo4) { + return logLevelInfo + } else if otlpSeverityNumber <= int(plog.SeverityNumberWarn4) { + return logLevelWarn + } else if otlpSeverityNumber <= int(plog.SeverityNumberError4) { + return logLevelError + } else if otlpSeverityNumber <= int(plog.SeverityNumberFatal4) { + return logLevelFatal + } + return logLevelInfo + } + + return extractLogLevelFromLogLine(entry.Line) +} + +func extractLogLevelFromLogLine(log string) string { + if strings.Contains(log, `:"err"`) || strings.Contains(log, `:"ERR"`) || + strings.Contains(log, "=err") || strings.Contains(log, "=ERR") || + strings.Contains(log, "err:") || strings.Contains(log, "ERR:") || + strings.Contains(log, "error") || strings.Contains(log, "ERROR") { + return logLevelError + } + if strings.Contains(log, `:"warn"`) || strings.Contains(log, `:"WARN"`) || + strings.Contains(log, "=warn") || strings.Contains(log, "=WARN") || + strings.Contains(log, "warn:") || strings.Contains(log, "WARN:") || + strings.Contains(log, "warning") || strings.Contains(log, "WARNING") { + return logLevelWarn + } + if strings.Contains(log, `:"critical"`) || strings.Contains(log, `:"CRITICAL"`) || + strings.Contains(log, "=critical") || strings.Contains(log, "=CRITICAL") || + strings.Contains(log, "CRITICAL:") || strings.Contains(log, "critical:") { + return logLevelCritical + } + if strings.Contains(log, `:"debug"`) || strings.Contains(log, `:"DEBUG"`) || + strings.Contains(log, "=debug") || strings.Contains(log, "=DEBUG") || + strings.Contains(log, "debug:") || strings.Contains(log, "DEBUG:") { + return logLevelDebug + } + + // Default to info if no specific level is found + return logLevelInfo +} diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go index 81a7fb09b94a5..e4bf766b42ee6 100644 --- a/pkg/distributor/distributor_test.go +++ b/pkg/distributor/distributor_test.go @@ -26,13 +26,16 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/pdata/plog" "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" "github.com/grafana/loki/pkg/ingester" "github.com/grafana/loki/pkg/ingester/client" + loghttp_push "github.com/grafana/loki/pkg/loghttp/push" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/pkg/push" "github.com/grafana/loki/pkg/runtime" "github.com/grafana/loki/pkg/util/constants" fe "github.com/grafana/loki/pkg/util/flagext" @@ -1491,3 +1494,146 @@ func TestDistributorTee(t *testing.T) { require.Equal(t, "test", tee.tenant) } } + +func Test_DetectLogLevels(t *testing.T) { + setup := func(discoverLogLevels bool) (*validation.Limits, *mockIngester) { + limits := &validation.Limits{} + flagext.DefaultValues(limits) + + limits.DiscoverLogLevels = discoverLogLevels + limits.DiscoverServiceName = nil + limits.AllowStructuredMetadata = true + return limits, &mockIngester{} + } + + t.Run("log level detection disabled", func(t *testing.T) { + 
limits, ingester := setup(false) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar"}`}) + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar"}`, topVal.Streams[0].Labels) + require.Len(t, topVal.Streams[0].Entries[0].StructuredMetadata, 0) + }) + + t.Run("log level detection enabled", func(t *testing.T) { + limits, ingester := setup(true) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar"}`}) + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar"}`, topVal.Streams[0].Labels) + require.Equal(t, push.LabelsAdapter{ + { + Name: labelLevel, + Value: logLevelInfo, + }, + }, topVal.Streams[0].Entries[0].StructuredMetadata) + }) + + t.Run("log level detection enabled but log level already present in stream", func(t *testing.T) { + limits, ingester := setup(true) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar", level="debug"}`}) + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar", level="debug"}`, topVal.Streams[0].Labels) + require.Len(t, topVal.Streams[0].Entries[0].StructuredMetadata, 0) + }) + + t.Run("log level detection enabled but log level already present as structured metadata", func(t *testing.T) { + limits, ingester := setup(true) + distributors, _ := prepare(t, 1, 5, limits, func(addr string) (ring_client.PoolClient, error) { return ingester, nil }) + + writeReq := makeWriteRequestWithLabels(1, 10, []string{`{foo="bar"}`}) + writeReq.Streams[0].Entries[0].StructuredMetadata = push.LabelsAdapter{ + { + Name: labelLevel, + Value: logLevelWarn, + }, + } + _, err := distributors[0].Push(ctx, writeReq) + require.NoError(t, err) + topVal := ingester.Peek() + require.Equal(t, `{foo="bar"}`, topVal.Streams[0].Labels) + require.Equal(t, push.LabelsAdapter{ + { + Name: labelLevel, + Value: logLevelWarn, + }, + }, topVal.Streams[0].Entries[0].StructuredMetadata) + }) +} + +func Test_detectLogLevelFromLogEntry(t *testing.T) { + for _, tc := range []struct { + name string + entry logproto.Entry + expectedLogLevel string + }{ + { + name: "use severity number from otlp logs", + entry: logproto.Entry{ + Line: "error", + StructuredMetadata: push.LabelsAdapter{ + { + Name: loghttp_push.OTLPSeverityNumber, + Value: fmt.Sprintf("%d", plog.SeverityNumberDebug3), + }, + }, + }, + expectedLogLevel: logLevelDebug, + }, + { + name: "invalid severity number should not cause any issues", + entry: logproto.Entry{ + StructuredMetadata: push.LabelsAdapter{ + { + Name: loghttp_push.OTLPSeverityNumber, + Value: "foo", + }, + }, + }, + expectedLogLevel: logLevelInfo, + }, + { + name: "non otlp without any of the log level keywords in log line", + entry: logproto.Entry{ + Line: "foo", + }, + expectedLogLevel: logLevelInfo, + }, + { + name: "non otlp with log level keywords in log line", + entry: logproto.Entry{ + Line: "this is a warning log", + }, + expectedLogLevel: logLevelWarn, + }, + { + name: "json log line with an error", + entry: logproto.Entry{ + Line: 
`{"foo":"bar","level":"error"}`, + }, + expectedLogLevel: logLevelError, + }, + { + name: "logfmt log line with a warn", + entry: logproto.Entry{ + Line: `foo=bar level=warn`, + }, + expectedLogLevel: logLevelWarn, + }, + } { + t.Run(tc.name, func(t *testing.T) { + detectedLogLevel := detectLogLevelFromLogEntry(tc.entry, logproto.FromLabelAdaptersToLabels(tc.entry.StructuredMetadata)) + require.Equal(t, tc.expectedLogLevel, detectedLogLevel) + }) + } +} diff --git a/pkg/distributor/limits.go b/pkg/distributor/limits.go index d2f655f1c8329..927374416e8ba 100644 --- a/pkg/distributor/limits.go +++ b/pkg/distributor/limits.go @@ -23,6 +23,7 @@ type Limits interface { IncrementDuplicateTimestamps(userID string) bool DiscoverServiceName(userID string) []string + DiscoverLogLevels(userID string) bool ShardStreams(userID string) *shardstreams.Config IngestionRateStrategy() string diff --git a/pkg/distributor/validator.go b/pkg/distributor/validator.go index ca2186c1d2626..6f0bce53d983b 100644 --- a/pkg/distributor/validator.go +++ b/pkg/distributor/validator.go @@ -44,6 +44,7 @@ type validationContext struct { incrementDuplicateTimestamps bool discoverServiceName []string + discoverLogLevels bool allowStructuredMetadata bool maxStructuredMetadataSize int @@ -65,6 +66,7 @@ func (v Validator) getValidationContextForTime(now time.Time, userID string) val maxLabelValueLength: v.MaxLabelValueLength(userID), incrementDuplicateTimestamps: v.IncrementDuplicateTimestamps(userID), discoverServiceName: v.DiscoverServiceName(userID), + discoverLogLevels: v.DiscoverLogLevels(userID), allowStructuredMetadata: v.AllowStructuredMetadata(userID), maxStructuredMetadataSize: v.MaxStructuredMetadataSize(userID), maxStructuredMetadataCount: v.MaxStructuredMetadataCount(userID), diff --git a/pkg/loghttp/push/otlp.go b/pkg/loghttp/push/otlp.go index a001b52b210f6..8bd206fce29ac 100644 --- a/pkg/loghttp/push/otlp.go +++ b/pkg/loghttp/push/otlp.go @@ -27,6 +27,8 @@ const ( pbContentType = "application/x-protobuf" gzipContentEncoding = "gzip" attrServiceName = "service.name" + + OTLPSeverityNumber = "severity_number" ) func newPushStats() *Stats { @@ -287,7 +289,7 @@ func otlpLogToPushEntry(log plog.LogRecord, otlpConfig OTLPConfig) push.Entry { if severityNum := log.SeverityNumber(); severityNum != plog.SeverityNumberUnspecified { structuredMetadata = append(structuredMetadata, push.LabelAdapter{ - Name: "severity_number", + Name: OTLPSeverityNumber, Value: fmt.Sprintf("%d", severityNum), }) } diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index 8c2197113a41f..a004b8eb94f89 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -81,6 +81,7 @@ type Limits struct { MaxLineSizeTruncate bool `yaml:"max_line_size_truncate" json:"max_line_size_truncate"` IncrementDuplicateTimestamp bool `yaml:"increment_duplicate_timestamp" json:"increment_duplicate_timestamp"` DiscoverServiceName []string `yaml:"discover_service_name" json:"discover_service_name"` + DiscoverLogLevels bool `yaml:"discover_log_levels" json:"discover_log_levels"` // Ingester enforced limits. MaxLocalStreamsPerUser int `yaml:"max_streams_per_user" json:"max_streams_per_user"` @@ -254,6 +255,7 @@ func (l *Limits) RegisterFlags(f *flag.FlagSet) { "job", } f.Var((*dskit_flagext.StringSlice)(&l.DiscoverServiceName), "validation.discover-service-name", "If no service_name label exists, Loki maps a single label from the configured list to service_name. 
If none of the configured labels exist in the stream, label is set to unknown_service. Empty list disables setting the label.") + f.BoolVar(&l.DiscoverLogLevels, "validation.discover-log-levels", false, "Discover and add log levels during ingestion, if not present already. Levels would be added to Structured Metadata with name 'level' and one of the values from 'debug', 'info', 'warn', 'error', 'critical', 'fatal'.") _ = l.RejectOldSamplesMaxAge.Set("7d") f.Var(&l.RejectOldSamplesMaxAge, "validation.reject-old-samples.max-age", "Maximum accepted sample age before rejecting.") @@ -915,6 +917,10 @@ func (o *Overrides) DiscoverServiceName(userID string) []string { return o.getOverridesForUser(userID).DiscoverServiceName } +func (o *Overrides) DiscoverLogLevels(userID string) bool { + return o.getOverridesForUser(userID).DiscoverLogLevels +} + // VolumeEnabled returns whether volume endpoints are enabled for a user. func (o *Overrides) VolumeEnabled(userID string) bool { return o.getOverridesForUser(userID).VolumeEnabled From affd9d604fc87ef1a7c9b13b54dd109401506ef8 Mon Sep 17 00:00:00 2001 From: J Stickler Date: Tue, 2 Apr 2024 12:26:18 -0400 Subject: [PATCH 51/54] docs: Adding upgrade link page to Operations (#12423) --- docs/sources/operations/upgrade.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 docs/sources/operations/upgrade.md diff --git a/docs/sources/operations/upgrade.md b/docs/sources/operations/upgrade.md new file mode 100644 index 0000000000000..5a0be8626e6a1 --- /dev/null +++ b/docs/sources/operations/upgrade.md @@ -0,0 +1,11 @@ +--- +title: Upgrade +description: Links to Loki upgrade documentation. +weight: +--- + +# Upgrade + +- [Upgrade](https://grafana.com/docs/loki/latest/setup/upgrade/) from one Loki version to a newer version. + +- [Upgrade Helm](https://grafana.com/docs/loki/latest/setup/upgrade/) from Helm v2.x to Helm v3.x. 
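As a usage sketch for the log-level discovery introduced in PATCH 50/54 above: detection only runs when structured metadata is allowed and `discover_log_levels` is enabled for the tenant, and entries whose stream labels or structured metadata already carry `level` are left untouched. A minimal `limits_config` fragment that turns it on might look like the following; the `allow_structured_metadata` key name is an assumption here, since only its Go-side counterpart (`AllowStructuredMetadata`) appears in this patch series.

```yaml
# Sketch only: enable log-level discovery for all tenants via limits_config.
# discover_log_levels comes from PATCH 50/54 (-validation.discover-log-levels);
# allow_structured_metadata is assumed to be the YAML key for the existing
# AllowStructuredMetadata limit, which the distributor also checks before
# adding the detected level to structured metadata.
limits_config:
  allow_structured_metadata: true
  discover_log_levels: true
```

With this enabled, an entry such as `foo=bar level=warn` that arrives without a `level` stream label or structured-metadata entry would be stored with `level` set to `warn`, matching the logfmt case in the new `Test_detectLogLevelFromLogEntry` test.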
From 4c88be0ef2310406206c02d7747bf73b51c95bda Mon Sep 17 00:00:00 2001 From: Ed Welch Date: Tue, 2 Apr 2024 13:00:17 -0400 Subject: [PATCH 52/54] chore: update loki modules for 3.0 release (#12433) Signed-off-by: Edward Welch --- clients/cmd/docker-driver/config.go | 8 +- clients/cmd/docker-driver/config_test.go | 4 +- clients/cmd/docker-driver/loki.go | 8 +- clients/cmd/docker-driver/loki_test.go | 2 +- clients/cmd/docker-driver/main.go | 4 +- clients/cmd/fluent-bit/buffer.go | 2 +- clients/cmd/fluent-bit/client.go | 2 +- clients/cmd/fluent-bit/config.go | 6 +- clients/cmd/fluent-bit/config_test.go | 4 +- clients/cmd/fluent-bit/dque.go | 6 +- clients/cmd/fluent-bit/loki.go | 6 +- clients/cmd/fluent-bit/loki_test.go | 6 +- clients/cmd/fluent-bit/out_grafana_loki.go | 4 +- clients/cmd/promtail/main.go | 16 +- clients/pkg/logentry/logql/parser.go | 2 +- clients/pkg/logentry/metric/metricvec.go | 2 +- clients/pkg/logentry/stages/decolorize.go | 2 +- .../pkg/logentry/stages/decolorize_test.go | 2 +- clients/pkg/logentry/stages/drop.go | 2 +- clients/pkg/logentry/stages/drop_test.go | 2 +- .../logentry/stages/eventlogmessage_test.go | 2 +- clients/pkg/logentry/stages/extensions.go | 2 +- .../pkg/logentry/stages/extensions_test.go | 2 +- clients/pkg/logentry/stages/json_test.go | 2 +- .../pkg/logentry/stages/labelallow_test.go | 2 +- clients/pkg/logentry/stages/labeldrop_test.go | 2 +- clients/pkg/logentry/stages/labels_test.go | 2 +- clients/pkg/logentry/stages/limit.go | 2 +- clients/pkg/logentry/stages/limit_test.go | 2 +- clients/pkg/logentry/stages/logfmt_test.go | 2 +- clients/pkg/logentry/stages/match.go | 2 +- clients/pkg/logentry/stages/match_test.go | 2 +- clients/pkg/logentry/stages/metrics.go | 2 +- clients/pkg/logentry/stages/metrics_test.go | 4 +- clients/pkg/logentry/stages/multiline.go | 4 +- clients/pkg/logentry/stages/multiline_test.go | 6 +- clients/pkg/logentry/stages/output_test.go | 2 +- clients/pkg/logentry/stages/pack.go | 2 +- clients/pkg/logentry/stages/pack_test.go | 8 +- clients/pkg/logentry/stages/pipeline.go | 2 +- clients/pkg/logentry/stages/pipeline_test.go | 8 +- clients/pkg/logentry/stages/regex_test.go | 2 +- clients/pkg/logentry/stages/replace_test.go | 2 +- clients/pkg/logentry/stages/sampling_test.go | 2 +- clients/pkg/logentry/stages/stage.go | 2 +- .../pkg/logentry/stages/static_labels_test.go | 2 +- .../pkg/logentry/stages/structuredmetadata.go | 2 +- .../stages/structuredmetadata_test.go | 4 +- clients/pkg/logentry/stages/template_test.go | 2 +- clients/pkg/logentry/stages/tenant.go | 2 +- clients/pkg/logentry/stages/tenant_test.go | 6 +- clients/pkg/logentry/stages/timestamp.go | 2 +- clients/pkg/logentry/stages/timestamp_test.go | 4 +- clients/pkg/logentry/stages/util_test.go | 4 +- clients/pkg/promtail/api/types.go | 2 +- clients/pkg/promtail/client/batch.go | 4 +- clients/pkg/promtail/client/batch_test.go | 4 +- clients/pkg/promtail/client/client.go | 6 +- clients/pkg/promtail/client/client_test.go | 10 +- clients/pkg/promtail/client/client_writeto.go | 6 +- .../promtail/client/client_writeto_test.go | 6 +- clients/pkg/promtail/client/config.go | 2 +- clients/pkg/promtail/client/fake/client.go | 2 +- clients/pkg/promtail/client/logger.go | 6 +- clients/pkg/promtail/client/logger_test.go | 6 +- clients/pkg/promtail/client/manager.go | 6 +- clients/pkg/promtail/client/manager_test.go | 16 +- clients/pkg/promtail/config/config.go | 18 +- clients/pkg/promtail/config/config_test.go | 4 +- clients/pkg/promtail/limit/config.go | 2 +- 
.../pkg/promtail/positions/positions_test.go | 2 +- clients/pkg/promtail/promtail.go | 20 +- clients/pkg/promtail/promtail_test.go | 26 +- clients/pkg/promtail/promtail_wal_test.go | 18 +- .../pkg/promtail/scrapeconfig/scrapeconfig.go | 4 +- clients/pkg/promtail/server/server.go | 6 +- clients/pkg/promtail/server/template.go | 2 +- .../pkg/promtail/server/ui/assets_generate.go | 2 +- .../promtail/targets/azureeventhubs/parser.go | 4 +- .../targets/azureeventhubs/target_syncer.go | 8 +- .../azureeventhubs/target_syncer_test.go | 2 +- .../targets/azureeventhubs/targetmanager.go | 8 +- .../pkg/promtail/targets/cloudflare/target.go | 10 +- .../targets/cloudflare/target_test.go | 6 +- .../targets/cloudflare/targetmanager.go | 10 +- clients/pkg/promtail/targets/docker/target.go | 8 +- .../promtail/targets/docker/target_group.go | 8 +- .../promtail/targets/docker/target_test.go | 4 +- .../promtail/targets/docker/targetmanager.go | 12 +- .../targets/docker/targetmanager_test.go | 6 +- .../pkg/promtail/targets/file/decompresser.go | 8 +- .../targets/file/decompresser_test.go | 6 +- .../pkg/promtail/targets/file/filetarget.go | 8 +- .../promtail/targets/file/filetarget_test.go | 4 +- .../targets/file/filetargetmanager.go | 12 +- .../targets/file/filetargetmanager_test.go | 8 +- clients/pkg/promtail/targets/file/tailer.go | 8 +- .../pkg/promtail/targets/gcplog/formatter.go | 4 +- .../promtail/targets/gcplog/formatter_test.go | 4 +- .../promtail/targets/gcplog/pull_target.go | 6 +- .../targets/gcplog/pull_target_test.go | 6 +- .../promtail/targets/gcplog/push_target.go | 10 +- .../targets/gcplog/push_target_test.go | 10 +- .../targets/gcplog/push_translation.go | 6 +- clients/pkg/promtail/targets/gcplog/target.go | 6 +- .../promtail/targets/gcplog/target_test.go | 6 +- .../promtail/targets/gcplog/targetmanager.go | 8 +- .../pkg/promtail/targets/gelf/gelftarget.go | 8 +- .../promtail/targets/gelf/gelftarget_test.go | 4 +- .../targets/gelf/gelftargetmanager.go | 8 +- clients/pkg/promtail/targets/heroku/target.go | 16 +- .../promtail/targets/heroku/target_test.go | 6 +- .../promtail/targets/heroku/targetmanager.go | 8 +- .../promtail/targets/journal/journaltarget.go | 10 +- .../targets/journal/journaltarget_test.go | 8 +- .../targets/journal/journaltargetmanager.go | 8 +- .../journal/journaltargetmanager_linux.go | 10 +- .../pkg/promtail/targets/kafka/consumer.go | 2 +- .../promtail/targets/kafka/consumer_test.go | 2 +- .../pkg/promtail/targets/kafka/formatter.go | 2 +- clients/pkg/promtail/targets/kafka/parser.go | 4 +- clients/pkg/promtail/targets/kafka/target.go | 4 +- .../promtail/targets/kafka/target_syncer.go | 10 +- .../targets/kafka/target_syncer_test.go | 6 +- .../pkg/promtail/targets/kafka/target_test.go | 2 +- .../promtail/targets/kafka/targetmanager.go | 6 +- .../promtail/targets/lokipush/pushtarget.go | 16 +- .../targets/lokipush/pushtarget_test.go | 10 +- .../targets/lokipush/pushtargetmanager.go | 8 +- .../lokipush/pushtargetmanager_test.go | 2 +- clients/pkg/promtail/targets/manager.go | 34 +- .../targets/stdin/stdin_target_manager.go | 10 +- .../stdin/stdin_target_manager_test.go | 12 +- .../syslog/syslogparser/syslogparser_test.go | 2 +- .../promtail/targets/syslog/syslogtarget.go | 8 +- .../targets/syslog/syslogtarget_test.go | 6 +- .../targets/syslog/syslogtargetmanager.go | 8 +- .../pkg/promtail/targets/syslog/transport.go | 4 +- .../pkg/promtail/targets/windows/bookmark.go | 2 +- .../pkg/promtail/targets/windows/format.go | 4 +- .../pkg/promtail/targets/windows/target.go | 8 +- 
.../promtail/targets/windows/target_test.go | 12 +- .../promtail/targets/windows/targetmanager.go | 6 +- .../targets/windows/targetmanager_windows.go | 8 +- .../windows/win_eventlog/win_eventlog.go | 2 +- clients/pkg/promtail/utils/entries.go | 2 +- clients/pkg/promtail/utils/entries_test.go | 4 +- .../pkg/promtail/utils/remotewrite_server.go | 4 +- clients/pkg/promtail/wal/reader.go | 8 +- clients/pkg/promtail/wal/wal.go | 2 +- clients/pkg/promtail/wal/watcher.go | 2 +- clients/pkg/promtail/wal/watcher_test.go | 8 +- clients/pkg/promtail/wal/writer.go | 8 +- clients/pkg/promtail/wal/writer_test.go | 4 +- cmd/logcli/main.go | 18 +- cmd/logql-analyzer/main.go | 6 +- cmd/loki-canary/main.go | 8 +- cmd/loki/main.go | 12 +- cmd/migrate/main.go | 20 +- cmd/querytee/main.go | 4 +- go.mod | 6 +- integration/client/client.go | 6 +- integration/cluster/cluster.go | 16 +- .../loki_micro_services_delete_test.go | 12 +- integration/loki_micro_services_test.go | 10 +- integration/loki_rule_eval_test.go | 6 +- integration/loki_simple_scalable_test.go | 4 +- integration/loki_single_binary_test.go | 4 +- integration/multi_tenant_queries_test.go | 4 +- integration/per_request_limits_test.go | 6 +- pkg/analytics/reporter.go | 4 +- pkg/analytics/reporter_test.go | 2 +- pkg/analytics/seed_test.go | 2 +- pkg/analytics/stats.go | 2 +- pkg/analytics/stats_test.go | 2 +- pkg/bloomcompactor/batch.go | 12 +- pkg/bloomcompactor/batch_test.go | 4 +- pkg/bloomcompactor/bloomcompactor.go | 14 +- pkg/bloomcompactor/bloomcompactor_test.go | 16 +- pkg/bloomcompactor/config.go | 2 +- pkg/bloomcompactor/controller.go | 10 +- pkg/bloomcompactor/controller_test.go | 6 +- pkg/bloomcompactor/metrics.go | 2 +- pkg/bloomcompactor/retention.go | 8 +- pkg/bloomcompactor/retention_test.go | 20 +- pkg/bloomcompactor/spec.go | 14 +- pkg/bloomcompactor/spec_test.go | 6 +- pkg/bloomcompactor/tracker.go | 4 +- pkg/bloomcompactor/tracker_test.go | 4 +- pkg/bloomcompactor/tsdb.go | 16 +- pkg/bloomcompactor/tsdb_test.go | 4 +- pkg/bloomcompactor/versioned_range.go | 4 +- pkg/bloomcompactor/versioned_range_test.go | 6 +- pkg/bloomgateway/bloomgateway.go | 12 +- pkg/bloomgateway/bloomgateway_test.go | 22 +- pkg/bloomgateway/cache.go | 6 +- pkg/bloomgateway/cache_test.go | 14 +- pkg/bloomgateway/client.go | 22 +- pkg/bloomgateway/client_test.go | 12 +- pkg/bloomgateway/config.go | 2 +- pkg/bloomgateway/multiplexing.go | 10 +- pkg/bloomgateway/multiplexing_test.go | 8 +- pkg/bloomgateway/processor.go | 6 +- pkg/bloomgateway/processor_test.go | 10 +- pkg/bloomgateway/querier.go | 8 +- pkg/bloomgateway/querier_test.go | 6 +- pkg/bloomgateway/util.go | 8 +- pkg/bloomgateway/util_test.go | 8 +- pkg/bloomgateway/worker.go | 6 +- pkg/bloomutils/ring.go | 2 +- pkg/bloomutils/ring_test.go | 2 +- pkg/canary/comparator/comparator.go | 2 +- pkg/canary/reader/reader.go | 8 +- pkg/canary/writer/push.go | 4 +- pkg/canary/writer/push_test.go | 4 +- pkg/chunkenc/dumb_chunk.go | 8 +- pkg/chunkenc/facade.go | 4 +- pkg/chunkenc/hash_test.go | 2 +- pkg/chunkenc/interface.go | 8 +- pkg/chunkenc/memchunk.go | 14 +- pkg/chunkenc/memchunk_test.go | 18 +- pkg/chunkenc/pool.go | 2 +- pkg/chunkenc/symbols.go | 2 +- pkg/chunkenc/unordered.go | 8 +- pkg/chunkenc/unordered_test.go | 8 +- pkg/chunkenc/util_test.go | 4 +- pkg/compactor/client/grpc.go | 4 +- pkg/compactor/client/http.go | 4 +- pkg/compactor/compactor.go | 24 +- pkg/compactor/compactor_test.go | 12 +- pkg/compactor/deletion/delete_request.go | 8 +- pkg/compactor/deletion/delete_request_test.go | 6 +- 
.../deletion/delete_requests_client.go | 2 +- .../deletion/delete_requests_manager.go | 8 +- .../deletion/delete_requests_manager_test.go | 8 +- .../deletion/delete_requests_store.go | 4 +- .../deletion/delete_requests_store_test.go | 4 +- .../deletion/delete_requests_table.go | 12 +- .../deletion/delete_requests_table_test.go | 10 +- .../deletion/grpc_request_handler.go | 4 +- .../deletion/grpc_request_handler_test.go | 4 +- pkg/compactor/deletion/metrics.go | 2 +- pkg/compactor/deletion/request_handler.go | 6 +- .../deletion/request_handler_test.go | 2 +- .../deletion/tenant_delete_requests_client.go | 2 +- .../deletion/tenant_request_handler_test.go | 2 +- pkg/compactor/deletion/util.go | 4 +- .../generationnumber/gennumber_loader.go | 2 +- pkg/compactor/generationnumber/metrics.go | 2 +- pkg/compactor/index_set.go | 12 +- pkg/compactor/retention/expiration.go | 6 +- pkg/compactor/retention/expiration_test.go | 2 +- pkg/compactor/retention/marker.go | 6 +- pkg/compactor/retention/retention.go | 14 +- pkg/compactor/retention/retention_test.go | 16 +- pkg/compactor/retention/util_test.go | 16 +- pkg/compactor/table.go | 10 +- pkg/compactor/table_test.go | 8 +- pkg/compactor/testutil.go | 12 +- pkg/configs/client/client.go | 4 +- pkg/configs/client/configs_test.go | 2 +- pkg/configs/userconfig/config.go | 2 +- pkg/configs/userconfig/config_test.go | 2 +- pkg/distributor/distributor.go | 32 +- pkg/distributor/distributor_ring.go | 2 +- pkg/distributor/distributor_test.go | 28 +- pkg/distributor/http.go | 8 +- pkg/distributor/http_test.go | 6 +- .../ingestion_rate_strategy_test.go | 2 +- pkg/distributor/instance_count_test.go | 2 +- pkg/distributor/limits.go | 6 +- pkg/distributor/ratestore.go | 6 +- pkg/distributor/ratestore_metrics.go | 2 +- pkg/distributor/ratestore_test.go | 8 +- pkg/distributor/shardstreams/config.go | 2 +- pkg/distributor/streamsharder_test.go | 2 +- pkg/distributor/tee_test.go | 2 +- pkg/distributor/validator.go | 6 +- pkg/distributor/validator_test.go | 8 +- pkg/distributor/writefailures/cfg.go | 2 +- pkg/distributor/writefailures/manager.go | 2 +- pkg/distributor/writefailures/manager_test.go | 4 +- pkg/distributor/writefailures/metrics.go | 2 +- pkg/ingester/checkpoint.go | 10 +- pkg/ingester/checkpoint.pb.go | 78 ++-- pkg/ingester/checkpoint.proto | 2 +- pkg/ingester/checkpoint_test.go | 18 +- pkg/ingester/chunk_test.go | 8 +- pkg/ingester/client/client.go | 6 +- pkg/ingester/client/compat.go | 2 +- pkg/ingester/encoding_test.go | 6 +- pkg/ingester/flush.go | 8 +- pkg/ingester/flush_test.go | 32 +- pkg/ingester/index/bitprefix.go | 6 +- pkg/ingester/index/bitprefix_test.go | 6 +- pkg/ingester/index/index.go | 8 +- pkg/ingester/index/index_test.go | 8 +- pkg/ingester/index/multi.go | 6 +- pkg/ingester/index/multi_test.go | 8 +- pkg/ingester/ingester.go | 46 +-- pkg/ingester/ingester_test.go | 36 +- pkg/ingester/instance.go | 46 +-- pkg/ingester/instance_test.go | 28 +- pkg/ingester/limiter.go | 4 +- pkg/ingester/limiter_test.go | 2 +- pkg/ingester/mapper.go | 2 +- pkg/ingester/metrics.go | 6 +- pkg/ingester/recovery.go | 4 +- pkg/ingester/recovery_test.go | 16 +- pkg/ingester/replay_controller.go | 2 +- pkg/ingester/replay_controller_test.go | 2 +- pkg/ingester/stream.go | 20 +- pkg/ingester/stream_rate_calculator.go | 2 +- pkg/ingester/stream_test.go | 14 +- pkg/ingester/streams_map_test.go | 2 +- pkg/ingester/tailer.go | 10 +- pkg/ingester/tailer_test.go | 4 +- pkg/ingester/wal.go | 6 +- pkg/ingester/wal/encoding.go | 4 +- pkg/ingester/wal/encoding_test.go | 2 +- 
pkg/iter/cache.go | 2 +- pkg/iter/cache_test.go | 2 +- pkg/iter/categorized_labels_iterator.go | 4 +- pkg/iter/categorized_labels_iterator_test.go | 2 +- pkg/iter/entry_iterator.go | 8 +- pkg/iter/entry_iterator_test.go | 4 +- pkg/iter/iterator.go | 2 +- pkg/iter/sample_iterator.go | 6 +- pkg/iter/sample_iterator_test.go | 4 +- pkg/logcli/client/client.go | 12 +- pkg/logcli/client/file.go | 18 +- pkg/logcli/client/file_test.go | 4 +- pkg/logcli/index/stats.go | 4 +- pkg/logcli/index/volume.go | 10 +- pkg/logcli/labelquery/labels.go | 4 +- pkg/logcli/output/default.go | 2 +- pkg/logcli/output/default_test.go | 2 +- pkg/logcli/output/jsonl.go | 2 +- pkg/logcli/output/jsonl_test.go | 2 +- pkg/logcli/output/output.go | 2 +- pkg/logcli/output/raw.go | 2 +- pkg/logcli/output/raw_test.go | 2 +- pkg/logcli/print/print.go | 10 +- pkg/logcli/print/print_test.go | 4 +- pkg/logcli/query/query.go | 32 +- pkg/logcli/query/query_test.go | 22 +- pkg/logcli/query/tail.go | 10 +- pkg/logcli/seriesquery/series.go | 4 +- pkg/logcli/util/util.go | 2 +- pkg/loghttp/labels.go | 2 +- pkg/loghttp/labels_test.go | 2 +- pkg/loghttp/legacy/tail.go | 2 +- pkg/loghttp/params.go | 6 +- pkg/loghttp/params_test.go | 2 +- pkg/loghttp/push/otlp.go | 7 +- pkg/loghttp/push/otlp_test.go | 4 +- pkg/loghttp/push/push.go | 21 +- pkg/loghttp/push/push_test.go | 2 +- pkg/loghttp/query.go | 12 +- pkg/loghttp/query_test.go | 4 +- pkg/loghttp/series.go | 4 +- pkg/loghttp/series_test.go | 2 +- pkg/loghttp/tail.go | 6 +- pkg/loghttp/tail_test.go | 6 +- pkg/logproto/alias.go | 2 +- pkg/logproto/bloomgateway.pb.go | 79 ++-- pkg/logproto/bloomgateway.proto | 6 +- pkg/logproto/compat.go | 6 +- pkg/logproto/compat_test.go | 4 +- pkg/logproto/extensions.go | 2 +- pkg/logproto/indexgateway.pb.go | 95 ++--- pkg/logproto/indexgateway.proto | 2 +- pkg/logproto/logproto.pb.go | 379 +++++++++--------- pkg/logproto/logproto.proto | 16 +- pkg/logproto/metrics.pb.go | 79 ++-- pkg/logproto/metrics.proto | 2 +- pkg/logproto/sketch.pb.go | 79 ++-- pkg/logproto/sketch.proto | 2 +- pkg/logql/accumulator.go | 12 +- pkg/logql/accumulator_test.go | 6 +- pkg/logql/blocker.go | 6 +- pkg/logql/blocker_test.go | 8 +- pkg/logql/downstream.go | 16 +- pkg/logql/downstream_test.go | 6 +- pkg/logql/engine.go | 26 +- pkg/logql/engine_test.go | 20 +- pkg/logql/evaluator.go | 12 +- pkg/logql/evaluator_test.go | 2 +- pkg/logql/explain_test.go | 2 +- pkg/logql/limits.go | 2 +- pkg/logql/log/drop_labels.go | 2 +- pkg/logql/log/drop_labels_test.go | 2 +- pkg/logql/log/filter.go | 4 +- pkg/logql/log/fmt.go | 2 +- pkg/logql/log/fmt_test.go | 2 +- pkg/logql/log/keep_labels.go | 2 +- pkg/logql/log/keep_labels_test.go | 2 +- pkg/logql/log/label_filter.go | 2 +- pkg/logql/log/label_filter_test.go | 2 +- pkg/logql/log/labels.go | 2 +- pkg/logql/log/labels_test.go | 2 +- pkg/logql/log/parser.go | 8 +- pkg/logql/log/parser_hints.go | 2 +- pkg/logql/log/parser_hints_test.go | 4 +- pkg/logql/log/parser_test.go | 2 +- pkg/logql/log/pipeline_test.go | 2 +- pkg/logql/mapper_metrics.go | 2 +- pkg/logql/matchers.go | 2 +- pkg/logql/metrics.go | 20 +- pkg/logql/metrics_test.go | 14 +- pkg/logql/optimize.go | 2 +- pkg/logql/optimize_test.go | 2 +- pkg/logql/quantile_over_time_sketch.go | 8 +- pkg/logql/quantile_over_time_sketch_test.go | 6 +- pkg/logql/range_vector.go | 6 +- pkg/logql/range_vector_test.go | 10 +- pkg/logql/rangemapper.go | 4 +- pkg/logql/rangemapper_test.go | 2 +- pkg/logql/shardmapper.go | 6 +- pkg/logql/shardmapper_test.go | 8 +- pkg/logql/shards.go | 14 +- 
pkg/logql/shards_test.go | 4 +- pkg/logql/sketch/quantile.go | 2 +- pkg/logql/sketch/series.go | 2 +- pkg/logql/sketch/topk.go | 2 +- pkg/logql/syntax/ast.go | 6 +- pkg/logql/syntax/ast_test.go | 2 +- pkg/logql/syntax/clone.go | 2 +- pkg/logql/syntax/clone_test.go | 2 +- pkg/logql/syntax/expr.y | 2 +- pkg/logql/syntax/expr.y.go | 2 +- pkg/logql/syntax/extractor.go | 2 +- pkg/logql/syntax/lex.go | 2 +- pkg/logql/syntax/linefilter.go | 4 +- pkg/logql/syntax/linefilter_test.go | 2 +- pkg/logql/syntax/parser.go | 4 +- pkg/logql/syntax/parser_test.go | 4 +- pkg/logql/syntax/serialize.go | 2 +- pkg/logql/test_utils.go | 10 +- pkg/logqlanalyzer/analyzer.go | 4 +- pkg/logqlanalyzer/http.go | 2 +- pkg/logqlmodel/logqlmodel.go | 7 +- pkg/logqlmodel/metadata/context.go | 2 +- pkg/logqlmodel/metadata/context_test.go | 2 +- pkg/logqlmodel/stats/context_test.go | 2 +- pkg/logqlmodel/stats/stats.pb.go | 172 ++++---- pkg/logqlmodel/stats/stats.proto | 2 +- pkg/loki/common/common.go | 22 +- pkg/loki/config_compat.go | 6 +- pkg/loki/config_test.go | 4 +- pkg/loki/config_wrapper.go | 16 +- pkg/loki/config_wrapper_test.go | 30 +- pkg/loki/delete_store_listener.go | 2 +- pkg/loki/format_query_handler.go | 4 +- pkg/loki/loki.go | 74 ++-- pkg/loki/loki_test.go | 2 +- pkg/loki/modules.go | 92 ++--- pkg/loki/modules_test.go | 14 +- pkg/loki/runtime_config.go | 6 +- pkg/loki/runtime_config_test.go | 4 +- pkg/loki/version_handler.go | 2 +- pkg/loki/version_handler_test.go | 2 +- pkg/lokifrontend/config.go | 6 +- pkg/lokifrontend/frontend/config.go | 10 +- .../frontend/downstream_roundtripper.go | 2 +- .../frontend/transport/handler.go | 10 +- .../frontend/transport/roundtripper.go | 4 +- pkg/lokifrontend/frontend/v1/frontend.go | 12 +- pkg/lokifrontend/frontend/v1/frontend_test.go | 18 +- .../frontend/v1/frontendv1pb/frontend.pb.go | 2 +- pkg/lokifrontend/frontend/v1/queue_test.go | 4 +- pkg/lokifrontend/frontend/v2/frontend.go | 16 +- .../frontend/v2/frontend_scheduler_worker.go | 6 +- pkg/lokifrontend/frontend/v2/frontend_test.go | 16 +- .../frontend/v2/frontendv2pb/frontend.pb.go | 4 +- pkg/push/go.mod | 2 +- pkg/push/push.pb.go | 69 ++-- pkg/push/push.proto | 2 +- pkg/querier/astmapper/parallel.go | 2 +- pkg/querier/astmapper/shard_summer.go | 2 +- pkg/querier/handler.go | 8 +- pkg/querier/http.go | 32 +- pkg/querier/http_test.go | 6 +- pkg/querier/ingester_querier.go | 20 +- pkg/querier/ingester_querier_test.go | 6 +- pkg/querier/limits/definitions.go | 2 +- pkg/querier/multi_tenant_querier.go | 16 +- pkg/querier/multi_tenant_querier_test.go | 10 +- pkg/querier/plan/plan.go | 4 +- pkg/querier/plan/plan_test.go | 2 +- pkg/querier/querier.go | 32 +- pkg/querier/querier_mock_test.go | 30 +- pkg/querier/querier_test.go | 18 +- pkg/querier/queryrange/benchmarkutils_test.go | 2 +- pkg/querier/queryrange/codec.go | 32 +- pkg/querier/queryrange/codec_test.go | 20 +- pkg/querier/queryrange/downstreamer.go | 12 +- pkg/querier/queryrange/downstreamer_test.go | 14 +- pkg/querier/queryrange/extensions.go | 4 +- pkg/querier/queryrange/extensions_test.go | 2 +- pkg/querier/queryrange/index_stats_cache.go | 12 +- .../queryrange/index_stats_cache_test.go | 14 +- .../queryrange/ingester_query_window.go | 4 +- .../queryrange/instant_metric_cache.go | 6 +- pkg/querier/queryrange/instrument.go | 2 +- pkg/querier/queryrange/labels_cache.go | 8 +- pkg/querier/queryrange/labels_cache_test.go | 12 +- pkg/querier/queryrange/limits.go | 24 +- pkg/querier/queryrange/limits/definitions.go | 4 +- pkg/querier/queryrange/limits_test.go | 20 
+- pkg/querier/queryrange/log_result_cache.go | 14 +- .../queryrange/log_result_cache_test.go | 10 +- pkg/querier/queryrange/marshal.go | 22 +- pkg/querier/queryrange/marshal_test.go | 12 +- pkg/querier/queryrange/metrics.go | 6 +- pkg/querier/queryrange/ordering.go | 2 +- pkg/querier/queryrange/prometheus.go | 8 +- pkg/querier/queryrange/prometheus_test.go | 6 +- pkg/querier/queryrange/queryrange.pb.go | 379 +++++++++--------- pkg/querier/queryrange/queryrange.proto | 42 +- .../queryrange/queryrangebase/alias.go | 4 +- .../definitions/definitions.pb.go | 18 +- .../definitions/definitions.proto | 2 +- .../queryrangebase/definitions/interface.go | 2 +- .../queryrangebase/marshaling_test.go | 2 +- .../queryrange/queryrangebase/middleware.go | 2 +- .../queryrange/queryrangebase/promql_test.go | 2 +- .../queryrange/queryrangebase/query_range.go | 6 +- .../queryrangebase/query_range_test.go | 2 +- .../queryrangebase/queryrange.pb.go | 110 ++--- .../queryrangebase/queryrange.proto | 4 +- .../queryrangebase/results_cache.go | 8 +- .../queryrangebase/results_cache_test.go | 10 +- .../queryrange/queryrangebase/retry.go | 4 +- .../queryrange/queryrangebase/retry_test.go | 2 +- .../queryrange/queryrangebase/series_test.go | 2 +- .../queryrange/queryrangebase/test_utils.go | 4 +- .../queryrangebase/test_utils_test.go | 2 +- pkg/querier/queryrange/queryrangebase/util.go | 2 +- .../queryrange/queryrangebase/value.go | 4 +- .../queryrange/queryrangebase/value_test.go | 2 +- pkg/querier/queryrange/querysharding.go | 24 +- pkg/querier/queryrange/querysharding_test.go | 22 +- pkg/querier/queryrange/roundtrip.go | 22 +- pkg/querier/queryrange/roundtrip_test.go | 34 +- pkg/querier/queryrange/serialize.go | 8 +- pkg/querier/queryrange/serialize_test.go | 8 +- pkg/querier/queryrange/series_cache.go | 8 +- pkg/querier/queryrange/series_cache_test.go | 12 +- pkg/querier/queryrange/shard_resolver.go | 20 +- pkg/querier/queryrange/split_by_interval.go | 14 +- .../queryrange/split_by_interval_test.go | 18 +- pkg/querier/queryrange/split_by_range.go | 14 +- pkg/querier/queryrange/split_by_range_test.go | 10 +- pkg/querier/queryrange/splitters.go | 8 +- pkg/querier/queryrange/stats.go | 16 +- pkg/querier/queryrange/stats_test.go | 6 +- pkg/querier/queryrange/views.go | 4 +- pkg/querier/queryrange/views_test.go | 8 +- pkg/querier/queryrange/volume.go | 16 +- pkg/querier/queryrange/volume_cache.go | 12 +- pkg/querier/queryrange/volume_cache_test.go | 18 +- pkg/querier/queryrange/volume_test.go | 10 +- pkg/querier/stats/stats.pb.go | 41 +- pkg/querier/stats/stats.proto | 2 +- pkg/querier/tail.go | 8 +- pkg/querier/tail_mock_test.go | 2 +- pkg/querier/tail_test.go | 6 +- pkg/querier/testutils.go | 2 +- pkg/querier/worker/frontend_processor.go | 6 +- pkg/querier/worker/frontend_processor_test.go | 4 +- pkg/querier/worker/scheduler_processor.go | 12 +- .../worker/scheduler_processor_test.go | 6 +- pkg/querier/worker/util.go | 4 +- pkg/querier/worker/util_test.go | 8 +- pkg/querier/worker/worker.go | 6 +- pkg/querier/worker/worker_test.go | 2 +- pkg/querier/worker_service.go | 6 +- pkg/queue/dequeue_qos_test.go | 2 +- pkg/queue/queue_test.go | 2 +- pkg/queue/tenant_queues.go | 6 +- pkg/queue/tenant_queues_test.go | 2 +- pkg/ruler/base/api.go | 8 +- pkg/ruler/base/api_test.go | 2 +- pkg/ruler/base/client_pool_test.go | 2 +- pkg/ruler/base/compat.go | 6 +- pkg/ruler/base/compat_test.go | 2 +- pkg/ruler/base/error_translate_queryable.go | 4 +- pkg/ruler/base/lifecycle_test.go | 4 +- pkg/ruler/base/manager.go | 2 +- 
pkg/ruler/base/manager_metrics.go | 2 +- pkg/ruler/base/manager_metrics_test.go | 6 +- pkg/ruler/base/manager_test.go | 6 +- pkg/ruler/base/notifier.go | 4 +- pkg/ruler/base/notifier_test.go | 4 +- pkg/ruler/base/pusher_mock_test.go | 2 +- pkg/ruler/base/ruler.go | 12 +- pkg/ruler/base/ruler.pb.go | 130 +++--- pkg/ruler/base/ruler.proto | 4 +- pkg/ruler/base/ruler_ring.go | 2 +- pkg/ruler/base/ruler_test.go | 22 +- pkg/ruler/base/storage.go | 34 +- pkg/ruler/base/store_mock_test.go | 6 +- pkg/ruler/compat.go | 10 +- pkg/ruler/compat_test.go | 10 +- pkg/ruler/config.go | 6 +- pkg/ruler/config/alertmanager.go | 2 +- pkg/ruler/evaluator.go | 2 +- pkg/ruler/evaluator_jitter.go | 4 +- pkg/ruler/evaluator_jitter_test.go | 2 +- pkg/ruler/evaluator_local.go | 6 +- pkg/ruler/evaluator_remote.go | 14 +- pkg/ruler/evaluator_remote_test.go | 6 +- pkg/ruler/grouploader.go | 2 +- pkg/ruler/memstore.go | 6 +- pkg/ruler/memstore_test.go | 2 +- pkg/ruler/registry.go | 6 +- pkg/ruler/registry_test.go | 8 +- pkg/ruler/ruler.go | 4 +- pkg/ruler/rulespb/compat.go | 2 +- pkg/ruler/rulespb/rules.pb.go | 86 ++-- pkg/ruler/rulespb/rules.proto | 6 +- .../rulestore/bucketclient/bucket_client.go | 6 +- .../bucketclient/bucket_client_test.go | 10 +- pkg/ruler/rulestore/config.go | 8 +- pkg/ruler/rulestore/configdb/store.go | 6 +- pkg/ruler/rulestore/configdb/store_test.go | 4 +- pkg/ruler/rulestore/local/local.go | 2 +- pkg/ruler/rulestore/local/local_test.go | 2 +- .../rulestore/objectclient/rule_store.go | 6 +- pkg/ruler/rulestore/store.go | 2 +- pkg/ruler/storage/cleaner/cleaner.go | 4 +- pkg/ruler/storage/cleaner/cleaner_test.go | 2 +- pkg/ruler/storage/instance/instance.go | 6 +- pkg/ruler/storage/instance/instance_test.go | 2 +- pkg/ruler/storage/instance/manager.go | 2 +- pkg/scheduler/scheduler.go | 18 +- pkg/scheduler/scheduler_test.go | 4 +- pkg/scheduler/schedulerpb/scheduler.pb.go | 2 +- pkg/storage/async_store.go | 20 +- pkg/storage/async_store_test.go | 10 +- pkg/storage/batch.go | 24 +- pkg/storage/batch_test.go | 14 +- pkg/storage/bloom/v1/archive.go | 2 +- pkg/storage/bloom/v1/archive_test.go | 2 +- pkg/storage/bloom/v1/block_writer.go | 2 +- pkg/storage/bloom/v1/bloom.go | 6 +- pkg/storage/bloom/v1/bloom_tester.go | 8 +- pkg/storage/bloom/v1/bloom_tester_test.go | 4 +- pkg/storage/bloom/v1/bloom_tokenizer.go | 6 +- pkg/storage/bloom/v1/bloom_tokenizer_test.go | 10 +- pkg/storage/bloom/v1/bounds.go | 4 +- pkg/storage/bloom/v1/bounds_test.go | 2 +- pkg/storage/bloom/v1/builder.go | 6 +- pkg/storage/bloom/v1/builder_test.go | 4 +- pkg/storage/bloom/v1/fuse_test.go | 2 +- pkg/storage/bloom/v1/index.go | 6 +- pkg/storage/bloom/v1/index_test.go | 2 +- pkg/storage/bloom/v1/metrics.go | 2 +- pkg/storage/bloom/v1/test_util.go | 4 +- pkg/storage/bucket/azure/config.go | 2 +- pkg/storage/bucket/azure/config_test.go | 2 +- pkg/storage/bucket/client.go | 12 +- pkg/storage/bucket/client_test.go | 2 +- pkg/storage/bucket/s3/config.go | 6 +- pkg/storage/bucket/s3/config_test.go | 4 +- pkg/storage/bucket/sse_bucket_client.go | 2 +- pkg/storage/bucket/sse_bucket_client_test.go | 2 +- pkg/storage/chunk/cache/background.go | 6 +- pkg/storage/chunk/cache/background_test.go | 6 +- pkg/storage/chunk/cache/cache.go | 2 +- pkg/storage/chunk/cache/cache_gen.go | 2 +- pkg/storage/chunk/cache/cache_test.go | 12 +- pkg/storage/chunk/cache/embeddedcache.go | 4 +- pkg/storage/chunk/cache/instrumented.go | 2 +- pkg/storage/chunk/cache/memcached.go | 6 +- pkg/storage/chunk/cache/memcached_client.go | 2 +- 
.../chunk/cache/memcached_client_selector.go | 2 +- .../cache/memcached_client_selector_test.go | 2 +- pkg/storage/chunk/cache/memcached_test.go | 2 +- pkg/storage/chunk/cache/mock.go | 2 +- pkg/storage/chunk/cache/redis_cache.go | 4 +- pkg/storage/chunk/cache/resultscache/cache.go | 10 +- .../chunk/cache/resultscache/cache_test.go | 6 +- .../chunk/cache/resultscache/config.go | 2 +- .../chunk/cache/resultscache/test_types.pb.go | 59 +-- .../chunk/cache/resultscache/test_types.proto | 2 +- .../chunk/cache/resultscache/types.pb.go | 52 +-- .../chunk/cache/resultscache/types.proto | 2 +- pkg/storage/chunk/cache/snappy.go | 2 +- pkg/storage/chunk/cache/stats.go | 2 +- pkg/storage/chunk/cache/tiered.go | 2 +- pkg/storage/chunk/cache/tiered_test.go | 2 +- pkg/storage/chunk/chunk.go | 2 +- pkg/storage/chunk/chunk_test.go | 4 +- .../chunk/client/alibaba/oss_object_client.go | 4 +- .../chunk/client/aws/dynamodb_index_reader.go | 4 +- .../chunk/client/aws/dynamodb_metrics.go | 2 +- .../client/aws/dynamodb_storage_client.go | 18 +- .../aws/dynamodb_storage_client_test.go | 4 +- .../chunk/client/aws/dynamodb_table_client.go | 6 +- pkg/storage/chunk/client/aws/fixtures.go | 8 +- .../chunk/client/aws/metrics_autoscaling.go | 4 +- .../client/aws/metrics_autoscaling_test.go | 4 +- pkg/storage/chunk/client/aws/mock.go | 2 +- .../chunk/client/aws/s3_storage_client.go | 14 +- .../client/aws/s3_storage_client_test.go | 2 +- pkg/storage/chunk/client/aws/sse_config.go | 2 +- .../chunk/client/aws/sse_config_test.go | 2 +- .../chunk/client/azure/blob_storage_client.go | 14 +- .../client/azure/blob_storage_client_test.go | 2 +- .../client/baidubce/bos_storage_client.go | 4 +- .../chunk/client/cassandra/fixtures.go | 8 +- .../chunk/client/cassandra/instrumentation.go | 2 +- .../chunk/client/cassandra/storage_client.go | 10 +- .../chunk/client/cassandra/table_client.go | 4 +- pkg/storage/chunk/client/client.go | 4 +- pkg/storage/chunk/client/congestion/config.go | 2 +- .../chunk/client/congestion/controller.go | 4 +- .../client/congestion/controller_test.go | 4 +- pkg/storage/chunk/client/congestion/hedge.go | 2 +- .../chunk/client/congestion/interfaces.go | 4 +- .../chunk/client/congestion/metrics.go | 2 +- .../chunk/client/gcp/bigtable_index_client.go | 10 +- .../client/gcp/bigtable_object_client.go | 8 +- pkg/storage/chunk/client/gcp/fixtures.go | 10 +- .../chunk/client/gcp/gcs_object_client.go | 6 +- .../client/gcp/gcs_object_client_test.go | 2 +- .../chunk/client/gcp/instrumentation.go | 2 +- pkg/storage/chunk/client/gcp/table_client.go | 4 +- .../chunk/client/grpc/grpc_client_test.go | 10 +- .../client/grpc/grpc_server_mock_test.go | 2 +- pkg/storage/chunk/client/grpc/index_client.go | 4 +- .../chunk/client/grpc/storage_client.go | 4 +- pkg/storage/chunk/client/grpc/table_client.go | 2 +- .../client/ibmcloud/cos_object_client.go | 8 +- .../client/ibmcloud/cos_object_client_test.go | 4 +- ...trusted_profile_authentication_provider.go | 2 +- .../chunk/client/local/boltdb_index_client.go | 6 +- .../client/local/boltdb_index_client_test.go | 4 +- .../chunk/client/local/boltdb_table_client.go | 4 +- pkg/storage/chunk/client/local/fixtures.go | 8 +- .../chunk/client/local/fs_object_client.go | 8 +- .../client/local/fs_object_client_test.go | 2 +- pkg/storage/chunk/client/metrics.go | 4 +- pkg/storage/chunk/client/object_client.go | 6 +- .../chunk/client/object_client_test.go | 6 +- .../client/openstack/swift_object_client.go | 8 +- .../openstack/swift_object_client_test.go | 4 +- 
.../testutils/inmemory_storage_client.go | 8 +- .../chunk/client/testutils/testutils.go | 14 +- .../chunk/client/util/parallel_chunk_fetch.go | 4 +- .../client/util/parallel_chunk_fetch_test.go | 2 +- pkg/storage/chunk/client/util/util.go | 4 +- pkg/storage/chunk/dummy.go | 2 +- pkg/storage/chunk/fetcher/fetcher.go | 16 +- pkg/storage/chunk/fetcher/fetcher_test.go | 14 +- pkg/storage/chunk/interface.go | 2 +- pkg/storage/chunk/predicate.go | 2 +- pkg/storage/chunk/tests/by_key_test.go | 4 +- .../chunk/tests/caching_fixtures_test.go | 16 +- pkg/storage/chunk/tests/chunk_client_test.go | 10 +- pkg/storage/chunk/tests/index_client_test.go | 6 +- pkg/storage/chunk/tests/utils_test.go | 14 +- pkg/storage/common/aws/storage_class.go | 2 +- pkg/storage/config/bench_test.go | 4 +- pkg/storage/config/schema_config.go | 8 +- pkg/storage/config/schema_config_test.go | 4 +- pkg/storage/config/store.go | 2 +- pkg/storage/factory.go | 52 +-- pkg/storage/factory_test.go | 18 +- pkg/storage/hack/main.go | 20 +- pkg/storage/lazy_chunk.go | 14 +- pkg/storage/lazy_chunk_test.go | 14 +- pkg/storage/store.go | 48 +-- pkg/storage/store_test.go | 40 +- pkg/storage/stores/composite_store.go | 18 +- pkg/storage/stores/composite_store_entry.go | 22 +- pkg/storage/stores/composite_store_test.go | 10 +- pkg/storage/stores/index/index.go | 12 +- pkg/storage/stores/index/metrics.go | 2 +- .../stores/index/seriesvolume/volume.go | 2 +- .../stores/index/seriesvolume/volume_test.go | 2 +- pkg/storage/stores/index/stats/stats.go | 4 +- .../series/index/caching_index_client.go | 6 +- .../series/index/caching_index_client_test.go | 6 +- pkg/storage/stores/series/index/schema.go | 4 +- .../stores/series/index/schema_config.go | 4 +- .../stores/series/index/schema_test.go | 4 +- .../stores/series/index/table_client.go | 2 +- .../stores/series/index/table_manager.go | 4 +- .../stores/series/index/table_manager_test.go | 2 +- .../series/series_index_gateway_store.go | 10 +- .../series/series_index_gateway_store_test.go | 2 +- .../stores/series/series_index_store.go | 32 +- .../stores/series/series_store_test.go | 24 +- .../stores/series/series_store_utils.go | 6 +- .../stores/series/series_store_utils_test.go | 2 +- pkg/storage/stores/series_store_write.go | 12 +- pkg/storage/stores/series_store_write_test.go | 10 +- .../shipper/bloomshipper/blockscache.go | 6 +- .../shipper/bloomshipper/blockscache_test.go | 6 +- .../stores/shipper/bloomshipper/cache.go | 6 +- .../stores/shipper/bloomshipper/cache_test.go | 4 +- .../stores/shipper/bloomshipper/client.go | 12 +- .../shipper/bloomshipper/client_test.go | 6 +- .../shipper/bloomshipper/compress_utils.go | 2 +- .../bloomshipper/compress_utils_test.go | 2 +- .../shipper/bloomshipper/config/config.go | 2 +- .../stores/shipper/bloomshipper/fetcher.go | 6 +- .../shipper/bloomshipper/fetcher_test.go | 8 +- .../stores/shipper/bloomshipper/interval.go | 4 +- .../shipper/bloomshipper/interval_test.go | 2 +- .../stores/shipper/bloomshipper/resolver.go | 2 +- .../shipper/bloomshipper/resolver_test.go | 2 +- .../stores/shipper/bloomshipper/shipper.go | 2 +- .../shipper/bloomshipper/shipper_test.go | 2 +- .../stores/shipper/bloomshipper/store.go | 14 +- .../stores/shipper/bloomshipper/store_test.go | 14 +- .../boltdb/compactor/compacted_index.go | 16 +- .../boltdb/compactor/compacted_index_test.go | 14 +- .../indexshipper/boltdb/compactor/index.go | 2 +- .../boltdb/compactor/index_compactor.go | 4 +- .../indexshipper/boltdb/compactor/iterator.go | 6 +- .../boltdb/compactor/iterator_test.go | 10 
+- .../indexshipper/boltdb/compactor/series.go | 2 +- .../boltdb/compactor/table_compactor.go | 10 +- .../boltdb/compactor/table_compactor_test.go | 16 +- .../indexshipper/boltdb/compactor/util.go | 8 +- .../boltdb/compactor/util_test.go | 24 +- .../shipper/indexshipper/boltdb/index.go | 10 +- .../indexshipper/boltdb/index_client.go | 12 +- .../shipper/indexshipper/boltdb/querier.go | 6 +- .../shipper/indexshipper/boltdb/table.go | 10 +- .../indexshipper/boltdb/table_manager.go | 12 +- .../indexshipper/boltdb/table_manager_test.go | 12 +- .../shipper/indexshipper/boltdb/table_test.go | 10 +- .../indexshipper/downloads/index_set.go | 10 +- .../indexshipper/downloads/index_set_test.go | 8 +- .../shipper/indexshipper/downloads/table.go | 10 +- .../indexshipper/downloads/table_manager.go | 12 +- .../downloads/table_manager_test.go | 10 +- .../indexshipper/downloads/table_test.go | 6 +- .../gatewayclient/gateway_client.go | 18 +- .../gatewayclient/gateway_client_test.go | 12 +- .../gatewayclient/index_gateway_grpc_pool.go | 2 +- .../indexshipper/indexgateway/config.go | 2 +- .../indexshipper/indexgateway/gateway.go | 28 +- .../indexshipper/indexgateway/gateway_test.go | 20 +- .../shipper/indexshipper/indexgateway/grpc.go | 2 +- .../indexshipper/indexgateway/metrics.go | 2 +- .../indexgateway/shufflesharding.go | 2 +- .../stores/shipper/indexshipper/shipper.go | 16 +- .../indexshipper/storage/cached_client.go | 6 +- .../storage/cached_client_test.go | 6 +- .../shipper/indexshipper/storage/client.go | 2 +- .../indexshipper/storage/client_test.go | 4 +- .../shipper/indexshipper/storage/util_test.go | 6 +- .../shipper/indexshipper/table_client.go | 8 +- .../shipper/indexshipper/table_client_test.go | 4 +- .../shipper/indexshipper/testutil/testutil.go | 6 +- .../shipper/indexshipper/tsdb/builder.go | 4 +- .../shipper/indexshipper/tsdb/builder_test.go | 2 +- .../shipper/indexshipper/tsdb/compactor.go | 14 +- .../indexshipper/tsdb/compactor_test.go | 22 +- .../stores/shipper/indexshipper/tsdb/head.go | 2 +- .../shipper/indexshipper/tsdb/head_manager.go | 8 +- .../indexshipper/tsdb/head_manager_test.go | 16 +- .../shipper/indexshipper/tsdb/head_read.go | 2 +- .../shipper/indexshipper/tsdb/head_wal.go | 4 +- .../indexshipper/tsdb/head_wal_test.go | 2 +- .../stores/shipper/indexshipper/tsdb/index.go | 6 +- .../shipper/indexshipper/tsdb/index/chunk.go | 4 +- .../indexshipper/tsdb/index/chunk_test.go | 2 +- .../shipper/indexshipper/tsdb/index/index.go | 2 +- .../indexshipper/tsdb/index/index_test.go | 2 +- .../shipper/indexshipper/tsdb/index_client.go | 24 +- .../indexshipper/tsdb/index_client_test.go | 8 +- .../tsdb/index_shipper_querier.go | 8 +- .../shipper/indexshipper/tsdb/lazy_index.go | 4 +- .../shipper/indexshipper/tsdb/manager.go | 8 +- .../indexshipper/tsdb/multi_file_index.go | 4 +- .../tsdb/multi_file_index_test.go | 2 +- .../shipper/indexshipper/tsdb/multitenant.go | 4 +- .../stores/shipper/indexshipper/tsdb/pool.go | 2 +- .../shipper/indexshipper/tsdb/querier.go | 2 +- .../shipper/indexshipper/tsdb/querier_test.go | 4 +- .../indexshipper/tsdb/sharding/for_series.go | 2 +- .../indexshipper/tsdb/sharding/power.go | 4 +- .../indexshipper/tsdb/sharding/power_test.go | 2 +- .../indexshipper/tsdb/sharding/sharding.go | 6 +- .../tsdb/sharding/sharding_test.go | 4 +- .../indexshipper/tsdb/single_file_index.go | 12 +- .../tsdb/single_file_index_test.go | 10 +- .../stores/shipper/indexshipper/tsdb/store.go | 16 +- .../indexshipper/tsdb/testutil/objstore.go | 2 +- 
.../shipper/indexshipper/tsdb/util_test.go | 2 +- .../shipper/indexshipper/uploads/index_set.go | 8 +- .../indexshipper/uploads/index_set_test.go | 8 +- .../shipper/indexshipper/uploads/table.go | 6 +- .../indexshipper/uploads/table_manager.go | 4 +- .../uploads/table_manager_test.go | 6 +- .../indexshipper/uploads/table_test.go | 2 +- .../shipper/indexshipper/util/queries.go | 4 +- .../shipper/indexshipper/util/queries_test.go | 2 +- .../stores/shipper/indexshipper/util/util.go | 2 +- pkg/storage/util_test.go | 36 +- pkg/util/config.go | 2 +- pkg/util/deletion/deletion.go | 8 +- pkg/util/dns_watcher.go | 2 +- pkg/util/errors.go | 2 +- pkg/util/extract/extract.go | 2 +- pkg/util/flagext/labelset.go | 2 +- pkg/util/http_test.go | 6 +- pkg/util/httpgrpc/carrier.go | 2 +- pkg/util/limiter/combined_limits.go | 22 +- pkg/util/limiter/query_limiter.go | 4 +- pkg/util/log/experimental.go | 2 +- pkg/util/log/log.go | 2 +- pkg/util/loser/tree_test.go | 2 +- pkg/util/marshal/labels.go | 2 +- pkg/util/marshal/labels_test.go | 2 +- pkg/util/marshal/legacy/marshal.go | 6 +- pkg/util/marshal/legacy/marshal_test.go | 6 +- pkg/util/marshal/marshal.go | 16 +- pkg/util/marshal/marshal_test.go | 12 +- pkg/util/marshal/query.go | 12 +- pkg/util/marshal/tail.go | 4 +- pkg/util/metrics_helper.go | 2 +- pkg/util/querylimits/limiter.go | 4 +- pkg/util/querylimits/limiter_test.go | 2 +- pkg/util/querylimits/middleware.go | 2 +- pkg/util/querylimits/propagation.go | 2 +- pkg/util/ring/ring.go | 2 +- pkg/util/ring/ring_config.go | 4 +- pkg/util/server/error.go | 6 +- pkg/util/server/error_test.go | 6 +- pkg/util/server/grpc_headers.go | 2 +- pkg/util/server/grpc_headers_test.go | 2 +- pkg/util/server/grpc_query_tags.go | 2 +- pkg/util/server/grpc_query_tags_test.go | 2 +- pkg/util/server/recovery.go | 4 +- pkg/util/server/recovery_test.go | 2 +- pkg/util/spanlogger/spanlogger.go | 2 +- pkg/util/time.go | 2 +- pkg/util/unmarshal/legacy/unmarshal.go | 2 +- pkg/util/unmarshal/legacy/unmarshal_test.go | 2 +- pkg/util/unmarshal/unmarshal.go | 4 +- pkg/util/unmarshal/unmarshal_test.go | 8 +- .../validation/notifications_limit_flag.go | 2 +- pkg/validation/exporter.go | 2 +- pkg/validation/limits.go | 24 +- pkg/validation/limits_test.go | 8 +- pkg/validation/validate.go | 4 +- tools/bloom/inspector/main.go | 2 +- tools/deprecated-config-checker/main.go | 2 +- tools/doc-generator/main.go | 4 +- tools/doc-generator/parse/parser.go | 8 +- tools/doc-generator/parse/root_blocks.go | 60 +-- tools/doc-generator/writer.go | 2 +- tools/querytee/response_comparator.go | 4 +- tools/tsdb/bloom-tester/concurrent.go | 2 +- tools/tsdb/bloom-tester/lib.go | 26 +- tools/tsdb/bloom-tester/main.go | 2 +- tools/tsdb/bloom-tester/metrics.go | 4 +- tools/tsdb/bloom-tester/readlib.go | 28 +- tools/tsdb/bloom-tester/tokenizer.go | 16 +- tools/tsdb/helpers/setup.go | 16 +- tools/tsdb/helpers/util.go | 4 +- tools/tsdb/index-analyzer/analytics.go | 8 +- tools/tsdb/index-analyzer/main.go | 10 +- tools/tsdb/migrate-versions/main.go | 22 +- tools/tsdb/migrate-versions/main_test.go | 14 +- tools/tsdb/tsdb-map/main.go | 12 +- tools/tsdb/tsdb-map/main_test.go | 4 +- .../grafana/loki/{ => v3}/pkg/push/LICENSE | 0 .../grafana/loki/{ => v3}/pkg/push/push.pb.go | 69 ++-- .../grafana/loki/{ => v3}/pkg/push/push.proto | 2 +- .../loki/{ => v3}/pkg/push/timestamp.go | 0 .../grafana/loki/{ => v3}/pkg/push/types.go | 0 vendor/modules.txt | 6 +- 981 files changed, 4457 insertions(+), 4444 deletions(-) rename vendor/github.com/grafana/loki/{ => v3}/pkg/push/LICENSE 
(100%) rename vendor/github.com/grafana/loki/{ => v3}/pkg/push/push.pb.go (92%) rename vendor/github.com/grafana/loki/{ => v3}/pkg/push/push.proto (95%) rename vendor/github.com/grafana/loki/{ => v3}/pkg/push/timestamp.go (100%) rename vendor/github.com/grafana/loki/{ => v3}/pkg/push/types.go (100%) diff --git a/clients/cmd/docker-driver/config.go b/clients/cmd/docker-driver/config.go index 95dd07a6d8e81..d53117ca4872b 100644 --- a/clients/cmd/docker-driver/config.go +++ b/clients/cmd/docker-driver/config.go @@ -19,11 +19,11 @@ import ( "github.com/prometheus/prometheus/model/relabel" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/cmd/docker-driver/config_test.go b/clients/cmd/docker-driver/config_test.go index a3920778b622b..f83c560e39391 100644 --- a/clients/cmd/docker-driver/config_test.go +++ b/clients/cmd/docker-driver/config_test.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var jobRename = ` diff --git a/clients/cmd/docker-driver/loki.go b/clients/cmd/docker-driver/loki.go index cc15e71bda2ee..77bc4e5e439b7 100644 --- a/clients/cmd/docker-driver/loki.go +++ b/clients/cmd/docker-driver/loki.go @@ -10,11 +10,11 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var jobName = "docker" diff --git a/clients/cmd/docker-driver/loki_test.go b/clients/cmd/docker-driver/loki_test.go index 0edc7b0c76931..4e61f37cd05c0 100644 --- a/clients/cmd/docker-driver/loki_test.go +++ b/clients/cmd/docker-driver/loki_test.go @@ -7,7 +7,7 @@ import ( "github.com/docker/docker/daemon/logger" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_loki_LogWhenClosed(t *testing.T) { diff --git a/clients/cmd/docker-driver/main.go b/clients/cmd/docker-driver/main.go index 5aba041f6b5fe..06d90b81bda56 100644 --- a/clients/cmd/docker-driver/main.go +++ b/clients/cmd/docker-driver/main.go @@ -12,8 +12,8 @@ import ( dslog "github.com/grafana/dskit/log" "github.com/prometheus/common/version" - _ "github.com/grafana/loki/pkg/util/build" - util_log "github.com/grafana/loki/pkg/util/log" + _ "github.com/grafana/loki/v3/pkg/util/build" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const socketAddress = "/run/docker/plugins/loki.sock" diff --git a/clients/cmd/fluent-bit/buffer.go b/clients/cmd/fluent-bit/buffer.go index a168ccfc142c5..28e9529abff48 
100644 --- a/clients/cmd/fluent-bit/buffer.go +++ b/clients/cmd/fluent-bit/buffer.go @@ -5,7 +5,7 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) type bufferConfig struct { diff --git a/clients/cmd/fluent-bit/client.go b/clients/cmd/fluent-bit/client.go index 11c2fa1d0386b..828d013d85ae1 100644 --- a/clients/cmd/fluent-bit/client.go +++ b/clients/cmd/fluent-bit/client.go @@ -3,7 +3,7 @@ package main import ( "github.com/go-kit/log" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) // NewClient creates a new client based on the fluentbit configuration. diff --git a/clients/cmd/fluent-bit/config.go b/clients/cmd/fluent-bit/config.go index 469e18d495d74..84838d03f20f8 100644 --- a/clients/cmd/fluent-bit/config.go +++ b/clients/cmd/fluent-bit/config.go @@ -12,10 +12,10 @@ import ( "github.com/grafana/dskit/log" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/logentry/logql" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/logentry/logql" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) var defaultClientCfg = client.Config{} diff --git a/clients/cmd/fluent-bit/config_test.go b/clients/cmd/fluent-bit/config_test.go index 0d5ec6d592b0e..f52ea18bc96db 100644 --- a/clients/cmd/fluent-bit/config_test.go +++ b/clients/cmd/fluent-bit/config_test.go @@ -12,9 +12,9 @@ import ( "github.com/grafana/dskit/log" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) type fakeConfig map[string]string diff --git a/clients/cmd/fluent-bit/dque.go b/clients/cmd/fluent-bit/dque.go index f7091de893f59..6e5746033254b 100644 --- a/clients/cmd/fluent-bit/dque.go +++ b/clients/cmd/fluent-bit/dque.go @@ -12,10 +12,10 @@ import ( "github.com/joncrlsn/dque" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type dqueConfig struct { diff --git a/clients/cmd/fluent-bit/loki.go b/clients/cmd/fluent-bit/loki.go index ea3de0261f407..6749af1ebf881 100644 --- a/clients/cmd/fluent-bit/loki.go +++ b/clients/cmd/fluent-bit/loki.go @@ -17,10 +17,10 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/clients/cmd/fluent-bit/loki_test.go b/clients/cmd/fluent-bit/loki_test.go index 1bfd21d22ce02..477f6abe1757c 100644 --- a/clients/cmd/fluent-bit/loki_test.go +++ b/clients/cmd/fluent-bit/loki_test.go @@ -11,10 +11,10 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" - 
"github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var now = time.Now() diff --git a/clients/cmd/fluent-bit/out_grafana_loki.go b/clients/cmd/fluent-bit/out_grafana_loki.go index d396fddfc8da2..70a58e65b9350 100644 --- a/clients/cmd/fluent-bit/out_grafana_loki.go +++ b/clients/cmd/fluent-bit/out_grafana_loki.go @@ -13,12 +13,12 @@ import ( dslog "github.com/grafana/dskit/log" "github.com/prometheus/common/version" - _ "github.com/grafana/loki/pkg/util/build" + _ "github.com/grafana/loki/v3/pkg/util/build" ) import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) var ( diff --git a/clients/cmd/promtail/main.go b/clients/cmd/promtail/main.go index 4492938246b5b..7e00e7ff35db3 100644 --- a/clients/cmd/promtail/main.go +++ b/clients/cmd/promtail/main.go @@ -20,16 +20,16 @@ import ( collectors_version "github.com/prometheus/client_golang/prometheus/collectors/version" "github.com/prometheus/common/version" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail" - "github.com/grafana/loki/clients/pkg/promtail/client" - promtail_config "github.com/grafana/loki/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + promtail_config "github.com/grafana/loki/v3/clients/pkg/promtail/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/cfg" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/cfg" - _ "github.com/grafana/loki/pkg/util/build" - util_log "github.com/grafana/loki/pkg/util/log" + _ "github.com/grafana/loki/v3/pkg/util/build" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func init() { diff --git a/clients/pkg/logentry/logql/parser.go b/clients/pkg/logentry/logql/parser.go index d567f6fce4c8b..924ec1b7bdeab 100644 --- a/clients/pkg/logentry/logql/parser.go +++ b/clients/pkg/logentry/logql/parser.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func init() { diff --git a/clients/pkg/logentry/metric/metricvec.go b/clients/pkg/logentry/metric/metricvec.go index 07f73c20873d3..f004db760f8f6 100644 --- a/clients/pkg/logentry/metric/metricvec.go +++ b/clients/pkg/logentry/metric/metricvec.go @@ -5,7 +5,7 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" diff --git a/clients/pkg/logentry/stages/decolorize.go b/clients/pkg/logentry/stages/decolorize.go index bac7274b6bad3..a86e6cdeafb2e 100644 --- a/clients/pkg/logentry/stages/decolorize.go +++ b/clients/pkg/logentry/stages/decolorize.go @@ -1,7 +1,7 @@ package stages import ( - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) type decolorizeStage struct{} diff --git a/clients/pkg/logentry/stages/decolorize_test.go b/clients/pkg/logentry/stages/decolorize_test.go index 5e7cead0a5275..029cd74c1c1e3 100644 --- 
a/clients/pkg/logentry/stages/decolorize_test.go +++ b/clients/pkg/logentry/stages/decolorize_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testDecolorizePipeline = ` diff --git a/clients/pkg/logentry/stages/drop.go b/clients/pkg/logentry/stages/drop.go index 19a2e6c378075..462d6c34f6350 100644 --- a/clients/pkg/logentry/stages/drop.go +++ b/clients/pkg/logentry/stages/drop.go @@ -13,7 +13,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const ( diff --git a/clients/pkg/logentry/stages/drop_test.go b/clients/pkg/logentry/stages/drop_test.go index a7e5ffcb5665f..220bb68314df3 100644 --- a/clients/pkg/logentry/stages/drop_test.go +++ b/clients/pkg/logentry/stages/drop_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Not all these are tested but are here to make sure the different types marshal without error diff --git a/clients/pkg/logentry/stages/eventlogmessage_test.go b/clients/pkg/logentry/stages/eventlogmessage_test.go index 4729d5a08f0e6..ed4bedccfc70c 100644 --- a/clients/pkg/logentry/stages/eventlogmessage_test.go +++ b/clients/pkg/logentry/stages/eventlogmessage_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testEvtLogMsgYamlDefaults = ` diff --git a/clients/pkg/logentry/stages/extensions.go b/clients/pkg/logentry/stages/extensions.go index d2e788dcb9d1a..2e49d6bd224b3 100644 --- a/clients/pkg/logentry/stages/extensions.go +++ b/clients/pkg/logentry/stages/extensions.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const ( diff --git a/clients/pkg/logentry/stages/extensions_test.go b/clients/pkg/logentry/stages/extensions_test.go index 9e2a3f62a56f7..0d03acd3fe3dd 100644 --- a/clients/pkg/logentry/stages/extensions_test.go +++ b/clients/pkg/logentry/stages/extensions_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/clients/pkg/logentry/stages/json_test.go b/clients/pkg/logentry/stages/json_test.go index 31a0c0219e5af..1764387253fb1 100644 --- a/clients/pkg/logentry/stages/json_test.go +++ b/clients/pkg/logentry/stages/json_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testJSONYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/labelallow_test.go b/clients/pkg/logentry/stages/labelallow_test.go index a5cbcd8e3ce6b..ebcf451487ef8 100644 --- a/clients/pkg/logentry/stages/labelallow_test.go +++ b/clients/pkg/logentry/stages/labelallow_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_addLabelStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/labeldrop_test.go b/clients/pkg/logentry/stages/labeldrop_test.go index 215a7888f8c31..70912c7ed1c84 100644 --- a/clients/pkg/logentry/stages/labeldrop_test.go +++ b/clients/pkg/logentry/stages/labeldrop_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_dropLabelStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/labels_test.go b/clients/pkg/logentry/stages/labels_test.go index 175359606a2f4..27747d8032edd 100644 --- a/clients/pkg/logentry/stages/labels_test.go +++ b/clients/pkg/logentry/stages/labels_test.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testLabelsYaml = ` diff --git a/clients/pkg/logentry/stages/limit.go b/clients/pkg/logentry/stages/limit.go index d5489221e6ac0..49d32cbf04029 100644 --- a/clients/pkg/logentry/stages/limit.go +++ b/clients/pkg/logentry/stages/limit.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/go-kit/log" "github.com/mitchellh/mapstructure" diff --git a/clients/pkg/logentry/stages/limit_test.go b/clients/pkg/logentry/stages/limit_test.go index 840db40d37cb2..0d3519e8c9b4b 100644 --- a/clients/pkg/logentry/stages/limit_test.go +++ b/clients/pkg/logentry/stages/limit_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Not all these are tested but are here to make sure the different types marshal without error diff --git a/clients/pkg/logentry/stages/logfmt_test.go b/clients/pkg/logentry/stages/logfmt_test.go index 8258eeece501b..ed60d8770d014 100644 --- a/clients/pkg/logentry/stages/logfmt_test.go +++ b/clients/pkg/logentry/stages/logfmt_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testLogfmtYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/match.go b/clients/pkg/logentry/stages/match.go index 3b4addbb0de12..4007e45da4ecb 100644 --- a/clients/pkg/logentry/stages/match.go +++ b/clients/pkg/logentry/stages/match.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/clients/pkg/logentry/logql" + "github.com/grafana/loki/v3/clients/pkg/logentry/logql" ) const ( diff --git a/clients/pkg/logentry/stages/match_test.go b/clients/pkg/logentry/stages/match_test.go index 558407320c57d..05d65f0bcaff5 100644 --- a/clients/pkg/logentry/stages/match_test.go +++ b/clients/pkg/logentry/stages/match_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var 
testMatchYaml = ` diff --git a/clients/pkg/logentry/stages/metrics.go b/clients/pkg/logentry/stages/metrics.go index 14386e3b43a40..827f0cf313a47 100644 --- a/clients/pkg/logentry/stages/metrics.go +++ b/clients/pkg/logentry/stages/metrics.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/logentry/metric" + "github.com/grafana/loki/v3/clients/pkg/logentry/metric" ) const ( diff --git a/clients/pkg/logentry/stages/metrics_test.go b/clients/pkg/logentry/stages/metrics_test.go index 6a14e6c80c1ee..f46ea6839919f 100644 --- a/clients/pkg/logentry/stages/metrics_test.go +++ b/clients/pkg/logentry/stages/metrics_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/metric" + "github.com/grafana/loki/v3/clients/pkg/logentry/metric" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testMetricYaml = ` diff --git a/clients/pkg/logentry/stages/multiline.go b/clients/pkg/logentry/stages/multiline.go index 199ff438a9390..2f94a2e1822f3 100644 --- a/clients/pkg/logentry/stages/multiline.go +++ b/clients/pkg/logentry/stages/multiline.go @@ -13,9 +13,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/clients/pkg/logentry/stages/multiline_test.go b/clients/pkg/logentry/stages/multiline_test.go index 33b71c8f5f023..b175f89845dea 100644 --- a/clients/pkg/logentry/stages/multiline_test.go +++ b/clients/pkg/logentry/stages/multiline_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_multilineStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/output_test.go b/clients/pkg/logentry/stages/output_test.go index a7b02714faf74..dc6aac54f0b93 100644 --- a/clients/pkg/logentry/stages/output_test.go +++ b/clients/pkg/logentry/stages/output_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testOutputYaml = ` diff --git a/clients/pkg/logentry/stages/pack.go b/clients/pkg/logentry/stages/pack.go index 737fa8d36b796..881650d8c6aa1 100644 --- a/clients/pkg/logentry/stages/pack.go +++ b/clients/pkg/logentry/stages/pack.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) var ( diff --git a/clients/pkg/logentry/stages/pack_test.go b/clients/pkg/logentry/stages/pack_test.go index b767f90a76063..44935051a9523 100644 --- a/clients/pkg/logentry/stages/pack_test.go +++ b/clients/pkg/logentry/stages/pack_test.go @@ -12,11 +12,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - 
"github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Not all these are tested but are here to make sure the different types marshal without error diff --git a/clients/pkg/logentry/stages/pipeline.go b/clients/pkg/logentry/stages/pipeline.go index c20a7784c511c..1c4d2ba8e5ab4 100644 --- a/clients/pkg/logentry/stages/pipeline.go +++ b/clients/pkg/logentry/stages/pipeline.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) // PipelineStages contains configuration for each stage within a pipeline diff --git a/clients/pkg/logentry/stages/pipeline_test.go b/clients/pkg/logentry/stages/pipeline_test.go index 51fe66e30c36b..2649de6a83441 100644 --- a/clients/pkg/logentry/stages/pipeline_test.go +++ b/clients/pkg/logentry/stages/pipeline_test.go @@ -14,11 +14,11 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/clients/pkg/logentry/stages/regex_test.go b/clients/pkg/logentry/stages/regex_test.go index dc3402e6e7a45..f7fa5390a1959 100644 --- a/clients/pkg/logentry/stages/regex_test.go +++ b/clients/pkg/logentry/stages/regex_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testRegexYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/replace_test.go b/clients/pkg/logentry/stages/replace_test.go index f8feef3c898df..87bb3eecb898a 100644 --- a/clients/pkg/logentry/stages/replace_test.go +++ b/clients/pkg/logentry/stages/replace_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testReplaceYamlSingleStageWithoutSource = ` diff --git a/clients/pkg/logentry/stages/sampling_test.go b/clients/pkg/logentry/stages/sampling_test.go index 171277e961d66..9b56eec5c0c5a 100644 --- a/clients/pkg/logentry/stages/sampling_test.go +++ b/clients/pkg/logentry/stages/sampling_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testSampingYaml = ` diff --git a/clients/pkg/logentry/stages/stage.go b/clients/pkg/logentry/stages/stage.go index 1c19face4044d..9de1d4e0a5904 100644 --- a/clients/pkg/logentry/stages/stage.go +++ b/clients/pkg/logentry/stages/stage.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/common/model" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/api" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/api" ) const ( diff --git a/clients/pkg/logentry/stages/static_labels_test.go b/clients/pkg/logentry/stages/static_labels_test.go index 9643d3da7aa51..bad2ec68f4a97 100644 --- a/clients/pkg/logentry/stages/static_labels_test.go +++ b/clients/pkg/logentry/stages/static_labels_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_staticLabelStage_Process(t *testing.T) { diff --git a/clients/pkg/logentry/stages/structuredmetadata.go b/clients/pkg/logentry/stages/structuredmetadata.go index cdab88a956c7f..cdf70c01d4fa7 100644 --- a/clients/pkg/logentry/stages/structuredmetadata.go +++ b/clients/pkg/logentry/stages/structuredmetadata.go @@ -5,7 +5,7 @@ import ( "github.com/mitchellh/mapstructure" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func newStructuredMetadataStage(params StageCreationParams) (Stage, error) { diff --git a/clients/pkg/logentry/stages/structuredmetadata_test.go b/clients/pkg/logentry/stages/structuredmetadata_test.go index d9a70300b8b0b..6bcb7a1ee9d25 100644 --- a/clients/pkg/logentry/stages/structuredmetadata_test.go +++ b/clients/pkg/logentry/stages/structuredmetadata_test.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/push" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/push" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var pipelineStagesStructuredMetadataUsingMatch = ` diff --git a/clients/pkg/logentry/stages/template_test.go b/clients/pkg/logentry/stages/template_test.go index 96e7f1b06a2df..7977c87ffee66 100644 --- a/clients/pkg/logentry/stages/template_test.go +++ b/clients/pkg/logentry/stages/template_test.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testTemplateYaml = ` diff --git a/clients/pkg/logentry/stages/tenant.go b/clients/pkg/logentry/stages/tenant.go index 13717ccb29bf6..ed730fbc0c121 100644 --- a/clients/pkg/logentry/stages/tenant.go +++ b/clients/pkg/logentry/stages/tenant.go @@ -10,7 +10,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" ) const ( diff --git a/clients/pkg/logentry/stages/tenant_test.go b/clients/pkg/logentry/stages/tenant_test.go index eb02b0bda9db8..8eee783d47ddf 100644 --- a/clients/pkg/logentry/stages/tenant_test.go +++ b/clients/pkg/logentry/stages/tenant_test.go @@ -12,10 +12,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - lokiutil "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + lokiutil "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testTenantYamlExtractedData = ` diff --git a/clients/pkg/logentry/stages/timestamp.go b/clients/pkg/logentry/stages/timestamp.go index 592ae13565643..fb1fb8a27c3b5 100644 --- a/clients/pkg/logentry/stages/timestamp.go +++ 
b/clients/pkg/logentry/stages/timestamp.go @@ -12,7 +12,7 @@ import ( "github.com/mitchellh/mapstructure" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/logentry/stages/timestamp_test.go b/clients/pkg/logentry/stages/timestamp_test.go index 73e4fb196b5b5..f3f23dcfcebab 100644 --- a/clients/pkg/logentry/stages/timestamp_test.go +++ b/clients/pkg/logentry/stages/timestamp_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - lokiutil "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + lokiutil "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var testTimestampYaml = ` diff --git a/clients/pkg/logentry/stages/util_test.go b/clients/pkg/logentry/stages/util_test.go index b58490cc56fc6..5ce0ae9a7f93a 100644 --- a/clients/pkg/logentry/stages/util_test.go +++ b/clients/pkg/logentry/stages/util_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func newEntry(ex map[string]interface{}, lbs model.LabelSet, line string, ts time.Time) Entry { diff --git a/clients/pkg/promtail/api/types.go b/clients/pkg/promtail/api/types.go index 2bb2482da4628..36f9cc484160c 100644 --- a/clients/pkg/promtail/api/types.go +++ b/clients/pkg/promtail/api/types.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // Entry is a log entry with labels. 
diff --git a/clients/pkg/promtail/client/batch.go b/clients/pkg/promtail/client/batch.go index 8681b67bd13f1..a6e7b45dd984b 100644 --- a/clients/pkg/promtail/client/batch.go +++ b/clients/pkg/promtail/client/batch.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/clients/pkg/promtail/client/batch_test.go b/clients/pkg/promtail/client/batch_test.go index 56dc9477e8b6a..ec92fbc1c4225 100644 --- a/clients/pkg/promtail/client/batch_test.go +++ b/clients/pkg/promtail/client/batch_test.go @@ -9,9 +9,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestBatch_MaxStreams(t *testing.T) { diff --git a/clients/pkg/promtail/client/client.go b/clients/pkg/promtail/client/client.go index 4dfd11363a824..ea93a604d32fb 100644 --- a/clients/pkg/promtail/client/client.go +++ b/clients/pkg/promtail/client/client.go @@ -20,10 +20,10 @@ import ( "github.com/prometheus/common/config" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - lokiutil "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/build" + lokiutil "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/clients/pkg/promtail/client/client_test.go b/clients/pkg/promtail/client/client_test.go index 01cbb87cc1116..b775bdb8eb8a8 100644 --- a/clients/pkg/promtail/client/client_test.go +++ b/clients/pkg/promtail/client/client_test.go @@ -19,12 +19,12 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/push" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/push" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) var logEntries = []api.Entry{ diff --git a/clients/pkg/promtail/client/client_writeto.go b/clients/pkg/promtail/client/client_writeto.go index 6fa549dfa2721..bd5ecfc424689 100644 --- a/clients/pkg/promtail/client/client_writeto.go +++ b/clients/pkg/promtail/client/client_writeto.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/record" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/util" ) // clientWriteTo implements a wal.WriteTo that re-builds entries with the stored series, and the received entries. 
After, diff --git a/clients/pkg/promtail/client/client_writeto_test.go b/clients/pkg/promtail/client/client_writeto_test.go index 2254fbb073658..3693b677f2ccf 100644 --- a/clients/pkg/promtail/client/client_writeto_test.go +++ b/clients/pkg/promtail/client/client_writeto_test.go @@ -18,10 +18,10 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestClientWriter_LogEntriesAreReconstructedAndForwardedCorrectly(t *testing.T) { diff --git a/clients/pkg/promtail/client/config.go b/clients/pkg/promtail/client/config.go index ab36353ba4903..eab0eb8863e65 100644 --- a/clients/pkg/promtail/client/config.go +++ b/clients/pkg/promtail/client/config.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/flagext" "github.com/prometheus/common/config" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) // NOTE the helm chart for promtail and fluent-bit also have defaults for these values, please update to match if you make changes here. diff --git a/clients/pkg/promtail/client/fake/client.go b/clients/pkg/promtail/client/fake/client.go index 33e886c30980c..03257135a585d 100644 --- a/clients/pkg/promtail/client/fake/client.go +++ b/clients/pkg/promtail/client/fake/client.go @@ -3,7 +3,7 @@ package fake import ( "sync" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) // Client is a fake client used for testing. 
diff --git a/clients/pkg/promtail/client/logger.go b/clients/pkg/promtail/client/logger.go index 890d51177c26c..ba20055a0d94b 100644 --- a/clients/pkg/promtail/client/logger.go +++ b/clients/pkg/promtail/client/logger.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" ) var ( diff --git a/clients/pkg/promtail/client/logger_test.go b/clients/pkg/promtail/client/logger_test.go index 43c710d69088c..c19f39ac75784 100644 --- a/clients/pkg/promtail/client/logger_test.go +++ b/clients/pkg/promtail/client/logger_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestNewLogger(t *testing.T) { diff --git a/clients/pkg/promtail/client/manager.go b/clients/pkg/promtail/client/manager.go index 84dc48de350d5..ef08d2c04f528 100644 --- a/clients/pkg/promtail/client/manager.go +++ b/clients/pkg/promtail/client/manager.go @@ -9,9 +9,9 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" ) // WriterEventsNotifier implements a notifier that's received by the Manager, to which wal.Watcher can subscribe for diff --git a/clients/pkg/promtail/client/manager_test.go b/clients/pkg/promtail/client/manager_test.go index 14165ea503f2b..f11821c82120a 100644 --- a/clients/pkg/promtail/client/manager_test.go +++ b/clients/pkg/promtail/client/manager_test.go @@ -16,14 +16,14 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/utils" - "github.com/grafana/loki/clients/pkg/promtail/wal" - - "github.com/grafana/loki/pkg/logproto" - lokiflag "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" + + "github.com/grafana/loki/v3/pkg/logproto" + lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) var testLimitsConfig = limit.Config{ diff --git a/clients/pkg/promtail/config/config.go b/clients/pkg/promtail/config/config.go index 4a720a2dd28f3..615b8e9abaad5 100644 --- a/clients/pkg/promtail/config/config.go +++ b/clients/pkg/promtail/config/config.go @@ -8,16 +8,16 @@ import ( "github.com/go-kit/log/level" 
"gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/limit" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/server" - "github.com/grafana/loki/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/limit" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/server" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" - "github.com/grafana/loki/pkg/tracing" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/tracing" + "github.com/grafana/loki/v3/pkg/util/flagext" ) // Options contains cross-cutting promtail configurations diff --git a/clients/pkg/promtail/config/config_test.go b/clients/pkg/promtail/config/config_test.go index 04cd09f56dfc1..32bab70501e39 100644 --- a/clients/pkg/promtail/config/config_test.go +++ b/clients/pkg/promtail/config/config_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const testFile = ` diff --git a/clients/pkg/promtail/limit/config.go b/clients/pkg/promtail/limit/config.go index 02589afd86b89..aed6f23c0b041 100644 --- a/clients/pkg/promtail/limit/config.go +++ b/clients/pkg/promtail/limit/config.go @@ -3,7 +3,7 @@ package limit import ( "flag" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type Config struct { diff --git a/clients/pkg/promtail/positions/positions_test.go b/clients/pkg/promtail/positions/positions_test.go index 1dce97b08ec7e..6a7044a5a868d 100644 --- a/clients/pkg/promtail/positions/positions_test.go +++ b/clients/pkg/promtail/positions/positions_test.go @@ -9,7 +9,7 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func tempFilename(t *testing.T) string { diff --git a/clients/pkg/promtail/promtail.go b/clients/pkg/promtail/promtail.go index 1ef3368a697e5..ffe774a405be8 100644 --- a/clients/pkg/promtail/promtail.go +++ b/clients/pkg/promtail/promtail.go @@ -14,17 +14,17 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/config" - "github.com/grafana/loki/clients/pkg/promtail/server" - "github.com/grafana/loki/clients/pkg/promtail/targets" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/clients/pkg/promtail/utils" - "github.com/grafana/loki/clients/pkg/promtail/wal" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/promtail/server" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/clients/pkg/promtail/promtail_test.go b/clients/pkg/promtail/promtail_test.go index ca35fc403882a..695f3faeb0f5f 100644 --- a/clients/pkg/promtail/promtail_test.go +++ b/clients/pkg/promtail/promtail_test.go @@ -30,19 +30,19 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/config" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/server" - pserver "github.com/grafana/loki/clients/pkg/promtail/server" - file2 "github.com/grafana/loki/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/clients/pkg/promtail/targets/testutils" - - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/server" + pserver "github.com/grafana/loki/v3/clients/pkg/promtail/server" + file2 "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/testutils" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var clientMetrics = client.NewMetrics(prometheus.DefaultRegisterer) diff --git a/clients/pkg/promtail/promtail_wal_test.go b/clients/pkg/promtail/promtail_wal_test.go index dc00c398e91c8..f2013ca93e7b8 100644 --- a/clients/pkg/promtail/promtail_wal_test.go +++ b/clients/pkg/promtail/promtail_wal_test.go @@ -20,15 +20,15 @@ import ( "github.com/prometheus/prometheus/discovery/targetgroup" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/config" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/utils" - "github.com/grafana/loki/clients/pkg/promtail/wal" - - "github.com/grafana/loki/pkg/push" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/config" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/v3/clients/pkg/promtail/wal" + + "github.com/grafana/loki/v3/pkg/push" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git 
a/clients/pkg/promtail/scrapeconfig/scrapeconfig.go b/clients/pkg/promtail/scrapeconfig/scrapeconfig.go index 262b4b925d25d..a261a9a08a383 100644 --- a/clients/pkg/promtail/scrapeconfig/scrapeconfig.go +++ b/clients/pkg/promtail/scrapeconfig/scrapeconfig.go @@ -27,8 +27,8 @@ import ( "github.com/prometheus/prometheus/discovery/zookeeper" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/discovery/consulagent" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/discovery/consulagent" ) // Config describes a job to scrape. diff --git a/clients/pkg/promtail/server/server.go b/clients/pkg/promtail/server/server.go index 1b47247630e05..2e7752812c93c 100644 --- a/clients/pkg/promtail/server/server.go +++ b/clients/pkg/promtail/server/server.go @@ -23,9 +23,9 @@ import ( "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/common/version" - "github.com/grafana/loki/clients/pkg/promtail/server/ui" - "github.com/grafana/loki/clients/pkg/promtail/targets" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/server/ui" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) var ( diff --git a/clients/pkg/promtail/server/template.go b/clients/pkg/promtail/server/template.go index 1ed7fde54547d..53013bc485d46 100644 --- a/clients/pkg/promtail/server/template.go +++ b/clients/pkg/promtail/server/template.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/template" - "github.com/grafana/loki/clients/pkg/promtail/server/ui" + "github.com/grafana/loki/v3/clients/pkg/promtail/server/ui" ) // templateOptions is a set of options to render a template. 
diff --git a/clients/pkg/promtail/server/ui/assets_generate.go b/clients/pkg/promtail/server/ui/assets_generate.go index 984a1f9c99e4d..0165b2031f60c 100644 --- a/clients/pkg/promtail/server/ui/assets_generate.go +++ b/clients/pkg/promtail/server/ui/assets_generate.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/alertmanager/pkg/modtimevfs" "github.com/shurcooL/vfsgen" - "github.com/grafana/loki/clients/pkg/promtail/server/ui" + "github.com/grafana/loki/v3/clients/pkg/promtail/server/ui" ) func main() { diff --git a/clients/pkg/promtail/targets/azureeventhubs/parser.go b/clients/pkg/promtail/targets/azureeventhubs/parser.go index cd2ddc7145cbb..0001dc525019e 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/parser.go +++ b/clients/pkg/promtail/targets/azureeventhubs/parser.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type azureMonitorResourceLogs struct { diff --git a/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go b/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go index e16d3b4914a10..bc2175768f460 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go +++ b/clients/pkg/promtail/targets/azureeventhubs/target_syncer.go @@ -10,10 +10,10 @@ import ( "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/kafka" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/kafka" ) func NewSyncer( diff --git a/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go b/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go index 2113afffd4e2e..1874453cf364b 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go +++ b/clients/pkg/promtail/targets/azureeventhubs/target_syncer_test.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_validateConfig(t *testing.T) { diff --git a/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go b/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go index 2651233942ba2..48f3a3fe8b8e6 100644 --- a/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go +++ b/clients/pkg/promtail/targets/azureeventhubs/targetmanager.go @@ -6,10 +6,10 @@ import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/kafka" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/kafka" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of kafka targets. diff --git a/clients/pkg/promtail/targets/cloudflare/target.go b/clients/pkg/promtail/targets/cloudflare/target.go index 19d1f18758273..bede17bc45327 100644 --- a/clients/pkg/promtail/targets/cloudflare/target.go +++ b/clients/pkg/promtail/targets/cloudflare/target.go @@ -18,12 +18,12 @@ import ( "github.com/prometheus/common/model" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // The minimun window size is 1 minute. diff --git a/clients/pkg/promtail/targets/cloudflare/target_test.go b/clients/pkg/promtail/targets/cloudflare/target_test.go index d275a7e845f10..64cb6c4492e5e 100644 --- a/clients/pkg/promtail/targets/cloudflare/target_test.go +++ b/clients/pkg/promtail/targets/cloudflare/target_test.go @@ -15,9 +15,9 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_CloudflareTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/cloudflare/targetmanager.go b/clients/pkg/promtail/targets/cloudflare/targetmanager.go index c60fd6577a5f3..31a05fe0b75d4 100644 --- a/clients/pkg/promtail/targets/cloudflare/targetmanager.go +++ b/clients/pkg/promtail/targets/cloudflare/targetmanager.go @@ -3,11 +3,11 @@ package cloudflare import ( "github.com/go-kit/log" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of cloudflare targets. 
diff --git a/clients/pkg/promtail/targets/docker/target.go b/clients/pkg/promtail/targets/docker/target.go index 3b3e55cfb8445..3ec9d02a022c6 100644 --- a/clients/pkg/promtail/targets/docker/target.go +++ b/clients/pkg/promtail/targets/docker/target.go @@ -20,11 +20,11 @@ import ( "github.com/prometheus/prometheus/model/relabel" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type Target struct { diff --git a/clients/pkg/promtail/targets/docker/target_group.go b/clients/pkg/promtail/targets/docker/target_group.go index 0b0ea9eef6f56..b9fd8940824d0 100644 --- a/clients/pkg/promtail/targets/docker/target_group.go +++ b/clients/pkg/promtail/targets/docker/target_group.go @@ -15,11 +15,11 @@ import ( "github.com/prometheus/prometheus/discovery/targetgroup" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) const DockerSource = "Docker" diff --git a/clients/pkg/promtail/targets/docker/target_test.go b/clients/pkg/promtail/targets/docker/target_test.go index 27a22871e4541..9bb5c9bfacd57 100644 --- a/clients/pkg/promtail/targets/docker/target_test.go +++ b/clients/pkg/promtail/targets/docker/target_test.go @@ -19,8 +19,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" ) func Test_DockerTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/docker/targetmanager.go b/clients/pkg/promtail/targets/docker/targetmanager.go index 2d6b55116584f..6321705b8f142 100644 --- a/clients/pkg/promtail/targets/docker/targetmanager.go +++ b/clients/pkg/promtail/targets/docker/targetmanager.go @@ -9,13 +9,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/discovery" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/promtail/targets/docker/targetmanager_test.go 
b/clients/pkg/promtail/targets/docker/targetmanager_test.go index 23bca7a923216..224e58d5a8930 100644 --- a/clients/pkg/promtail/targets/docker/targetmanager_test.go +++ b/clients/pkg/promtail/targets/docker/targetmanager_test.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/prometheus/discovery/moby" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_TargetManager(t *testing.T) { diff --git a/clients/pkg/promtail/targets/file/decompresser.go b/clients/pkg/promtail/targets/file/decompresser.go index 3beb35415fb6b..34d2434d8b04e 100644 --- a/clients/pkg/promtail/targets/file/decompresser.go +++ b/clients/pkg/promtail/targets/file/decompresser.go @@ -23,11 +23,11 @@ import ( "golang.org/x/text/encoding/ianaindex" "golang.org/x/text/transform" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func supportedCompressedFormats() map[string]struct{} { diff --git a/clients/pkg/promtail/targets/file/decompresser_test.go b/clients/pkg/promtail/targets/file/decompresser_test.go index 443f14a4a8443..a575922ec6e5c 100644 --- a/clients/pkg/promtail/targets/file/decompresser_test.go +++ b/clients/pkg/promtail/targets/file/decompresser_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) type noopClient struct { diff --git a/clients/pkg/promtail/targets/file/filetarget.go b/clients/pkg/promtail/targets/file/filetarget.go index 2c52cbead922f..97dc10f148293 100644 --- a/clients/pkg/promtail/targets/file/filetarget.go +++ b/clients/pkg/promtail/targets/file/filetarget.go @@ -14,10 +14,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) const ( diff --git a/clients/pkg/promtail/targets/file/filetarget_test.go b/clients/pkg/promtail/targets/file/filetarget_test.go index f3cde7bf819a4..57bc31b0802ee 100644 --- a/clients/pkg/promtail/targets/file/filetarget_test.go +++ b/clients/pkg/promtail/targets/file/filetarget_test.go @@ -20,8 +20,8 @@ import ( 
"github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" ) func TestFileTargetSync(t *testing.T) { diff --git a/clients/pkg/promtail/targets/file/filetargetmanager.go b/clients/pkg/promtail/targets/file/filetargetmanager.go index c56b8fa5f8f4d..a02d0295d2bda 100644 --- a/clients/pkg/promtail/targets/file/filetargetmanager.go +++ b/clients/pkg/promtail/targets/file/filetargetmanager.go @@ -20,13 +20,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/promtail/targets/file/filetargetmanager_test.go b/clients/pkg/promtail/targets/file/filetargetmanager_test.go index 62e41635f1111..d27cd43106fe2 100644 --- a/clients/pkg/promtail/targets/file/filetargetmanager_test.go +++ b/clients/pkg/promtail/targets/file/filetargetmanager_test.go @@ -16,10 +16,10 @@ import ( "github.com/prometheus/prometheus/discovery" "github.com/prometheus/prometheus/discovery/targetgroup" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func newTestLogDirectories(t *testing.T) string { diff --git a/clients/pkg/promtail/targets/file/tailer.go b/clients/pkg/promtail/targets/file/tailer.go index 387978b6a7707..1e72e35306490 100644 --- a/clients/pkg/promtail/targets/file/tailer.go +++ b/clients/pkg/promtail/targets/file/tailer.go @@ -17,11 +17,11 @@ import ( "golang.org/x/text/encoding/ianaindex" "golang.org/x/text/transform" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) type tailer struct { diff --git a/clients/pkg/promtail/targets/gcplog/formatter.go b/clients/pkg/promtail/targets/gcplog/formatter.go index 9c175a7750f42..9435ec4775958 100644 --- a/clients/pkg/promtail/targets/gcplog/formatter.go +++ b/clients/pkg/promtail/targets/gcplog/formatter.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/prometheus/model/labels" 
"github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // GCPLogEntry that will be written to the pubsub topic. diff --git a/clients/pkg/promtail/targets/gcplog/formatter_test.go b/clients/pkg/promtail/targets/gcplog/formatter_test.go index f70fa1d79d122..d5703890197d2 100644 --- a/clients/pkg/promtail/targets/gcplog/formatter_test.go +++ b/clients/pkg/promtail/targets/gcplog/formatter_test.go @@ -10,9 +10,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestFormat(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gcplog/pull_target.go b/clients/pkg/promtail/targets/gcplog/pull_target.go index 38db550bdf730..671b160c6f4ca 100644 --- a/clients/pkg/promtail/targets/gcplog/pull_target.go +++ b/clients/pkg/promtail/targets/gcplog/pull_target.go @@ -14,9 +14,9 @@ import ( "github.com/prometheus/prometheus/model/relabel" "google.golang.org/api/option" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) var defaultBackoff = backoff.Config{ diff --git a/clients/pkg/promtail/targets/gcplog/pull_target_test.go b/clients/pkg/promtail/targets/gcplog/pull_target_test.go index e81ee20a6938c..81a16e6872bdd 100644 --- a/clients/pkg/promtail/targets/gcplog/pull_target_test.go +++ b/clients/pkg/promtail/targets/gcplog/pull_target_test.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) func TestPullTarget_RunStop(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gcplog/push_target.go b/clients/pkg/promtail/targets/gcplog/push_target.go index 95260fb914a8a..4d0a2d2b8407c 100644 --- a/clients/pkg/promtail/targets/gcplog/push_target.go +++ b/clients/pkg/promtail/targets/gcplog/push_target.go @@ -13,12 +13,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/serverutils" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/serverutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type pushTarget struct { 
diff --git a/clients/pkg/promtail/targets/gcplog/push_target_test.go b/clients/pkg/promtail/targets/gcplog/push_target_test.go index 50075820fa497..d6e9ce3f75e0a 100644 --- a/clients/pkg/promtail/targets/gcplog/push_target_test.go +++ b/clients/pkg/promtail/targets/gcplog/push_target_test.go @@ -9,7 +9,7 @@ import ( "testing" "time" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" @@ -17,10 +17,10 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/gcplog" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/gcplog" ) const localhost = "127.0.0.1" diff --git a/clients/pkg/promtail/targets/gcplog/push_translation.go b/clients/pkg/promtail/targets/gcplog/push_translation.go index f96f7171f2185..aae5ee4fb25d6 100644 --- a/clients/pkg/promtail/targets/gcplog/push_translation.go +++ b/clients/pkg/promtail/targets/gcplog/push_translation.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // PushMessage is the POST body format sent by GCP PubSub push subscriptions. diff --git a/clients/pkg/promtail/targets/gcplog/target.go b/clients/pkg/promtail/targets/gcplog/target.go index b122fb24112f7..1c60e8a3eb2ca 100644 --- a/clients/pkg/promtail/targets/gcplog/target.go +++ b/clients/pkg/promtail/targets/gcplog/target.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/prometheus/model/relabel" "google.golang.org/api/option" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // Target is a common interface implemented by both GCPLog targets. 
diff --git a/clients/pkg/promtail/targets/gcplog/target_test.go b/clients/pkg/promtail/targets/gcplog/target_test.go index 1a7cec47131f6..96bf7606985ed 100644 --- a/clients/pkg/promtail/targets/gcplog/target_test.go +++ b/clients/pkg/promtail/targets/gcplog/target_test.go @@ -17,9 +17,9 @@ import ( "github.com/grafana/dskit/server" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func TestNewGCPLogTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gcplog/targetmanager.go b/clients/pkg/promtail/targets/gcplog/targetmanager.go index 71f3b5130a2fc..cf731c6c9f995 100644 --- a/clients/pkg/promtail/targets/gcplog/targetmanager.go +++ b/clients/pkg/promtail/targets/gcplog/targetmanager.go @@ -6,10 +6,10 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // nolint:revive diff --git a/clients/pkg/promtail/targets/gelf/gelftarget.go b/clients/pkg/promtail/targets/gelf/gelftarget.go index baaf8e3911fd9..42298b7588cca 100644 --- a/clients/pkg/promtail/targets/gelf/gelftarget.go +++ b/clients/pkg/promtail/targets/gelf/gelftarget.go @@ -14,11 +14,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // SeverityLevels maps severity levels to severity string levels. 
diff --git a/clients/pkg/promtail/targets/gelf/gelftarget_test.go b/clients/pkg/promtail/targets/gelf/gelftarget_test.go index 86a304ef9a7a0..8bdc5470e28b5 100644 --- a/clients/pkg/promtail/targets/gelf/gelftarget_test.go +++ b/clients/pkg/promtail/targets/gelf/gelftarget_test.go @@ -15,8 +15,8 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_Gelf(t *testing.T) { diff --git a/clients/pkg/promtail/targets/gelf/gelftargetmanager.go b/clients/pkg/promtail/targets/gelf/gelftargetmanager.go index f9824d3152f07..f6b7048287ce0 100644 --- a/clients/pkg/promtail/targets/gelf/gelftargetmanager.go +++ b/clients/pkg/promtail/targets/gelf/gelftargetmanager.go @@ -4,10 +4,10 @@ import ( "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of Gelf Targets. diff --git a/clients/pkg/promtail/targets/heroku/target.go b/clients/pkg/promtail/targets/heroku/target.go index 9ab2fdaacfac4..83aceda6b7921 100644 --- a/clients/pkg/promtail/targets/heroku/target.go +++ b/clients/pkg/promtail/targets/heroku/target.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/serverutils" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/serverutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type Target struct { diff --git a/clients/pkg/promtail/targets/heroku/target_test.go b/clients/pkg/promtail/targets/heroku/target_test.go index c584bedba471d..42657d83ff1b4 100644 --- a/clients/pkg/promtail/targets/heroku/target_test.go +++ b/clients/pkg/promtail/targets/heroku/target_test.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - lokiClient "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + lokiClient "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" 
+ "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) const localhost = "127.0.0.1" diff --git a/clients/pkg/promtail/targets/heroku/targetmanager.go b/clients/pkg/promtail/targets/heroku/targetmanager.go index 5d046ca3d45cf..3ad94bc5a79f0 100644 --- a/clients/pkg/promtail/targets/heroku/targetmanager.go +++ b/clients/pkg/promtail/targets/heroku/targetmanager.go @@ -5,10 +5,10 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) type TargetManager struct { diff --git a/clients/pkg/promtail/targets/journal/journaltarget.go b/clients/pkg/promtail/targets/journal/journaltarget.go index 6630b827e7c14..fa04ac50c3db4 100644 --- a/clients/pkg/promtail/targets/journal/journaltarget.go +++ b/clients/pkg/promtail/targets/journal/journaltarget.go @@ -19,12 +19,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/clients/pkg/promtail/targets/journal/journaltarget_test.go b/clients/pkg/promtail/targets/journal/journaltarget_test.go index b0186d1504f47..768cb7f5c1510 100644 --- a/clients/pkg/promtail/targets/journal/journaltarget_test.go +++ b/clients/pkg/promtail/targets/journal/journaltarget_test.go @@ -20,10 +20,10 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/testutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/testutils" ) type mockJournalReader struct { diff --git a/clients/pkg/promtail/targets/journal/journaltargetmanager.go b/clients/pkg/promtail/targets/journal/journaltargetmanager.go index 9e55c37d9268c..f0d99f091db17 100644 --- a/clients/pkg/promtail/targets/journal/journaltargetmanager.go +++ b/clients/pkg/promtail/targets/journal/journaltargetmanager.go @@ -7,10 +7,10 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - 
"github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // JournalTargetManager manages a series of JournalTargets. diff --git a/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go b/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go index 6940d57ead5d7..52d216e58a090 100644 --- a/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go +++ b/clients/pkg/promtail/targets/journal/journaltargetmanager_linux.go @@ -7,11 +7,11 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // JournalTargetManager manages a series of JournalTargets. diff --git a/clients/pkg/promtail/targets/kafka/consumer.go b/clients/pkg/promtail/targets/kafka/consumer.go index 34cb61da00e7e..f4b8a4d260cf2 100644 --- a/clients/pkg/promtail/targets/kafka/consumer.go +++ b/clients/pkg/promtail/targets/kafka/consumer.go @@ -11,7 +11,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/backoff" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) var defaultBackOff = backoff.Config{ diff --git a/clients/pkg/promtail/targets/kafka/consumer_test.go b/clients/pkg/promtail/targets/kafka/consumer_test.go index 63ab60b1a64f5..7420bdf6c1f11 100644 --- a/clients/pkg/promtail/targets/kafka/consumer_test.go +++ b/clients/pkg/promtail/targets/kafka/consumer_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) type DiscovererFn func(sarama.ConsumerGroupSession, sarama.ConsumerGroupClaim) (RunnableTarget, error) diff --git a/clients/pkg/promtail/targets/kafka/formatter.go b/clients/pkg/promtail/targets/kafka/formatter.go index b0f61e4332e3b..f800dbe20b9dd 100644 --- a/clients/pkg/promtail/targets/kafka/formatter.go +++ b/clients/pkg/promtail/targets/kafka/formatter.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) func format(lbs labels.Labels, cfg []*relabel.Config) model.LabelSet { diff --git a/clients/pkg/promtail/targets/kafka/parser.go b/clients/pkg/promtail/targets/kafka/parser.go index 872ea0e45bc05..9ad3b7f8271c0 100644 --- a/clients/pkg/promtail/targets/kafka/parser.go +++ b/clients/pkg/promtail/targets/kafka/parser.go @@ -5,9 +5,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" + 
"github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // messageParser implements MessageParser. It doesn't modify the content of the original `message.Value`. diff --git a/clients/pkg/promtail/targets/kafka/target.go b/clients/pkg/promtail/targets/kafka/target.go index 519af759b66c7..707cc01ca1947 100644 --- a/clients/pkg/promtail/targets/kafka/target.go +++ b/clients/pkg/promtail/targets/kafka/target.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) type runnableDroppedTarget struct { diff --git a/clients/pkg/promtail/targets/kafka/target_syncer.go b/clients/pkg/promtail/targets/kafka/target_syncer.go index 187f4749ce2df..6afcd24ad7832 100644 --- a/clients/pkg/promtail/targets/kafka/target_syncer.go +++ b/clients/pkg/promtail/targets/kafka/target_syncer.go @@ -16,12 +16,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) var TopicPollInterval = 30 * time.Second diff --git a/clients/pkg/promtail/targets/kafka/target_syncer_test.go b/clients/pkg/promtail/targets/kafka/target_syncer_test.go index cc1161c63dcb8..1f0255cedf62e 100644 --- a/clients/pkg/promtail/targets/kafka/target_syncer_test.go +++ b/clients/pkg/promtail/targets/kafka/target_syncer_test.go @@ -17,9 +17,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_TopicDiscovery(t *testing.T) { diff --git a/clients/pkg/promtail/targets/kafka/target_test.go b/clients/pkg/promtail/targets/kafka/target_test.go index a62488b04a7cb..0f8061027de3a 100644 --- a/clients/pkg/promtail/targets/kafka/target_test.go +++ b/clients/pkg/promtail/targets/kafka/target_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" ) // Consumergroup handler diff --git a/clients/pkg/promtail/targets/kafka/targetmanager.go b/clients/pkg/promtail/targets/kafka/targetmanager.go index f16606aefda75..c9cc382503704 100644 --- a/clients/pkg/promtail/targets/kafka/targetmanager.go +++ b/clients/pkg/promtail/targets/kafka/targetmanager.go @@ -5,9 +5,9 @@ import ( "github.com/go-kit/log/level" 
"github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of kafka targets. diff --git a/clients/pkg/promtail/targets/lokipush/pushtarget.go b/clients/pkg/promtail/targets/lokipush/pushtarget.go index 88c7859bd36e5..63630c6e5ac2d 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtarget.go +++ b/clients/pkg/promtail/targets/lokipush/pushtarget.go @@ -20,14 +20,14 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/serverutils" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/serverutils" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type PushTarget struct { diff --git a/clients/pkg/promtail/targets/lokipush/pushtarget_test.go b/clients/pkg/promtail/targets/lokipush/pushtarget_test.go index 147994fb2df1a..3fe48b599a5e4 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtarget_test.go +++ b/clients/pkg/promtail/targets/lokipush/pushtarget_test.go @@ -20,12 +20,12 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const localhost = "127.0.0.1" diff --git a/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go b/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go index be29037544726..e924647c2c073 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go +++ b/clients/pkg/promtail/targets/lokipush/pushtargetmanager.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/util/strutil" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // 
PushTargetManager manages a series of PushTargets. diff --git a/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go b/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go index 40621d18f5ba0..08730042bf841 100644 --- a/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go +++ b/clients/pkg/promtail/targets/lokipush/pushtargetmanager_test.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/dskit/server" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" ) func Test_validateJobName(t *testing.T) { diff --git a/clients/pkg/promtail/targets/manager.go b/clients/pkg/promtail/targets/manager.go index 91ce61a1c84f4..241dd25aaa5cc 100644 --- a/clients/pkg/promtail/targets/manager.go +++ b/clients/pkg/promtail/targets/manager.go @@ -8,23 +8,23 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/positions" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/azureeventhubs" - "github.com/grafana/loki/clients/pkg/promtail/targets/cloudflare" - "github.com/grafana/loki/clients/pkg/promtail/targets/docker" - "github.com/grafana/loki/clients/pkg/promtail/targets/file" - "github.com/grafana/loki/clients/pkg/promtail/targets/gcplog" - "github.com/grafana/loki/clients/pkg/promtail/targets/gelf" - "github.com/grafana/loki/clients/pkg/promtail/targets/heroku" - "github.com/grafana/loki/clients/pkg/promtail/targets/journal" - "github.com/grafana/loki/clients/pkg/promtail/targets/kafka" - "github.com/grafana/loki/clients/pkg/promtail/targets/lokipush" - "github.com/grafana/loki/clients/pkg/promtail/targets/stdin" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/positions" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/azureeventhubs" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/cloudflare" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/docker" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/file" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/gcplog" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/gelf" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/heroku" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/journal" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/kafka" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/lokipush" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/stdin" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows" ) const ( diff --git a/clients/pkg/promtail/targets/stdin/stdin_target_manager.go b/clients/pkg/promtail/targets/stdin/stdin_target_manager.go index 065d6bd93feb0..bcc441950e3a9 100644 --- a/clients/pkg/promtail/targets/stdin/stdin_target_manager.go +++ b/clients/pkg/promtail/targets/stdin/stdin_target_manager.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/common/model" 
"github.com/prometheus/prometheus/discovery/targetgroup" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // bufferSize is the size of the buffered reader diff --git a/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go b/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go index 58abae3c802b8..8f2135f3aff32 100644 --- a/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go +++ b/clients/pkg/promtail/targets/stdin/stdin_target_manager_test.go @@ -12,13 +12,13 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_newReaderTarget(t *testing.T) { diff --git a/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go b/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go index 89d17c4645f55..f99742de48a24 100644 --- a/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go +++ b/clients/pkg/promtail/targets/syslog/syslogparser/syslogparser_test.go @@ -9,7 +9,7 @@ import ( "github.com/influxdata/go-syslog/v3/rfc5424" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog/syslogparser" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog/syslogparser" ) var ( diff --git a/clients/pkg/promtail/targets/syslog/syslogtarget.go b/clients/pkg/promtail/targets/syslog/syslogtarget.go index 54befebc38931..35ba4d8cf297f 100644 --- a/clients/pkg/promtail/targets/syslog/syslogtarget.go +++ b/clients/pkg/promtail/targets/syslog/syslogtarget.go @@ -15,11 +15,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/clients/pkg/promtail/targets/syslog/syslogtarget_test.go b/clients/pkg/promtail/targets/syslog/syslogtarget_test.go index 62b5924626f14..2f06e04321ece 100644 --- a/clients/pkg/promtail/targets/syslog/syslogtarget_test.go +++ 
b/clients/pkg/promtail/targets/syslog/syslogtarget_test.go @@ -19,9 +19,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog/syslogparser" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog/syslogparser" ) var ( diff --git a/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go b/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go index 8a7246a28a464..ffda8b3de4203 100644 --- a/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go +++ b/clients/pkg/promtail/targets/syslog/syslogtargetmanager.go @@ -5,10 +5,10 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // SyslogTargetManager manages a series of SyslogTargets. diff --git a/clients/pkg/promtail/targets/syslog/transport.go b/clients/pkg/promtail/targets/syslog/transport.go index 67a78136e311b..6b1bdfeb91c11 100644 --- a/clients/pkg/promtail/targets/syslog/transport.go +++ b/clients/pkg/promtail/targets/syslog/transport.go @@ -20,8 +20,8 @@ import ( "github.com/influxdata/go-syslog/v3" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/syslog/syslogparser" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/syslog/syslogparser" ) var ( diff --git a/clients/pkg/promtail/targets/windows/bookmark.go b/clients/pkg/promtail/targets/windows/bookmark.go index b7a4a7698cde4..55ad7f3040526 100644 --- a/clients/pkg/promtail/targets/windows/bookmark.go +++ b/clients/pkg/promtail/targets/windows/bookmark.go @@ -9,7 +9,7 @@ import ( "github.com/spf13/afero" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" ) type bookMark struct { diff --git a/clients/pkg/promtail/targets/windows/format.go b/clients/pkg/promtail/targets/windows/format.go index 9fc44cc62a8ba..821aa4ecf0d9b 100644 --- a/clients/pkg/promtail/targets/windows/format.go +++ b/clients/pkg/promtail/targets/windows/format.go @@ -9,8 +9,8 @@ import ( jsoniter "github.com/json-iterator/go" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" ) type Event struct { diff --git a/clients/pkg/promtail/targets/windows/target.go b/clients/pkg/promtail/targets/windows/target.go index c4e1806724a54..42cb298f0995c 100644 --- a/clients/pkg/promtail/targets/windows/target.go +++ 
b/clients/pkg/promtail/targets/windows/target.go @@ -18,10 +18,10 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" ) var fs = afero.NewOsFs() diff --git a/clients/pkg/promtail/targets/windows/target_test.go b/clients/pkg/promtail/targets/windows/target_test.go index a9a692b21ecfc..c766cb869cd87 100644 --- a/clients/pkg/promtail/targets/windows/target_test.go +++ b/clients/pkg/promtail/targets/windows/target_test.go @@ -14,13 +14,13 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/sys/windows/svc/eventlog" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/client/fake" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/windows/win_eventlog" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/client/fake" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/windows/win_eventlog" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func init() { diff --git a/clients/pkg/promtail/targets/windows/targetmanager.go b/clients/pkg/promtail/targets/windows/targetmanager.go index 78e98880ca2bf..9bb12ebc15a9c 100644 --- a/clients/pkg/promtail/targets/windows/targetmanager.go +++ b/clients/pkg/promtail/targets/windows/targetmanager.go @@ -8,9 +8,9 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of windows event targets. 
diff --git a/clients/pkg/promtail/targets/windows/targetmanager_windows.go b/clients/pkg/promtail/targets/windows/targetmanager_windows.go index 78793b26c730a..4bc53bcc42153 100644 --- a/clients/pkg/promtail/targets/windows/targetmanager_windows.go +++ b/clients/pkg/promtail/targets/windows/targetmanager_windows.go @@ -8,10 +8,10 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/clients/pkg/logentry/stages" - "github.com/grafana/loki/clients/pkg/promtail/api" - "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig" - "github.com/grafana/loki/clients/pkg/promtail/targets/target" + "github.com/grafana/loki/v3/clients/pkg/logentry/stages" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/scrapeconfig" + "github.com/grafana/loki/v3/clients/pkg/promtail/targets/target" ) // TargetManager manages a series of windows event targets. diff --git a/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go b/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go index f2411698d4b14..71ff148de58ff 100644 --- a/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go +++ b/clients/pkg/promtail/targets/windows/win_eventlog/win_eventlog.go @@ -39,7 +39,7 @@ import ( "github.com/influxdata/telegraf/plugins/inputs" "golang.org/x/sys/windows" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var sampleConfig = ` diff --git a/clients/pkg/promtail/utils/entries.go b/clients/pkg/promtail/utils/entries.go index 214422b674bad..10204e8fb37f4 100644 --- a/clients/pkg/promtail/utils/entries.go +++ b/clients/pkg/promtail/utils/entries.go @@ -5,7 +5,7 @@ import ( "sync" "time" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" ) // FanoutEntryHandler implements api.EntryHandler, fanning out received entries to one or multiple channels. diff --git a/clients/pkg/promtail/utils/entries_test.go b/clients/pkg/promtail/utils/entries_test.go index 8029e895c2a62..c9b098d9ee4a4 100644 --- a/clients/pkg/promtail/utils/entries_test.go +++ b/clients/pkg/promtail/utils/entries_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestFanoutEntryHandler_SuccessfulFanout(t *testing.T) { diff --git a/clients/pkg/promtail/utils/remotewrite_server.go b/clients/pkg/promtail/utils/remotewrite_server.go index 089f4a79a01bb..837d3a8581f10 100644 --- a/clients/pkg/promtail/utils/remotewrite_server.go +++ b/clients/pkg/promtail/utils/remotewrite_server.go @@ -5,8 +5,8 @@ import ( "net/http" "net/http/httptest" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) // RemoteWriteRequest wraps the received logs remote write request that is received. 
diff --git a/clients/pkg/promtail/wal/reader.go b/clients/pkg/promtail/wal/reader.go index b19b2bbecc10b..769c566efbee9 100644 --- a/clients/pkg/promtail/wal/reader.go +++ b/clients/pkg/promtail/wal/reader.go @@ -5,11 +5,11 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/util" - walUtils "github.com/grafana/loki/pkg/util/wal" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/util" + walUtils "github.com/grafana/loki/v3/pkg/util/wal" ) // ReadWAL will read all entries in the WAL located under dir. Mainly used for testing diff --git a/clients/pkg/promtail/wal/wal.go b/clients/pkg/promtail/wal/wal.go index af1fa7e3d5098..8e747530470c7 100644 --- a/clients/pkg/promtail/wal/wal.go +++ b/clients/pkg/promtail/wal/wal.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/ingester/wal" ) var ( diff --git a/clients/pkg/promtail/wal/watcher.go b/clients/pkg/promtail/wal/watcher.go index 3e8719a235812..926c93c01bcfc 100644 --- a/clients/pkg/promtail/wal/watcher.go +++ b/clients/pkg/promtail/wal/watcher.go @@ -14,7 +14,7 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/ingester/wal" ) const ( diff --git a/clients/pkg/promtail/wal/watcher_test.go b/clients/pkg/promtail/wal/watcher_test.go index d9a5e04cb0b89..b41880f5d20ff 100644 --- a/clients/pkg/promtail/wal/watcher_test.go +++ b/clients/pkg/promtail/wal/watcher_test.go @@ -13,11 +13,11 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) type testWriteTo struct { diff --git a/clients/pkg/promtail/wal/writer.go b/clients/pkg/promtail/wal/writer.go index 8e754a01038f8..e9360645716d9 100644 --- a/clients/pkg/promtail/wal/writer.go +++ b/clients/pkg/promtail/wal/writer.go @@ -16,11 +16,11 @@ import ( "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/record" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/clients/pkg/promtail/wal/writer_test.go b/clients/pkg/promtail/wal/writer_test.go index fbce817f2a26e..a9c637f98b1ce 100644 --- a/clients/pkg/promtail/wal/writer_test.go +++ b/clients/pkg/promtail/wal/writer_test.go @@ -13,9 +13,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/clients/pkg/promtail/api" + "github.com/grafana/loki/v3/clients/pkg/promtail/api" - "github.com/grafana/loki/pkg/logproto" + 
"github.com/grafana/loki/v3/pkg/logproto" ) func TestWriter_EntriesAreWrittenToWAL(t *testing.T) { diff --git a/cmd/logcli/main.go b/cmd/logcli/main.go index 56a954cd5b44b..3d2aa85297b3f 100644 --- a/cmd/logcli/main.go +++ b/cmd/logcli/main.go @@ -15,15 +15,15 @@ import ( "github.com/prometheus/common/version" "gopkg.in/alecthomas/kingpin.v2" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/index" - "github.com/grafana/loki/pkg/logcli/labelquery" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/query" - "github.com/grafana/loki/pkg/logcli/seriesquery" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/logql/syntax" - _ "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/index" + "github.com/grafana/loki/v3/pkg/logcli/labelquery" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/query" + "github.com/grafana/loki/v3/pkg/logcli/seriesquery" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/logql/syntax" + _ "github.com/grafana/loki/v3/pkg/util/build" ) var ( diff --git a/cmd/logql-analyzer/main.go b/cmd/logql-analyzer/main.go index 5031dbad7d894..beed1226709d4 100644 --- a/cmd/logql-analyzer/main.go +++ b/cmd/logql-analyzer/main.go @@ -10,9 +10,9 @@ import ( "github.com/grafana/dskit/server" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logqlanalyzer" - "github.com/grafana/loki/pkg/sizing" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logqlanalyzer" + "github.com/grafana/loki/v3/pkg/sizing" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func main() { diff --git a/cmd/loki-canary/main.go b/cmd/loki-canary/main.go index 70aad7b8dfd80..061b98321047e 100644 --- a/cmd/loki-canary/main.go +++ b/cmd/loki-canary/main.go @@ -18,10 +18,10 @@ import ( "github.com/prometheus/common/config" "github.com/prometheus/common/version" - "github.com/grafana/loki/pkg/canary/comparator" - "github.com/grafana/loki/pkg/canary/reader" - "github.com/grafana/loki/pkg/canary/writer" - _ "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/canary/comparator" + "github.com/grafana/loki/v3/pkg/canary/reader" + "github.com/grafana/loki/v3/pkg/canary/writer" + _ "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/cmd/loki/main.go b/cmd/loki/main.go index 20a5925acbb4a..250568203be15 100644 --- a/cmd/loki/main.go +++ b/cmd/loki/main.go @@ -16,12 +16,12 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/version" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/util" - _ "github.com/grafana/loki/pkg/util/build" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/util" + _ "github.com/grafana/loki/v3/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func exit(code int) { diff --git a/cmd/migrate/main.go b/cmd/migrate/main.go index d638adaaa812e..e42468e532b07 100644 --- a/cmd/migrate/main.go +++ b/cmd/migrate/main.go @@ -17,16 +17,16 @@ import ( "github.com/grafana/dskit/user" "github.com/prometheus/prometheus/model/labels" - 
"github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/util/cfg" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/util/cfg" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) type syncRange struct { diff --git a/cmd/querytee/main.go b/cmd/querytee/main.go index 9007dd6a3e3f2..5acebfed85179 100644 --- a/cmd/querytee/main.go +++ b/cmd/querytee/main.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/collectors" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/querytee" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/querytee" ) type Config struct { diff --git a/go.mod b/go.mod index 2fad4fa4375f8..379a29274ff4f 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/grafana/loki +module github.com/grafana/loki/v3 go 1.21 @@ -125,7 +125,7 @@ require ( github.com/fsnotify/fsnotify v1.7.0 github.com/gogo/googleapis v1.4.0 github.com/grafana/jsonparser v0.0.0-20240209175146-098958973a2d - github.com/grafana/loki/pkg/push v0.0.0-20231124142027-e52380921608 + github.com/grafana/loki/v3/pkg/push v0.0.0-20231124142027-e52380921608 github.com/heroku/x v0.0.61 github.com/influxdata/tdigest v0.0.2-0.20210216194612-fc98d27c9e8b github.com/prometheus/alertmanager v0.27.0 @@ -358,4 +358,4 @@ replace github.com/hashicorp/memberlist => github.com/grafana/memberlist v0.3.1- // Insist on the optimised version of grafana/regexp replace github.com/grafana/regexp => github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd -replace github.com/grafana/loki/pkg/push => ./pkg/push +replace github.com/grafana/loki/v3/pkg/push => ./pkg/push diff --git a/integration/client/client.go b/integration/client/client.go index 1ad94fd0edbb6..a749789036f07 100644 --- a/integration/client/client.go +++ b/integration/client/client.go @@ -23,9 +23,9 @@ import ( "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/plog/plogotlp" - logcli "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/unmarshal" + logcli "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/unmarshal" ) const requestTimeout = 30 * time.Second diff --git a/integration/cluster/cluster.go b/integration/cluster/cluster.go index 79dc7ce2809ff..446f84cfbb130 100644 --- a/integration/cluster/cluster.go +++ b/integration/cluster/cluster.go @@ -23,14 +23,14 @@ import ( "github.com/prometheus/common/model" "gopkg.in/yaml.v2" - "github.com/grafana/loki/integration/util" - - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - 
"github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/integration/util" + + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/integration/loki_micro_services_delete_test.go b/integration/loki_micro_services_delete_test.go index d77d7ab115083..0ddce4aa11cbd 100644 --- a/integration/loki_micro_services_delete_test.go +++ b/integration/loki_micro_services_delete_test.go @@ -13,13 +13,13 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/storage" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/v3/pkg/storage" ) type pushRequest struct { diff --git a/integration/loki_micro_services_test.go b/integration/loki_micro_services_test.go index 3480757f07930..611fafb15ab7a 100644 --- a/integration/loki_micro_services_test.go +++ b/integration/loki_micro_services_test.go @@ -21,12 +21,12 @@ import ( "golang.org/x/exp/slices" "google.golang.org/protobuf/proto" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/querylimits" ) func TestMicroServicesIngestQuery(t *testing.T) { diff --git a/integration/loki_rule_eval_test.go b/integration/loki_rule_eval_test.go index 5ee9bf97ac566..00caeef8883c2 100644 --- a/integration/loki_rule_eval_test.go +++ b/integration/loki_rule_eval_test.go @@ -14,10 +14,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/ruler" + "github.com/grafana/loki/v3/pkg/ruler" ) // TestLocalRuleEval tests that rules are evaluated locally with an embedded query engine diff --git a/integration/loki_simple_scalable_test.go b/integration/loki_simple_scalable_test.go index f831dcc406c4c..070d3f918a14d 100644 --- a/integration/loki_simple_scalable_test.go +++ b/integration/loki_simple_scalable_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" ) func TestSimpleScalable_IngestQuery(t *testing.T) { diff 
--git a/integration/loki_single_binary_test.go b/integration/loki_single_binary_test.go index 7e26f9c4caf72..6aaf64f5b4152 100644 --- a/integration/loki_single_binary_test.go +++ b/integration/loki_single_binary_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" ) func TestSingleBinaryIngestQuery(t *testing.T) { diff --git a/integration/multi_tenant_queries_test.go b/integration/multi_tenant_queries_test.go index 4c13d6f9e6249..339b380d1b01e 100644 --- a/integration/multi_tenant_queries_test.go +++ b/integration/multi_tenant_queries_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" ) func TestMultiTenantQuery(t *testing.T) { diff --git a/integration/per_request_limits_test.go b/integration/per_request_limits_test.go index 34d9c7e99f44c..482ff0e93fcfa 100644 --- a/integration/per_request_limits_test.go +++ b/integration/per_request_limits_test.go @@ -10,10 +10,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/integration/client" - "github.com/grafana/loki/integration/cluster" + "github.com/grafana/loki/v3/integration/client" + "github.com/grafana/loki/v3/integration/cluster" - "github.com/grafana/loki/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/util/querylimits" ) func TestPerRequestLimits(t *testing.T) { diff --git a/pkg/analytics/reporter.go b/pkg/analytics/reporter.go index 85050237d99ad..d58e727aac7fb 100644 --- a/pkg/analytics/reporter.go +++ b/pkg/analytics/reporter.go @@ -18,8 +18,8 @@ import ( "github.com/grafana/dskit/services" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/pkg/analytics/reporter_test.go b/pkg/analytics/reporter_test.go index 183f2d8246202..a986ac66de05d 100644 --- a/pkg/analytics/reporter_test.go +++ b/pkg/analytics/reporter_test.go @@ -14,7 +14,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" ) func Test_LeaderElection(t *testing.T) { diff --git a/pkg/analytics/seed_test.go b/pkg/analytics/seed_test.go index 4229c508d2dfe..366789354d69d 100644 --- a/pkg/analytics/seed_test.go +++ b/pkg/analytics/seed_test.go @@ -15,7 +15,7 @@ import ( "github.com/grafana/dskit/services" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" ) type dnsProviderMock struct { diff --git a/pkg/analytics/stats.go b/pkg/analytics/stats.go index 2479ae28c804c..e4ea068f0cabb 100644 --- a/pkg/analytics/stats.go +++ b/pkg/analytics/stats.go @@ -14,7 +14,7 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" "github.com/cespare/xxhash/v2" jsoniter 
"github.com/json-iterator/go" diff --git a/pkg/analytics/stats_test.go b/pkg/analytics/stats_test.go index 8c676af358652..b2ba30db7ca85 100644 --- a/pkg/analytics/stats_test.go +++ b/pkg/analytics/stats_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" ) func Test_BuildReport(t *testing.T) { diff --git a/pkg/bloomcompactor/batch.go b/pkg/bloomcompactor/batch.go index a9bf2c6c0cc80..4247fc1e4b52c 100644 --- a/pkg/bloomcompactor/batch.go +++ b/pkg/bloomcompactor/batch.go @@ -9,12 +9,12 @@ import ( "github.com/grafana/dskit/multierror" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - logql_log "github.com/grafana/loki/pkg/logql/log" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + logql_log "github.com/grafana/loki/v3/pkg/logql/log" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) type Fetcher[A, B any] interface { diff --git a/pkg/bloomcompactor/batch_test.go b/pkg/bloomcompactor/batch_test.go index bd2cb3378cfba..d64b8313e1066 100644 --- a/pkg/bloomcompactor/batch_test.go +++ b/pkg/bloomcompactor/batch_test.go @@ -7,8 +7,8 @@ import ( "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func TestBatchedLoader(t *testing.T) { diff --git a/pkg/bloomcompactor/bloomcompactor.go b/pkg/bloomcompactor/bloomcompactor.go index 7b91eca0565b0..3a99a1d1ad866 100644 --- a/pkg/bloomcompactor/bloomcompactor.go +++ b/pkg/bloomcompactor/bloomcompactor.go @@ -16,13 +16,13 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - util_ring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + util_ring "github.com/grafana/loki/v3/pkg/util/ring" ) var ( diff --git a/pkg/bloomcompactor/bloomcompactor_test.go b/pkg/bloomcompactor/bloomcompactor_test.go index 9ffa1a88ba65b..1734ecfa710f7 100644 --- a/pkg/bloomcompactor/bloomcompactor_test.go +++ b/pkg/bloomcompactor/bloomcompactor_test.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/chunkenc" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" - 
lokiring "github.com/grafana/loki/pkg/util/ring" - util_ring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/chunkenc" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + util_ring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) func TestCompactor_ownsTenant(t *testing.T) { diff --git a/pkg/bloomcompactor/config.go b/pkg/bloomcompactor/config.go index e0b2afd924f4a..8b42cd6834710 100644 --- a/pkg/bloomcompactor/config.go +++ b/pkg/bloomcompactor/config.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/util/ring" ) const ( diff --git a/pkg/bloomcompactor/controller.go b/pkg/bloomcompactor/controller.go index 1f8770cc216fb..37a7c6bc69b69 100644 --- a/pkg/bloomcompactor/controller.go +++ b/pkg/bloomcompactor/controller.go @@ -12,11 +12,11 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/chunkenc" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/chunkenc" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) type SimpleBloomController struct { diff --git a/pkg/bloomcompactor/controller_test.go b/pkg/bloomcompactor/controller_test.go index 7f81c5abe2d2f..2367ee3cc9566 100644 --- a/pkg/bloomcompactor/controller_test.go +++ b/pkg/bloomcompactor/controller_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) func Test_findGaps(t *testing.T) { diff --git a/pkg/bloomcompactor/metrics.go b/pkg/bloomcompactor/metrics.go index d4b4b0e53d50c..d569a4dbfd82d 100644 --- a/pkg/bloomcompactor/metrics.go +++ b/pkg/bloomcompactor/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) const ( diff --git a/pkg/bloomcompactor/retention.go b/pkg/bloomcompactor/retention.go index 4c7fc39c1ce64..7dd30dece9e8a 100644 --- a/pkg/bloomcompactor/retention.go +++ b/pkg/bloomcompactor/retention.go @@ -13,10 +13,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk/client" - storageconfig "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/validation" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client" + storageconfig "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/validation" ) type retentionSharding interface { diff --git a/pkg/bloomcompactor/retention_test.go b/pkg/bloomcompactor/retention_test.go index 0f880a2bd7e2a..26ad6b3d2e4a6 100644 --- a/pkg/bloomcompactor/retention_test.go +++ b/pkg/bloomcompactor/retention_test.go @@ -15,16 +15,16 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - storageconfig "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - util_log "github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + storageconfig "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) var testTime = parseDayTime("2024-12-31").ModelTime() diff --git a/pkg/bloomcompactor/spec.go b/pkg/bloomcompactor/spec.go index 2f67d8f90dcdd..6c7e095dbed83 100644 --- a/pkg/bloomcompactor/spec.go +++ b/pkg/bloomcompactor/spec.go @@ -10,13 +10,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) // inclusive range diff --git a/pkg/bloomcompactor/spec_test.go b/pkg/bloomcompactor/spec_test.go index e9a403ac6929f..b35d82b9d3f41 100644 --- a/pkg/bloomcompactor/spec_test.go +++ b/pkg/bloomcompactor/spec_test.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/chunkenc" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func blocksFromSchema(t *testing.T, n int, options v1.BlockOptions) 
(res []*v1.Block, data []v1.SeriesWithBloom, refs []bloomshipper.BlockRef) { diff --git a/pkg/bloomcompactor/tracker.go b/pkg/bloomcompactor/tracker.go index 34f726f322a09..1c9bde0a4ae71 100644 --- a/pkg/bloomcompactor/tracker.go +++ b/pkg/bloomcompactor/tracker.go @@ -8,8 +8,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" ) type tableRangeProgress struct { diff --git a/pkg/bloomcompactor/tracker_test.go b/pkg/bloomcompactor/tracker_test.go index 494073e7cc520..e23eb55d6dc64 100644 --- a/pkg/bloomcompactor/tracker_test.go +++ b/pkg/bloomcompactor/tracker_test.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" ) func mkTblRange(tenant string, tbl config.DayTime, from, through model.Fingerprint) *tenantTableRange { diff --git a/pkg/bloomcompactor/tsdb.go b/pkg/bloomcompactor/tsdb.go index db2adf9fdc74f..3ad359bc31227 100644 --- a/pkg/bloomcompactor/tsdb.go +++ b/pkg/bloomcompactor/tsdb.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - baseStore "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/chunkenc" + baseStore "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) const ( diff --git a/pkg/bloomcompactor/tsdb_test.go b/pkg/bloomcompactor/tsdb_test.go index 30fc668a5a927..a18e36ddb6d15 100644 --- a/pkg/bloomcompactor/tsdb_test.go +++ b/pkg/bloomcompactor/tsdb_test.go @@ -9,8 +9,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type forSeriesTestImpl []*v1.Series diff --git a/pkg/bloomcompactor/versioned_range.go b/pkg/bloomcompactor/versioned_range.go index 0c399025f610f..03da12f1d7da5 100644 --- a/pkg/bloomcompactor/versioned_range.go +++ b/pkg/bloomcompactor/versioned_range.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + 
v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) type tsdbToken struct { diff --git a/pkg/bloomcompactor/versioned_range_test.go b/pkg/bloomcompactor/versioned_range_test.go index 6c4329a0dba99..a85418bc6e1e5 100644 --- a/pkg/bloomcompactor/versioned_range_test.go +++ b/pkg/bloomcompactor/versioned_range_test.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" ) func Test_TsdbTokenRange(t *testing.T) { diff --git a/pkg/bloomgateway/bloomgateway.go b/pkg/bloomgateway/bloomgateway.go index 482d6d8ef8660..d2ac958f424aa 100644 --- a/pkg/bloomgateway/bloomgateway.go +++ b/pkg/bloomgateway/bloomgateway.go @@ -56,12 +56,12 @@ import ( "github.com/prometheus/client_golang/prometheus" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/queue" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/queue" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) var errGatewayUnhealthy = errors.New("bloom-gateway is unhealthy in the ring") diff --git a/pkg/bloomgateway/bloomgateway_test.go b/pkg/bloomgateway/bloomgateway_test.go index 59f37974a4ce1..edaa2ea7f0c82 100644 --- a/pkg/bloomgateway/bloomgateway_test.go +++ b/pkg/bloomgateway/bloomgateway_test.go @@ -20,17 +20,17 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - bloomshipperconfig "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + bloomshipperconfig "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) func groupRefs(t *testing.T, chunkRefs []*logproto.ChunkRef) []*logproto.GroupedChunkRefs { diff --git a/pkg/bloomgateway/cache.go 
b/pkg/bloomgateway/cache.go index aec04333368d0..60124f353e2a6 100644 --- a/pkg/bloomgateway/cache.go +++ b/pkg/bloomgateway/cache.go @@ -11,9 +11,9 @@ import ( "golang.org/x/exp/slices" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) const ( diff --git a/pkg/bloomgateway/cache_test.go b/pkg/bloomgateway/cache_test.go index bf1a8dbaa365b..3694a20ca09dc 100644 --- a/pkg/bloomgateway/cache_test.go +++ b/pkg/bloomgateway/cache_test.go @@ -11,13 +11,13 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" ) // Range is 1000-4000 diff --git a/pkg/bloomgateway/client.go b/pkg/bloomgateway/client.go index f08397693f86f..ff3ef1defb736 100644 --- a/pkg/bloomgateway/client.go +++ b/pkg/bloomgateway/client.go @@ -25,17 +25,17 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/queue" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/queue" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/bloomgateway/client_test.go b/pkg/bloomgateway/client_test.go index e4b905c37b12c..d1de9dbab84e2 100644 --- a/pkg/bloomgateway/client_test.go +++ b/pkg/bloomgateway/client_test.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/bloomutils" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/validation" + 
"github.com/grafana/loki/v3/pkg/bloomutils" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/validation" ) func rs(id int, tokens ...uint32) ring.ReplicationSet { diff --git a/pkg/bloomgateway/config.go b/pkg/bloomgateway/config.go index 356bc782fb839..9eaa6771e674f 100644 --- a/pkg/bloomgateway/config.go +++ b/pkg/bloomgateway/config.go @@ -3,7 +3,7 @@ package bloomgateway import ( "flag" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/util/ring" ) // Config configures the Bloom Gateway component. diff --git a/pkg/bloomgateway/multiplexing.go b/pkg/bloomgateway/multiplexing.go index 97e0b0aa6d66f..fab8fd867765a 100644 --- a/pkg/bloomgateway/multiplexing.go +++ b/pkg/bloomgateway/multiplexing.go @@ -9,11 +9,11 @@ import ( "github.com/oklog/ulid" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) const ( diff --git a/pkg/bloomgateway/multiplexing_test.go b/pkg/bloomgateway/multiplexing_test.go index af79f37b358b4..27c0bbbe5ec69 100644 --- a/pkg/bloomgateway/multiplexing_test.go +++ b/pkg/bloomgateway/multiplexing_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func TestTask(t *testing.T) { diff --git a/pkg/bloomgateway/processor.go b/pkg/bloomgateway/processor.go index 5d43e79eece3e..401bd9210e0ae 100644 --- a/pkg/bloomgateway/processor.go +++ b/pkg/bloomgateway/processor.go @@ -12,9 +12,9 @@ import ( "github.com/grafana/dskit/concurrency" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func newProcessor(id string, concurrency int, store bloomshipper.Store, logger log.Logger, metrics *workerMetrics) *processor { diff --git a/pkg/bloomgateway/processor_test.go b/pkg/bloomgateway/processor_test.go index d0ecaf769944f..69259a9193945 100644 --- a/pkg/bloomgateway/processor_test.go +++ b/pkg/bloomgateway/processor_test.go @@ -14,11 +14,11 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - 
"github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/util/constants" ) var _ bloomshipper.Store = &dummyStore{} diff --git a/pkg/bloomgateway/querier.go b/pkg/bloomgateway/querier.go index 32f14b7e668c9..a9d494beb0c7b 100644 --- a/pkg/bloomgateway/querier.go +++ b/pkg/bloomgateway/querier.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/plan" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/plan" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/util/constants" ) type querierMetrics struct { diff --git a/pkg/bloomgateway/querier_test.go b/pkg/bloomgateway/querier_test.go index 0d7872927cc42..f952f3733233e 100644 --- a/pkg/bloomgateway/querier_test.go +++ b/pkg/bloomgateway/querier_test.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) type noopClient struct { diff --git a/pkg/bloomgateway/util.go b/pkg/bloomgateway/util.go index e07c5740fdc6d..bf5f5ee1501e4 100644 --- a/pkg/bloomgateway/util.go +++ b/pkg/bloomgateway/util.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func getDayTime(ts model.Time) time.Time { diff --git a/pkg/bloomgateway/util_test.go b/pkg/bloomgateway/util_test.go index f624d337092b0..ed293566886a7 100644 --- a/pkg/bloomgateway/util_test.go +++ b/pkg/bloomgateway/util_test.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) func parseDayTime(s string) config.DayTime { diff --git a/pkg/bloomgateway/worker.go b/pkg/bloomgateway/worker.go index eadbd2fa33c91..fab243f29613a 100644 --- a/pkg/bloomgateway/worker.go +++ b/pkg/bloomgateway/worker.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/common/model" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/queue" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/queue" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" ) const ( diff --git a/pkg/bloomutils/ring.go b/pkg/bloomutils/ring.go index 9858f63e6ba3d..9743298e89b4d 100644 --- a/pkg/bloomutils/ring.go +++ b/pkg/bloomutils/ring.go @@ -13,7 +13,7 @@ import ( "golang.org/x/exp/constraints" "golang.org/x/exp/slices" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) var ( diff --git a/pkg/bloomutils/ring_test.go b/pkg/bloomutils/ring_test.go index a6ef7374f527f..8a373696c7c92 100644 --- a/pkg/bloomutils/ring_test.go +++ b/pkg/bloomutils/ring_test.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/ring" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func uint64Range(min, max uint64) Range[uint64] { diff --git a/pkg/canary/comparator/comparator.go b/pkg/canary/comparator/comparator.go index e7234df191f9c..8f57af09ba783 100644 --- a/pkg/canary/comparator/comparator.go +++ b/pkg/canary/comparator/comparator.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/canary/reader" + "github.com/grafana/loki/v3/pkg/canary/reader" ) const ( diff --git a/pkg/canary/reader/reader.go b/pkg/canary/reader/reader.go index 0725a658b4680..4576ca7a70ed8 100644 --- a/pkg/canary/reader/reader.go +++ b/pkg/canary/reader/reader.go @@ -23,10 +23,10 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/config" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util/build" - "github.com/grafana/loki/pkg/util/unmarshal" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/unmarshal" ) var ( diff --git a/pkg/canary/writer/push.go b/pkg/canary/writer/push.go index 799f265451f97..15d0b1ba8d6f4 100644 --- a/pkg/canary/writer/push.go +++ b/pkg/canary/writer/push.go @@ -19,8 +19,8 @@ import ( "github.com/prometheus/common/config" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/pkg/canary/writer/push_test.go b/pkg/canary/writer/push_test.go index 89204f2a00a82..b61272eb2ec37 100644 --- a/pkg/canary/writer/push_test.go +++ b/pkg/canary/writer/push_test.go @@ -17,8 +17,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index 793e0b751829b..e2d520df6e024 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -6,10 +6,10 @@ import ( "sort" "time" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" 
+ "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/filter" ) const ( diff --git a/pkg/chunkenc/facade.go b/pkg/chunkenc/facade.go index d66e994539f28..22a6e760d590b 100644 --- a/pkg/chunkenc/facade.go +++ b/pkg/chunkenc/facade.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" ) // GzipLogChunk is a cortex encoding type for our chunks. diff --git a/pkg/chunkenc/hash_test.go b/pkg/chunkenc/hash_test.go index 0ca899a72cbe9..e75251f57cb5c 100644 --- a/pkg/chunkenc/hash_test.go +++ b/pkg/chunkenc/hash_test.go @@ -8,7 +8,7 @@ import ( "github.com/segmentio/fasthash/fnv1a" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc/testdata" + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" ) var res uint64 diff --git a/pkg/chunkenc/interface.go b/pkg/chunkenc/interface.go index 0985f4a883c22..b96d9f705d092 100644 --- a/pkg/chunkenc/interface.go +++ b/pkg/chunkenc/interface.go @@ -8,10 +8,10 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/filter" ) // Errors returned by the chunk interface. diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index df59db7c755c8..107e3c71a97d5 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -17,13 +17,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index 071d7bab21538..06c137823ac46 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -18,15 +18,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc/testdata" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/v3/pkg/storage/chunk" + 
"github.com/grafana/loki/v3/pkg/util/filter" ) var testEncoding = []Encoding{ diff --git a/pkg/chunkenc/pool.go b/pkg/chunkenc/pool.go index 4b6cf7abb90bc..486bef44b3da8 100644 --- a/pkg/chunkenc/pool.go +++ b/pkg/chunkenc/pool.go @@ -14,7 +14,7 @@ import ( "github.com/pierrec/lz4/v4" "github.com/prometheus/prometheus/util/pool" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // WriterPool is a pool of io.Writer diff --git a/pkg/chunkenc/symbols.go b/pkg/chunkenc/symbols.go index cb1c5586775ae..bed4035400c74 100644 --- a/pkg/chunkenc/symbols.go +++ b/pkg/chunkenc/symbols.go @@ -11,7 +11,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // symbol holds reference to a label name and value pair diff --git a/pkg/chunkenc/unordered.go b/pkg/chunkenc/unordered.go index 883be05154f2d..788f9c0a7c45b 100644 --- a/pkg/chunkenc/unordered.go +++ b/pkg/chunkenc/unordered.go @@ -14,10 +14,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) var noopStreamPipeline = log.NewNoopPipeline().ForStream(labels.Labels{}) diff --git a/pkg/chunkenc/unordered_test.go b/pkg/chunkenc/unordered_test.go index d92c2f1054bb9..f4930952660fc 100644 --- a/pkg/chunkenc/unordered_test.go +++ b/pkg/chunkenc/unordered_test.go @@ -12,10 +12,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) func iterEq(t *testing.T, exp []entry, got iter.EntryIterator) { diff --git a/pkg/chunkenc/util_test.go b/pkg/chunkenc/util_test.go index 3b5118495ddc6..a1860f9ae297a 100644 --- a/pkg/chunkenc/util_test.go +++ b/pkg/chunkenc/util_test.go @@ -4,8 +4,8 @@ import ( "math/rand" "time" - "github.com/grafana/loki/pkg/chunkenc/testdata" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" + "github.com/grafana/loki/v3/pkg/logproto" ) func logprotoEntry(ts int64, line string) *logproto.Entry { diff --git a/pkg/compactor/client/grpc.go b/pkg/compactor/client/grpc.go index 89fdbd7015eec..6a03b6b4a0c40 100644 --- a/pkg/compactor/client/grpc.go +++ b/pkg/compactor/client/grpc.go @@ -12,8 +12,8 @@ import ( "github.com/prometheus/common/model" "google.golang.org/grpc" - deletion_grpc "github.com/grafana/loki/pkg/compactor/client/grpc" - "github.com/grafana/loki/pkg/compactor/deletion" + deletion_grpc "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + "github.com/grafana/loki/v3/pkg/compactor/deletion" ) type GRPCConfig struct { diff --git a/pkg/compactor/client/http.go b/pkg/compactor/client/http.go index ea30094055519..7e8120067ae80 100644 --- a/pkg/compactor/client/http.go +++ b/pkg/compactor/client/http.go @@ -13,8 +13,8 @@ import ( "github.com/go-kit/log/level" 
"github.com/grafana/dskit/crypto/tls" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/compactor/compactor.go b/pkg/compactor/compactor.go index 6521983729b9d..f6fa95f10a4ad 100644 --- a/pkg/compactor/compactor.go +++ b/pkg/compactor/compactor.go @@ -19,18 +19,18 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) // Here is how the generic compactor works: diff --git a/pkg/compactor/compactor_test.go b/pkg/compactor/compactor_test.go index cfcc55e456d0d..3fccbb237b78f 100644 --- a/pkg/compactor/compactor_test.go +++ b/pkg/compactor/compactor_test.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" - loki_net "github.com/grafana/loki/pkg/util/net" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" + loki_net "github.com/grafana/loki/v3/pkg/util/net" + "github.com/grafana/loki/v3/pkg/validation" ) const indexTablePrefix = "table_" diff --git a/pkg/compactor/deletion/delete_request.go b/pkg/compactor/deletion/delete_request.go index 5af9716a74ed3..9ce7f381fb105 100644 --- a/pkg/compactor/deletion/delete_request.go +++ b/pkg/compactor/deletion/delete_request.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log 
"github.com/grafana/loki/v3/pkg/util/log" ) type timeInterval struct { diff --git a/pkg/compactor/deletion/delete_request_test.go b/pkg/compactor/deletion/delete_request_test.go index bd83f95913825..f67a06dc483fb 100644 --- a/pkg/compactor/deletion/delete_request_test.go +++ b/pkg/compactor/deletion/delete_request_test.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/filter" ) const ( diff --git a/pkg/compactor/deletion/delete_requests_client.go b/pkg/compactor/deletion/delete_requests_client.go index 62b6f509880e2..8395b33cd9f39 100644 --- a/pkg/compactor/deletion/delete_requests_client.go +++ b/pkg/compactor/deletion/delete_requests_client.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) type CompactorClient interface { diff --git a/pkg/compactor/deletion/delete_requests_manager.go b/pkg/compactor/deletion/delete_requests_manager.go index c18d1b032ba73..ba99625b2dd96 100644 --- a/pkg/compactor/deletion/delete_requests_manager.go +++ b/pkg/compactor/deletion/delete_requests_manager.go @@ -12,10 +12,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/compactor/deletion/delete_requests_manager_test.go b/pkg/compactor/deletion/delete_requests_manager_test.go index 44285bb890b4e..04aa986ac492d 100644 --- a/pkg/compactor/deletion/delete_requests_manager_test.go +++ b/pkg/compactor/deletion/delete_requests_manager_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/filter" ) const testUserID = "test-user" diff --git a/pkg/compactor/deletion/delete_requests_store.go b/pkg/compactor/deletion/delete_requests_store.go index 20212467c15d7..ee8f324d6b0be 100644 --- a/pkg/compactor/deletion/delete_requests_store.go +++ b/pkg/compactor/deletion/delete_requests_store.go @@ -17,8 +17,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) type ( diff --git a/pkg/compactor/deletion/delete_requests_store_test.go 
b/pkg/compactor/deletion/delete_requests_store_test.go index 0bf4cb27f7bd8..fa02a44bc7598 100644 --- a/pkg/compactor/deletion/delete_requests_store_test.go +++ b/pkg/compactor/deletion/delete_requests_store_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) func TestDeleteRequestsStore(t *testing.T) { diff --git a/pkg/compactor/deletion/delete_requests_table.go b/pkg/compactor/deletion/delete_requests_table.go index 3143a51cc5444..80a47a5e6435b 100644 --- a/pkg/compactor/deletion/delete_requests_table.go +++ b/pkg/compactor/deletion/delete_requests_table.go @@ -13,12 +13,12 @@ import ( "github.com/go-kit/log/level" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type deleteRequestsTable struct { diff --git a/pkg/compactor/deletion/delete_requests_table_test.go b/pkg/compactor/deletion/delete_requests_table_test.go index 4b30a9bc67b1f..b5fcacaa5d383 100644 --- a/pkg/compactor/deletion/delete_requests_table_test.go +++ b/pkg/compactor/deletion/delete_requests_table_test.go @@ -8,11 +8,11 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) func TestDeleteRequestsTable(t *testing.T) { diff --git a/pkg/compactor/deletion/grpc_request_handler.go b/pkg/compactor/deletion/grpc_request_handler.go index 94ba7c163524b..bf68c397043b4 100644 --- a/pkg/compactor/deletion/grpc_request_handler.go +++ b/pkg/compactor/deletion/grpc_request_handler.go @@ -8,8 +8,8 @@ import ( "github.com/grafana/dskit/tenant" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/compactor/client/grpc" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type GRPCRequestHandler struct { diff --git a/pkg/compactor/deletion/grpc_request_handler_test.go 
b/pkg/compactor/deletion/grpc_request_handler_test.go index c7171e6ac3406..612777e9101cc 100644 --- a/pkg/compactor/deletion/grpc_request_handler_test.go +++ b/pkg/compactor/deletion/grpc_request_handler_test.go @@ -17,8 +17,8 @@ import ( "google.golang.org/grpc/status" "google.golang.org/grpc/test/bufconn" - compactor_client_grpc "github.com/grafana/loki/pkg/compactor/client/grpc" - "github.com/grafana/loki/pkg/compactor/deletionmode" + compactor_client_grpc "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" ) func server(t *testing.T, h *GRPCRequestHandler) (compactor_client_grpc.CompactorClient, func()) { diff --git a/pkg/compactor/deletion/metrics.go b/pkg/compactor/deletion/metrics.go index b3196948830b9..9d89f46c88d9d 100644 --- a/pkg/compactor/deletion/metrics.go +++ b/pkg/compactor/deletion/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type DeleteRequestClientMetrics struct { diff --git a/pkg/compactor/deletion/request_handler.go b/pkg/compactor/deletion/request_handler.go index 458279d3b8523..c4c0e1105341c 100644 --- a/pkg/compactor/deletion/request_handler.go +++ b/pkg/compactor/deletion/request_handler.go @@ -15,9 +15,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // DeleteRequestHandler provides handlers for delete requests diff --git a/pkg/compactor/deletion/request_handler_test.go b/pkg/compactor/deletion/request_handler_test.go index 58e2ffd13c328..1df1446bd4e55 100644 --- a/pkg/compactor/deletion/request_handler_test.go +++ b/pkg/compactor/deletion/request_handler_test.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) func TestAddDeleteRequestHandler(t *testing.T) { diff --git a/pkg/compactor/deletion/tenant_delete_requests_client.go b/pkg/compactor/deletion/tenant_delete_requests_client.go index d3ba3a9905a3b..495ece96e181c 100644 --- a/pkg/compactor/deletion/tenant_delete_requests_client.go +++ b/pkg/compactor/deletion/tenant_delete_requests_client.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) const deletionNotAvailableMsg = "deletion is not available for this tenant" diff --git a/pkg/compactor/deletion/tenant_request_handler_test.go b/pkg/compactor/deletion/tenant_request_handler_test.go index c4a18543ccef2..c57dc84ba4caf 100644 --- a/pkg/compactor/deletion/tenant_request_handler_test.go +++ b/pkg/compactor/deletion/tenant_request_handler_test.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestDeleteRequestHandlerDeletionMiddleware(t *testing.T) { diff --git a/pkg/compactor/deletion/util.go b/pkg/compactor/deletion/util.go index 18d5822141f14..c20da8a4a2602 100644 --- a/pkg/compactor/deletion/util.go +++ 
b/pkg/compactor/deletion/util.go @@ -3,8 +3,8 @@ package deletion import ( "errors" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) var ( diff --git a/pkg/compactor/generationnumber/gennumber_loader.go b/pkg/compactor/generationnumber/gennumber_loader.go index ec8bb0bcab567..c2edb62dc1664 100644 --- a/pkg/compactor/generationnumber/gennumber_loader.go +++ b/pkg/compactor/generationnumber/gennumber_loader.go @@ -10,7 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) const reloadDuration = 5 * time.Minute diff --git a/pkg/compactor/generationnumber/metrics.go b/pkg/compactor/generationnumber/metrics.go index c71f1b5821562..ccd06d74f8a73 100644 --- a/pkg/compactor/generationnumber/metrics.go +++ b/pkg/compactor/generationnumber/metrics.go @@ -3,7 +3,7 @@ package generationnumber import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // Make this package level because we want several instances of a loader to be able to report metrics diff --git a/pkg/compactor/index_set.go b/pkg/compactor/index_set.go index 9b6267d60739d..7102aef564259 100644 --- a/pkg/compactor/index_set.go +++ b/pkg/compactor/index_set.go @@ -12,12 +12,12 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type IndexSet interface { diff --git a/pkg/compactor/retention/expiration.go b/pkg/compactor/retention/expiration.go index 9111a6c7afee1..45029f9652c5a 100644 --- a/pkg/compactor/retention/expiration.go +++ b/pkg/compactor/retention/expiration.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // IntervalFilter contains the interval to delete diff --git a/pkg/compactor/retention/expiration_test.go b/pkg/compactor/retention/expiration_test.go index ee61eb102b7ba..3cc69f88ae613 100644 --- a/pkg/compactor/retention/expiration_test.go +++ b/pkg/compactor/retention/expiration_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) type retentionLimit struct { diff --git a/pkg/compactor/retention/marker.go 
b/pkg/compactor/retention/marker.go index 84e461cd7bc5d..bad9b46d1c57b 100644 --- a/pkg/compactor/retention/marker.go +++ b/pkg/compactor/retention/marker.go @@ -15,9 +15,9 @@ import ( "github.com/go-kit/log/level" "go.etcd.io/bbolt" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/compactor/retention/retention.go b/pkg/compactor/retention/retention.go index 703e7e0182e6a..0a4aba59be474 100644 --- a/pkg/compactor/retention/retention.go +++ b/pkg/compactor/retention/retention.go @@ -15,13 +15,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var chunkBucket = []byte("chunks") diff --git a/pkg/compactor/retention/retention_test.go b/pkg/compactor/retention/retention_test.go index 36faaaf332dc8..6c261d34799e5 100644 --- a/pkg/compactor/retention/retention_test.go +++ b/pkg/compactor/retention/retention_test.go @@ -20,14 +20,14 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - ingesterclient "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/filter" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + ingesterclient "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/filter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) type mockChunkClient struct { diff --git a/pkg/compactor/retention/util_test.go b/pkg/compactor/retention/util_test.go index bb2f0fe2e0a77..3597a11565adb 100644 --- a/pkg/compactor/retention/util_test.go +++ b/pkg/compactor/retention/util_test.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - 
"github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func dayFromTime(t model.Time) config.DayTime { diff --git a/pkg/compactor/table.go b/pkg/compactor/table.go index b7b94627c7415..c371a5db88f59 100644 --- a/pkg/compactor/table.go +++ b/pkg/compactor/table.go @@ -12,11 +12,11 @@ import ( "github.com/grafana/dskit/concurrency" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/compactor/retention" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/retention" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/compactor/table_test.go b/pkg/compactor/table_test.go index c08c0390de8e7..462511eca4782 100644 --- a/pkg/compactor/table_test.go +++ b/pkg/compactor/table_test.go @@ -12,10 +12,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) const ( diff --git a/pkg/compactor/testutil.go b/pkg/compactor/testutil.go index 094eea2903ad0..4ebba27f64bfa 100644 --- a/pkg/compactor/testutil.go +++ b/pkg/compactor/testutil.go @@ -17,12 +17,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" ) const ( diff --git a/pkg/configs/client/client.go b/pkg/configs/client/client.go index fb530ed020e72..5592fbe1b83dc 100644 --- a/pkg/configs/client/client.go +++ b/pkg/configs/client/client.go @@ -19,8 +19,8 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" 
"github.com/prometheus/common/version" - "github.com/grafana/loki/pkg/configs/userconfig" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/configs/userconfig" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/configs/client/configs_test.go b/pkg/configs/client/configs_test.go index 96e1fa9f7d3cf..311c33ca91ad9 100644 --- a/pkg/configs/client/configs_test.go +++ b/pkg/configs/client/configs_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/configs/userconfig" + "github.com/grafana/loki/v3/pkg/configs/userconfig" ) var response = `{ diff --git a/pkg/configs/userconfig/config.go b/pkg/configs/userconfig/config.go index 2bb33c824f263..e7d22e033a8ec 100644 --- a/pkg/configs/userconfig/config.go +++ b/pkg/configs/userconfig/config.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/prometheus/rules" "gopkg.in/yaml.v3" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // An ID is the ID of a single users's Cortex configuration. When a diff --git a/pkg/configs/userconfig/config_test.go b/pkg/configs/userconfig/config_test.go index 0b304f28288db..ac81d47e4ee98 100644 --- a/pkg/configs/userconfig/config_test.go +++ b/pkg/configs/userconfig/config_test.go @@ -18,7 +18,7 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var legacyRulesFile = `ALERT TestAlert diff --git a/pkg/distributor/distributor.go b/pkg/distributor/distributor.go index 01f77e320b5a6..d721a60f3070e 100644 --- a/pkg/distributor/distributor.go +++ b/pkg/distributor/distributor.go @@ -33,22 +33,22 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/distributor/distributor_ring.go b/pkg/distributor/distributor_ring.go index 
0c7451ebdea2f..0beffd91791a3 100644
--- a/pkg/distributor/distributor_ring.go
+++ b/pkg/distributor/distributor_ring.go
@@ -14,7 +14,7 @@ import (
 	"github.com/grafana/dskit/netutil"
 	"github.com/grafana/dskit/ring"
 
-	util_log "github.com/grafana/loki/pkg/util/log"
+	util_log "github.com/grafana/loki/v3/pkg/util/log"
 )
 
 // RingConfig masks the ring lifecycler config which contains
diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go
index e4bf766b42ee6..09ee068c2d53d 100644
--- a/pkg/distributor/distributor_test.go
+++ b/pkg/distributor/distributor_test.go
@@ -30,20 +30,20 @@ import (
 	"google.golang.org/grpc"
 	"google.golang.org/grpc/health/grpc_health_v1"
 
-	"github.com/grafana/loki/pkg/ingester"
-	"github.com/grafana/loki/pkg/ingester/client"
-	loghttp_push "github.com/grafana/loki/pkg/loghttp/push"
-	"github.com/grafana/loki/pkg/logproto"
-	"github.com/grafana/loki/pkg/logql/syntax"
-	"github.com/grafana/loki/pkg/push"
-	"github.com/grafana/loki/pkg/runtime"
-	"github.com/grafana/loki/pkg/util/constants"
-	fe "github.com/grafana/loki/pkg/util/flagext"
-	loki_flagext "github.com/grafana/loki/pkg/util/flagext"
-	util_log "github.com/grafana/loki/pkg/util/log"
-	loki_net "github.com/grafana/loki/pkg/util/net"
-	"github.com/grafana/loki/pkg/util/test"
-	"github.com/grafana/loki/pkg/validation"
+	"github.com/grafana/loki/v3/pkg/ingester"
+	"github.com/grafana/loki/v3/pkg/ingester/client"
+	loghttp_push "github.com/grafana/loki/v3/pkg/loghttp/push"
+	"github.com/grafana/loki/v3/pkg/logproto"
+	"github.com/grafana/loki/v3/pkg/logql/syntax"
+	"github.com/grafana/loki/v3/pkg/push"
+	"github.com/grafana/loki/v3/pkg/runtime"
+	"github.com/grafana/loki/v3/pkg/util/constants"
+	fe "github.com/grafana/loki/v3/pkg/util/flagext"
+	loki_flagext "github.com/grafana/loki/v3/pkg/util/flagext"
+	util_log "github.com/grafana/loki/v3/pkg/util/log"
+	loki_net "github.com/grafana/loki/v3/pkg/util/net"
+	"github.com/grafana/loki/v3/pkg/util/test"
+	"github.com/grafana/loki/v3/pkg/validation"
 )
 
 var (
diff --git a/pkg/distributor/http.go b/pkg/distributor/http.go
index 54c9588662367..00c3ba53a2806 100644
--- a/pkg/distributor/http.go
+++ b/pkg/distributor/http.go
@@ -8,13 +8,13 @@ import (
 	"github.com/go-kit/log/level"
 	"github.com/grafana/dskit/httpgrpc"
 
-	"github.com/grafana/loki/pkg/util"
+	"github.com/grafana/loki/v3/pkg/util"
 
 	"github.com/grafana/dskit/tenant"
 
-	"github.com/grafana/loki/pkg/loghttp/push"
-	util_log "github.com/grafana/loki/pkg/util/log"
-	"github.com/grafana/loki/pkg/validation"
+	"github.com/grafana/loki/v3/pkg/loghttp/push"
+	util_log "github.com/grafana/loki/v3/pkg/util/log"
+	"github.com/grafana/loki/v3/pkg/validation"
 )
 
 // PushHandler reads a snappy-compressed proto from the HTTP body.
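These hunks all make the same mechanical change: Loki-internal import paths move from github.com/grafana/loki/pkg/... to github.com/grafana/loki/v3/pkg/..., which is what Go semantic import versioning requires once a module declares a major version of 2 or higher. A minimal sketch of what this means for a consumer of these packages follows; the module name and require version shown in the comments are assumptions for illustration, not taken from this patch.

// main.go: a throwaway program showing that the /v3 element is now part of
// the import path itself after this change.
//
// Assumed go.mod for this sketch (hypothetical, not part of the patch):
//
//	module example.com/consumer
//
//	require github.com/grafana/loki/v3 v3.0.0
package main

import (
	"fmt"

	"github.com/grafana/loki/v3/pkg/logproto"
)

func main() {
	// logproto.ChunkRef is referenced elsewhere in these hunks; printing its
	// concrete type is enough to show the renamed import path resolves.
	var ref logproto.ChunkRef
	fmt.Printf("%T\n", ref)
}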
diff --git a/pkg/distributor/http_test.go b/pkg/distributor/http_test.go index 23b2993c5b213..0ecf70fa9a498 100644 --- a/pkg/distributor/http_test.go +++ b/pkg/distributor/http_test.go @@ -9,13 +9,13 @@ import ( "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/dskit/flagext" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestDistributorRingHandler(t *testing.T) { diff --git a/pkg/distributor/ingestion_rate_strategy_test.go b/pkg/distributor/ingestion_rate_strategy_test.go index e87c5ce69f8e2..657d34290984a 100644 --- a/pkg/distributor/ingestion_rate_strategy_test.go +++ b/pkg/distributor/ingestion_rate_strategy_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/distributor/instance_count_test.go b/pkg/distributor/instance_count_test.go index 92abf94c45061..7f861a262284a 100644 --- a/pkg/distributor/instance_count_test.go +++ b/pkg/distributor/instance_count_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestInstanceCountDelegateCounting(t *testing.T) { diff --git a/pkg/distributor/limits.go b/pkg/distributor/limits.go index 927374416e8ba..05734db4184f0 100644 --- a/pkg/distributor/limits.go +++ b/pkg/distributor/limits.go @@ -3,9 +3,9 @@ package distributor import ( "time" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/loghttp/push" ) // Limits is an interface for distributor limits/related configs diff --git a/pkg/distributor/ratestore.go b/pkg/distributor/ratestore.go index 2eb9e9f062326..0fe5c3aca0bce 100644 --- a/pkg/distributor/ratestore.go +++ b/pkg/distributor/ratestore.go @@ -15,9 +15,9 @@ import ( "github.com/opentracing/opentracing-go" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type poolClientFactory interface { diff --git a/pkg/distributor/ratestore_metrics.go b/pkg/distributor/ratestore_metrics.go index 2e31e3934f0bc..fce24ee6617b0 100644 --- a/pkg/distributor/ratestore_metrics.go +++ b/pkg/distributor/ratestore_metrics.go @@ -5,7 +5,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type ratestoreMetrics struct { diff --git a/pkg/distributor/ratestore_test.go b/pkg/distributor/ratestore_test.go index ab01fbc21593b..af9fa9f0adb70 100644 --- a/pkg/distributor/ratestore_test.go +++ b/pkg/distributor/ratestore_test.go @@ -7,16 +7,16 @@ import ( "testing" "time" - 
"github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/validation" "github.com/stretchr/testify/require" - client2 "github.com/grafana/loki/pkg/ingester/client" + client2 "github.com/grafana/loki/v3/pkg/ingester/client" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/dskit/ring" "github.com/grafana/dskit/ring/client" diff --git a/pkg/distributor/shardstreams/config.go b/pkg/distributor/shardstreams/config.go index 6a92472451543..90f1e65600a9b 100644 --- a/pkg/distributor/shardstreams/config.go +++ b/pkg/distributor/shardstreams/config.go @@ -3,7 +3,7 @@ package shardstreams import ( "flag" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type Config struct { diff --git a/pkg/distributor/streamsharder_test.go b/pkg/distributor/streamsharder_test.go index 1bd03d9fdd82f..eff8b476c0866 100644 --- a/pkg/distributor/streamsharder_test.go +++ b/pkg/distributor/streamsharder_test.go @@ -3,7 +3,7 @@ package distributor import ( "fmt" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type StreamSharderMock struct { diff --git a/pkg/distributor/tee_test.go b/pkg/distributor/tee_test.go index f953e09b75111..ece0e97c1ce69 100644 --- a/pkg/distributor/tee_test.go +++ b/pkg/distributor/tee_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/mock" - "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/push" ) type mockedTee struct { diff --git a/pkg/distributor/validator.go b/pkg/distributor/validator.go index 6f0bce53d983b..2ef4c78cff94a 100644 --- a/pkg/distributor/validator.go +++ b/pkg/distributor/validator.go @@ -9,9 +9,9 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/distributor/validator_test.go b/pkg/distributor/validator_test.go index 0bfdd7d4d0568..75fee909000cc 100644 --- a/pkg/distributor/validator_test.go +++ b/pkg/distributor/validator_test.go @@ -11,10 +11,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/distributor/writefailures/cfg.go b/pkg/distributor/writefailures/cfg.go index 393000f2573a2..727c4a1dda0eb 100644 --- a/pkg/distributor/writefailures/cfg.go +++ b/pkg/distributor/writefailures/cfg.go @@ -3,7 +3,7 @@ package writefailures import ( "flag" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type Cfg struct { diff --git a/pkg/distributor/writefailures/manager.go b/pkg/distributor/writefailures/manager.go index 2804c47e9522b..f02ab2e57d76f 100644 --- a/pkg/distributor/writefailures/manager.go +++ b/pkg/distributor/writefailures/manager.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/limiter" 
"github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/runtime" + "github.com/grafana/loki/v3/pkg/runtime" ) type Manager struct { diff --git a/pkg/distributor/writefailures/manager_test.go b/pkg/distributor/writefailures/manager_test.go index 6f5f1eee38419..fb3d7577953a7 100644 --- a/pkg/distributor/writefailures/manager_test.go +++ b/pkg/distributor/writefailures/manager_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/util/flagext" ) func TestWriteFailuresLogging(t *testing.T) { diff --git a/pkg/distributor/writefailures/metrics.go b/pkg/distributor/writefailures/metrics.go index e62d6f19b4f79..1d4c1a2fe85c7 100644 --- a/pkg/distributor/writefailures/metrics.go +++ b/pkg/distributor/writefailures/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type metrics struct { diff --git a/pkg/ingester/checkpoint.go b/pkg/ingester/checkpoint.go index e2c8ef2c18681..9de54888f4748 100644 --- a/pkg/ingester/checkpoint.go +++ b/pkg/ingester/checkpoint.go @@ -20,11 +20,11 @@ import ( "github.com/prometheus/prometheus/tsdb/wlog" prompool "github.com/prometheus/prometheus/util/pool" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/pool" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/pool" ) var ( diff --git a/pkg/ingester/checkpoint.pb.go b/pkg/ingester/checkpoint.pb.go index 4f66c217b5ef1..b4dbe51913ace 100644 --- a/pkg/ingester/checkpoint.pb.go +++ b/pkg/ingester/checkpoint.pb.go @@ -10,8 +10,8 @@ import ( proto "github.com/gogo/protobuf/proto" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - _ "github.com/grafana/loki/pkg/logproto" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" + _ "github.com/grafana/loki/v3/pkg/logproto" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" io "io" math "math" math_bits "math/bits" @@ -139,9 +139,9 @@ func (m *Chunk) GetHead() []byte { type Series struct { UserID string `protobuf:"bytes,1,opt,name=userID,proto3" json:"userID,omitempty"` // post mapped fingerprint is necessary because subsequent wal writes will reference it. 
- Fingerprint uint64 `protobuf:"varint,2,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"labels"` - Chunks []Chunk `protobuf:"bytes,4,rep,name=chunks,proto3" json:"chunks"` + Fingerprint uint64 `protobuf:"varint,2,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"labels"` + Chunks []Chunk `protobuf:"bytes,4,rep,name=chunks,proto3" json:"chunks"` // most recently pushed timestamp. To time.Time `protobuf:"bytes,5,opt,name=to,proto3,stdtime" json:"to"` // most recently pushed line. @@ -242,40 +242,40 @@ func init() { func init() { proto.RegisterFile("pkg/ingester/checkpoint.proto", fileDescriptor_00f4b7152db9bdb5) } var fileDescriptor_00f4b7152db9bdb5 = []byte{ - // 517 bytes of a gzipped FileDescriptorProto + // 521 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x31, 0x8f, 0xd3, 0x30, - 0x14, 0x8e, 0xdb, 0x5c, 0xae, 0x75, 0x61, 0xb1, 0x4e, 0xc8, 0x14, 0xe1, 0x56, 0x37, 0x75, 0x4a, - 0x44, 0x61, 0x80, 0x05, 0xe9, 0x7a, 0x08, 0x09, 0xa9, 0x03, 0x0a, 0xc7, 0xc2, 0x82, 0xdc, 0xc4, - 0x75, 0xa2, 0xa6, 0x71, 0x64, 0xbb, 0x43, 0x37, 0x7e, 0xc2, 0x0d, 0xfc, 0x08, 0x7e, 0xca, 0x8d, - 0x1d, 0x4f, 0x20, 0x1d, 0x34, 0x5d, 0x18, 0xef, 0x27, 0x20, 0x3b, 0x49, 0x29, 0x1b, 0xdd, 0xde, - 0xf7, 0x9e, 0xbf, 0xf7, 0xc9, 0xdf, 0xfb, 0xe0, 0xd3, 0x62, 0xc1, 0x83, 0x34, 0xe7, 0x4c, 0x69, - 0x26, 0x83, 0x28, 0x61, 0xd1, 0xa2, 0x10, 0x69, 0xae, 0xfd, 0x42, 0x0a, 0x2d, 0xd0, 0xc3, 0x4c, - 0x2c, 0xd2, 0xcf, 0xcd, 0xbc, 0x7f, 0xc6, 0x05, 0x17, 0x76, 0x12, 0x98, 0xaa, 0x7a, 0xd4, 0x1f, - 0x70, 0x21, 0x78, 0xc6, 0x02, 0x8b, 0x66, 0xab, 0x79, 0xa0, 0xd3, 0x25, 0x53, 0x9a, 0x2e, 0x8b, - 0xfa, 0xc1, 0x13, 0x23, 0x92, 0x09, 0x5e, 0x31, 0x9b, 0xa2, 0x1a, 0x9e, 0xff, 0x68, 0xc1, 0x93, - 0xcb, 0x64, 0x95, 0x2f, 0xd0, 0x4b, 0xe8, 0xce, 0xa5, 0x58, 0x62, 0x30, 0x04, 0xa3, 0xde, 0xb8, - 0xef, 0x57, 0x6b, 0xfd, 0x66, 0xad, 0x7f, 0xd5, 0xac, 0x9d, 0x74, 0x6e, 0xee, 0x06, 0xce, 0xf5, - 0xcf, 0x01, 0x08, 0x2d, 0x03, 0xbd, 0x80, 0x2d, 0x2d, 0x70, 0xeb, 0x08, 0x5e, 0x4b, 0x0b, 0x34, - 0x81, 0xdd, 0x79, 0xb6, 0x52, 0x09, 0x8b, 0x2f, 0x34, 0x6e, 0x1f, 0x41, 0xfe, 0x4b, 0x43, 0x6f, - 0x61, 0x2f, 0xa3, 0x4a, 0x7f, 0x2c, 0x62, 0xaa, 0x59, 0x8c, 0xdd, 0x23, 0xb6, 0x1c, 0x12, 0xd1, - 0x23, 0xe8, 0x45, 0x99, 0x50, 0x2c, 0xc6, 0x27, 0x43, 0x30, 0xea, 0x84, 0x35, 0x32, 0x7d, 0xb5, - 0xce, 0x23, 0x16, 0x63, 0xaf, 0xea, 0x57, 0x08, 0x21, 0xe8, 0xc6, 0x54, 0x53, 0x7c, 0x3a, 0x04, - 0xa3, 0x07, 0xa1, 0xad, 0x4d, 0x2f, 0x61, 0x34, 0xc6, 0x9d, 0xaa, 0x67, 0xea, 0xf3, 0xaf, 0x6d, - 0xe8, 0x7d, 0x60, 0x32, 0x65, 0xca, 0xac, 0x5a, 0x29, 0x26, 0xdf, 0xbd, 0xb1, 0x06, 0x77, 0xc3, - 0x1a, 0xa1, 0x21, 0xec, 0xcd, 0xcd, 0x85, 0x65, 0x21, 0xd3, 0x5c, 0x5b, 0x17, 0xdd, 0xf0, 0xb0, - 0x85, 0x32, 0xe8, 0x65, 0x74, 0xc6, 0x32, 0x85, 0xdb, 0xc3, 0xf6, 0xa8, 0x37, 0x7e, 0xec, 0xef, - 0x6f, 0x38, 0x65, 0x9c, 0x46, 0xeb, 0xa9, 0x99, 0xbe, 0xa7, 0xa9, 0x9c, 0xbc, 0x32, 0xdf, 0xfb, - 0x7e, 0x37, 0x78, 0xc6, 0x53, 0x9d, 0xac, 0x66, 0x7e, 0x24, 0x96, 0x01, 0x97, 0x74, 0x4e, 0x73, - 0x1a, 0x98, 0x2c, 0x05, 0x87, 0x51, 0xf0, 0x2d, 0xef, 0x22, 0xa6, 0x85, 0x66, 0x32, 0xac, 0x35, - 0xd0, 0x18, 0x7a, 0x91, 0xc9, 0x83, 0xc2, 0xae, 0x55, 0x3b, 0xf3, 0xff, 
0x09, 0xa1, 0x6f, 0xc3, - 0x32, 0x71, 0x8d, 0x50, 0x58, 0xbf, 0xac, 0x03, 0x70, 0x72, 0x64, 0x00, 0xfa, 0xb0, 0x63, 0x6e, - 0x30, 0x4d, 0x73, 0x66, 0xed, 0xed, 0x86, 0x7b, 0x8c, 0x30, 0x3c, 0x65, 0xb9, 0x96, 0xeb, 0x4b, - 0x6d, 0x3d, 0x6e, 0x87, 0x0d, 0x34, 0xb1, 0x49, 0x52, 0x9e, 0x30, 0xa5, 0xaf, 0x94, 0xf5, 0xfa, - 0xbf, 0x63, 0xb3, 0xa7, 0x4d, 0x5e, 0x6f, 0xb6, 0xc4, 0xb9, 0xdd, 0x12, 0xe7, 0x7e, 0x4b, 0xc0, - 0x97, 0x92, 0x80, 0x6f, 0x25, 0x01, 0x37, 0x25, 0x01, 0x9b, 0x92, 0x80, 0x5f, 0x25, 0x01, 0xbf, - 0x4b, 0xe2, 0xdc, 0x97, 0x04, 0x5c, 0xef, 0x88, 0xb3, 0xd9, 0x11, 0xe7, 0x76, 0x47, 0x9c, 0x4f, - 0x9d, 0xc6, 0x83, 0x99, 0x67, 0x85, 0x9e, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0xda, 0xcc, 0xb5, - 0xf6, 0xbf, 0x03, 0x00, 0x00, + 0x14, 0x8e, 0xdb, 0x5c, 0xae, 0xe7, 0xc2, 0x62, 0x9d, 0x90, 0x29, 0xc2, 0xad, 0x6e, 0xea, 0x94, + 0x48, 0xbd, 0x1b, 0x18, 0x10, 0xd2, 0xf5, 0x10, 0x12, 0x52, 0x07, 0x14, 0x8e, 0x85, 0x05, 0xb9, + 0x89, 0xeb, 0x44, 0x4d, 0xe3, 0xc8, 0x76, 0x91, 0xba, 0xf1, 0x13, 0x6e, 0xe3, 0x2f, 0xf0, 0x53, + 0x6e, 0xec, 0x78, 0x02, 0xe9, 0xa0, 0xe9, 0xc2, 0x78, 0x3f, 0x01, 0xd9, 0x49, 0x4a, 0xd9, 0xe8, + 0xf6, 0xbe, 0xf7, 0xfc, 0xbd, 0x4f, 0xfe, 0xde, 0x07, 0x9f, 0x17, 0x73, 0x1e, 0xa4, 0x39, 0x67, + 0x4a, 0x33, 0x19, 0x44, 0x09, 0x8b, 0xe6, 0x85, 0x48, 0x73, 0xed, 0x17, 0x52, 0x68, 0x81, 0x1e, + 0x67, 0x62, 0x9e, 0x7e, 0x6a, 0xe6, 0xbd, 0x53, 0x2e, 0xb8, 0xb0, 0x93, 0xc0, 0x54, 0xd5, 0xa3, + 0x5e, 0x9f, 0x0b, 0xc1, 0x33, 0x16, 0x58, 0x34, 0x5d, 0xce, 0x02, 0x9d, 0x2e, 0x98, 0xd2, 0x74, + 0x51, 0xd4, 0x0f, 0x9e, 0x19, 0x91, 0x4c, 0xf0, 0x8a, 0xd9, 0x14, 0xd5, 0xf0, 0xec, 0x47, 0x0b, + 0x1e, 0x5d, 0x25, 0xcb, 0x7c, 0x8e, 0x5e, 0x40, 0x77, 0x26, 0xc5, 0x02, 0x83, 0x01, 0x18, 0x76, + 0x47, 0x3d, 0xbf, 0x5a, 0xeb, 0x37, 0x6b, 0xfd, 0xeb, 0x66, 0xed, 0xb8, 0x73, 0x7b, 0xdf, 0x77, + 0x6e, 0x7e, 0xf6, 0x41, 0x68, 0x19, 0xe8, 0x02, 0xb6, 0xb4, 0xc0, 0xad, 0x03, 0x78, 0x2d, 0x2d, + 0xd0, 0x18, 0x9e, 0xcc, 0xb2, 0xa5, 0x4a, 0x58, 0x7c, 0xa9, 0x71, 0xfb, 0x00, 0xf2, 0x5f, 0x1a, + 0x7a, 0x03, 0xbb, 0x19, 0x55, 0xfa, 0x43, 0x11, 0x53, 0xcd, 0x62, 0xec, 0x1e, 0xb0, 0x65, 0x9f, + 0x88, 0x9e, 0x40, 0x2f, 0xca, 0x84, 0x62, 0x31, 0x3e, 0x1a, 0x80, 0x61, 0x27, 0xac, 0x91, 0xe9, + 0xab, 0x55, 0x1e, 0xb1, 0x18, 0x7b, 0x55, 0xbf, 0x42, 0x08, 0x41, 0x37, 0xa6, 0x9a, 0xe2, 0xe3, + 0x01, 0x18, 0x3e, 0x0a, 0x6d, 0x6d, 0x7a, 0x09, 0xa3, 0x31, 0xee, 0x54, 0x3d, 0x53, 0x9f, 0x7d, + 0x6d, 0x43, 0xef, 0x3d, 0x93, 0x29, 0x53, 0x66, 0xd5, 0x52, 0x31, 0xf9, 0xf6, 0xb5, 0x35, 0xf8, + 0x24, 0xac, 0x11, 0x1a, 0xc0, 0xee, 0xcc, 0x5c, 0x58, 0x16, 0x32, 0xcd, 0xb5, 0x75, 0xd1, 0x0d, + 0xf7, 0x5b, 0x48, 0x40, 0x2f, 0xa3, 0x53, 0x96, 0x29, 0xdc, 0x1e, 0xb4, 0x87, 0xdd, 0xd1, 0x53, + 0x7f, 0x77, 0xc3, 0x09, 0xe3, 0x34, 0x5a, 0x4d, 0xcc, 0xf4, 0x1d, 0x4d, 0xe5, 0xf8, 0xa5, 0xf9, + 0xde, 0xf7, 0xfb, 0xfe, 0x05, 0x4f, 0x75, 0xb2, 0x9c, 0xfa, 0x91, 0x58, 0x04, 0x5c, 0xd2, 0x19, + 0xcd, 0x69, 0x60, 0xb2, 0x14, 0x7c, 0x3e, 0x0f, 0xf6, 0xd3, 0xe0, 0x5b, 0xea, 0x65, 0x4c, 0x0b, + 0xcd, 0x64, 0x58, 0xcb, 0xa0, 0x11, 0xf4, 0x22, 0x13, 0x09, 0x85, 0x5d, 0x2b, 0x78, 0xea, 0xff, + 0x93, 0x43, 0xdf, 0xe6, 0x65, 0xec, 0x1a, 0xad, 0xb0, 0x7e, 0x59, 0x67, 0xe0, 0xe8, 0xc0, 0x0c, + 0xf4, 0x60, 0xc7, 0x9c, 0x61, 0x92, 0xe6, 0xcc, 0x3a, 0x7c, 0x12, 0xee, 0x30, 0xc2, 0xf0, 0x98, + 0xe5, 0x5a, 0xae, 0xae, 0xb4, 0xb5, 0xb9, 0x1d, 0x36, 0xd0, 0x24, 0x27, 0x49, 0x79, 0xc2, 0x94, + 0xbe, 0x56, 0xd6, 0xee, 0xff, 0x4e, 0xce, 0x8e, 0x36, 0x7e, 0xb5, 0xde, 0x10, 0xe7, 0x6e, 0x43, + 0x9c, 0x87, 0x0d, 0x01, 0x5f, 0x4a, 0x02, 0xbe, 0x95, 0x04, 0xdc, 
0x96, 0x04, 0xac, 0x4b, 0x02, + 0x7e, 0x95, 0x04, 0xfc, 0x2e, 0x89, 0xf3, 0x50, 0x12, 0x70, 0xb3, 0x25, 0xce, 0x7a, 0x4b, 0x9c, + 0xbb, 0x2d, 0x71, 0x3e, 0x76, 0x1a, 0x0f, 0xa6, 0x9e, 0x15, 0x3a, 0xff, 0x13, 0x00, 0x00, 0xff, + 0xff, 0x4e, 0xf6, 0xfc, 0x95, 0xc2, 0x03, 0x00, 0x00, } func (this *Chunk) Equal(that interface{}) bool { @@ -1142,7 +1142,7 @@ func (m *Series) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ingester/checkpoint.proto b/pkg/ingester/checkpoint.proto index 48e100b32ab53..92dae8499ec04 100644 --- a/pkg/ingester/checkpoint.proto +++ b/pkg/ingester/checkpoint.proto @@ -42,7 +42,7 @@ message Series { uint64 fingerprint = 2; repeated logproto.LegacyLabelPair labels = 3 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated Chunk chunks = 4 [(gogoproto.nullable) = false]; // most recently pushed timestamp. diff --git a/pkg/ingester/checkpoint_test.go b/pkg/ingester/checkpoint_test.go index 2cf46d921ce94..e8871e7a13918 100644 --- a/pkg/ingester/checkpoint_test.go +++ b/pkg/ingester/checkpoint_test.go @@ -15,15 +15,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) // small util for ensuring data exists as we expect diff --git a/pkg/ingester/chunk_test.go b/pkg/ingester/chunk_test.go index c18c64fe67762..4523bc8cc1d8b 100644 --- a/pkg/ingester/chunk_test.go +++ b/pkg/ingester/chunk_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" ) func testIteratorForward(t *testing.T, iter iter.EntryIterator, from, through int64) { diff --git a/pkg/ingester/client/client.go b/pkg/ingester/client/client.go index 861a925d6b176..2c4329b56c93e 100644 --- a/pkg/ingester/client/client.go +++ b/pkg/ingester/client/client.go @@ -5,7 +5,7 @@ import ( "io" "time" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/util/server" 
"github.com/grafana/dskit/grpcclient" "github.com/grafana/dskit/middleware" @@ -16,8 +16,8 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" ) var ingesterClientRequestDuration = promauto.NewHistogramVec(prometheus.HistogramOpts{ diff --git a/pkg/ingester/client/compat.go b/pkg/ingester/client/compat.go index 4d6c3cacc3fd8..136c486694330 100644 --- a/pkg/ingester/client/compat.go +++ b/pkg/ingester/client/compat.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/ingester/encoding_test.go b/pkg/ingester/encoding_test.go index 200f97b6413b9..4bb1aab0b8da6 100644 --- a/pkg/ingester/encoding_test.go +++ b/pkg/ingester/encoding_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" ) func fillChunk(t testing.TB, c chunkenc.Chunk) { diff --git a/pkg/ingester/flush.go b/pkg/ingester/flush.go index 1326d916961ec..f9904ca8409e5 100644 --- a/pkg/ingester/flush.go +++ b/pkg/ingester/flush.go @@ -17,10 +17,10 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/ingester/flush_test.go b/pkg/ingester/flush_test.go index 86127a3177f91..a4255f7510b7a 100644 --- a/pkg/ingester/flush_test.go +++ b/pkg/ingester/flush_test.go @@ -22,22 +22,22 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/ingester/index/bitprefix.go b/pkg/ingester/index/bitprefix.go index fe24a885917bc..38df9381e49d0 100644 --- a/pkg/ingester/index/bitprefix.go +++ b/pkg/ingester/index/bitprefix.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // BitPrefixInvertedIndex is another inverted index implementation diff --git a/pkg/ingester/index/bitprefix_test.go b/pkg/ingester/index/bitprefix_test.go index fbb297bd9c265..9832e15ed60c7 100644 --- a/pkg/ingester/index/bitprefix_test.go +++ b/pkg/ingester/index/bitprefix_test.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func Test_BitPrefixGetShards(t *testing.T) { diff --git a/pkg/ingester/index/index.go b/pkg/ingester/index/index.go index 6536cc7f7c449..67246b081c544 100644 --- a/pkg/ingester/index/index.go +++ b/pkg/ingester/index/index.go @@ -17,10 +17,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/storage/stores/series" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/series" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const DefaultIndexShards = 32 diff --git a/pkg/ingester/index/index_test.go b/pkg/ingester/index/index_test.go index f34633c0c6b54..23873cbfc3fdf 100644 --- a/pkg/ingester/index/index_test.go +++ b/pkg/ingester/index/index_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util" ) func Test_GetShards(t *testing.T) { diff --git a/pkg/ingester/index/multi.go b/pkg/ingester/index/multi.go index 0bfa57806ad0a..2d94d2e9ea3e7 100644 --- a/pkg/ingester/index/multi.go +++ b/pkg/ingester/index/multi.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/common/model" 
"github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/config" ) type periodIndex struct { diff --git a/pkg/ingester/index/multi_test.go b/pkg/ingester/index/multi_test.go index 6be07effea8d0..d78f132bc975e 100644 --- a/pkg/ingester/index/multi_test.go +++ b/pkg/ingester/index/multi_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func MustParseDayTime(s string) config.DayTime { diff --git a/pkg/ingester/ingester.go b/pkg/ingester/ingester.go index 7d0fb671a0a57..1aede454d53e3 100644 --- a/pkg/ingester/ingester.go +++ b/pkg/ingester/ingester.go @@ -12,7 +12,7 @@ import ( "sync" "time" - lokilog "github.com/grafana/loki/pkg/logql/log" + lokilog "github.com/grafana/loki/v3/pkg/logql/log" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -29,28 +29,28 @@ import ( "github.com/prometheus/prometheus/model/labels" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/index" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - indexstore "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/wal" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/index" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + indexstore "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + 
index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/wal" ) const ( diff --git a/pkg/ingester/ingester_test.go b/pkg/ingester/ingester_test.go index 82a124f5116a4..bf7922fb61dfc 100644 --- a/pkg/ingester/ingester_test.go +++ b/pkg/ingester/ingester_test.go @@ -28,24 +28,24 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/index" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/index" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) func TestPrepareShutdownMarkerPathNotSet(t *testing.T) { diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index e0e9d5e4ca6b4..e0218c1a4d947 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -10,7 +10,7 @@ import ( "syscall" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" "github.com/go-kit/log/level" "github.com/grafana/dskit/httpgrpc" @@ -26,28 +26,28 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/index" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - 
"github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/deletion" - util_log "github.com/grafana/loki/pkg/util/log" - mathutil "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/index" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/deletion" + util_log "github.com/grafana/loki/v3/pkg/util/log" + mathutil "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/ingester/instance_test.go b/pkg/ingester/instance_test.go index 254779511465f..ed78943c23c4d 100644 --- a/pkg/ingester/instance_test.go +++ b/pkg/ingester/instance_test.go @@ -10,12 +10,12 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" "github.com/grafana/dskit/tenant" "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" "github.com/grafana/dskit/flagext" "github.com/pkg/errors" @@ -23,18 +23,18 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/plan" - loki_runtime "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/plan" + loki_runtime "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) func defaultConfig() *Config { diff --git a/pkg/ingester/limiter.go b/pkg/ingester/limiter.go index e48c2a018d277..193209a54f6b9 100644 --- a/pkg/ingester/limiter.go +++ b/pkg/ingester/limiter.go @@ -8,8 +8,8 @@ import ( "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/validation" + 
"github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/ingester/limiter_test.go b/pkg/ingester/limiter_test.go index b9646bb27d18f..6186e910663e0 100644 --- a/pkg/ingester/limiter_test.go +++ b/pkg/ingester/limiter_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestLimiter_AssertMaxStreamsPerUser(t *testing.T) { diff --git a/pkg/ingester/mapper.go b/pkg/ingester/mapper.go index 5677a2a08dbec..ced7c0d6833e6 100644 --- a/pkg/ingester/mapper.go +++ b/pkg/ingester/mapper.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.uber.org/atomic" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const maxMappedFP = 1 << 20 // About 1M fingerprints reserved for mapping. diff --git a/pkg/ingester/metrics.go b/pkg/ingester/metrics.go index e3d3a41c1a592..8b005860555f1 100644 --- a/pkg/ingester/metrics.go +++ b/pkg/ingester/metrics.go @@ -4,9 +4,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) type ingesterMetrics struct { diff --git a/pkg/ingester/recovery.go b/pkg/ingester/recovery.go index f6809f2a2bdf8..a93151e0e6fca 100644 --- a/pkg/ingester/recovery.go +++ b/pkg/ingester/recovery.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/prometheus/tsdb/wlog" "golang.org/x/net/context" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" ) type WALReader interface { diff --git a/pkg/ingester/recovery_test.go b/pkg/ingester/recovery_test.go index 525d39ce1d8ff..fd8f05136d6f5 100644 --- a/pkg/ingester/recovery_test.go +++ b/pkg/ingester/recovery_test.go @@ -17,14 +17,14 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/logproto" - loki_runtime "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/logproto" + loki_runtime "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) type MemoryWALReader struct { diff --git a/pkg/ingester/replay_controller.go b/pkg/ingester/replay_controller.go index b5bf6d7b56a0c..3982921300638 100644 --- a/pkg/ingester/replay_controller.go +++ b/pkg/ingester/replay_controller.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" "go.uber.org/atomic" - util_log "github.com/grafana/loki/pkg/util/log" + util_log 
"github.com/grafana/loki/v3/pkg/util/log" ) type replayFlusher struct { diff --git a/pkg/ingester/replay_controller_test.go b/pkg/ingester/replay_controller_test.go index 5559022eae97f..0cde538d7c503 100644 --- a/pkg/ingester/replay_controller_test.go +++ b/pkg/ingester/replay_controller_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type dumbFlusher struct { diff --git a/pkg/ingester/stream.go b/pkg/ingester/stream.go index 81ce436929251..d7a29b73e802d 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -15,16 +15,16 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/distributor/writefailures" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/distributor/writefailures" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) var ErrEntriesExist = errors.New("duplicate push - entries already exist") diff --git a/pkg/ingester/stream_rate_calculator.go b/pkg/ingester/stream_rate_calculator.go index ef6578ed5d1be..e021850a9bd55 100644 --- a/pkg/ingester/stream_rate_calculator.go +++ b/pkg/ingester/stream_rate_calculator.go @@ -4,7 +4,7 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/ingester/stream_test.go b/pkg/ingester/stream_test.go index d1b01f22746c2..26eef4e3a7936 100644 --- a/pkg/ingester/stream_test.go +++ b/pkg/ingester/stream_test.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/flagext" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/ingester/streams_map_test.go b/pkg/ingester/streams_map_test.go index 2468ffd7c79d8..d98369ff152a9 100644 --- a/pkg/ingester/streams_map_test.go +++ b/pkg/ingester/streams_map_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func TestStreamsMap(t *testing.T) { diff --git a/pkg/ingester/tailer.go b/pkg/ingester/tailer.go index 
25fdfdb740d7a..80cceba78fca6 100644 --- a/pkg/ingester/tailer.go +++ b/pkg/ingester/tailer.go @@ -10,11 +10,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "golang.org/x/net/context" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/ingester/tailer_test.go b/pkg/ingester/tailer_test.go index fa44cc0a7dcb8..1f49ec0095086 100644 --- a/pkg/ingester/tailer_test.go +++ b/pkg/ingester/tailer_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestTailer_RoundTrip(t *testing.T) { diff --git a/pkg/ingester/wal.go b/pkg/ingester/wal.go index 2bb60d65749d1..5a32aee050325 100644 --- a/pkg/ingester/wal.go +++ b/pkg/ingester/wal.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ingester/wal" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/ingester/wal" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/ingester/wal/encoding.go b/pkg/ingester/wal/encoding.go index fee0b8b5f314d..a21ce57bf34b1 100644 --- a/pkg/ingester/wal/encoding.go +++ b/pkg/ingester/wal/encoding.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/record" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // RecordType represents the type of the WAL/Checkpoint record. 
diff --git a/pkg/ingester/wal/encoding_test.go b/pkg/ingester/wal/encoding_test.go index 9b36c1c79917f..a72e1c160565d 100644 --- a/pkg/ingester/wal/encoding_test.go +++ b/pkg/ingester/wal/encoding_test.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/pkg/iter/cache.go b/pkg/iter/cache.go index db26244e60d2a..a6e12dffbce20 100644 --- a/pkg/iter/cache.go +++ b/pkg/iter/cache.go @@ -1,7 +1,7 @@ package iter import ( - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type CacheEntryIterator interface { diff --git a/pkg/iter/cache_test.go b/pkg/iter/cache_test.go index 9befe34383538..23ee9cb7d9954 100644 --- a/pkg/iter/cache_test.go +++ b/pkg/iter/cache_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func Test_CachedIterator(t *testing.T) { diff --git a/pkg/iter/categorized_labels_iterator.go b/pkg/iter/categorized_labels_iterator.go index 1e95cad09a16e..c91aa49911163 100644 --- a/pkg/iter/categorized_labels_iterator.go +++ b/pkg/iter/categorized_labels_iterator.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) type categorizeLabelsIterator struct { diff --git a/pkg/iter/categorized_labels_iterator_test.go b/pkg/iter/categorized_labels_iterator_test.go index 18259edfbf169..790ca5413aba6 100644 --- a/pkg/iter/categorized_labels_iterator_test.go +++ b/pkg/iter/categorized_labels_iterator_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestNewCategorizeLabelsIterator(t *testing.T) { diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index fa67da6a3bc0a..7c373ddeac7cd 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -7,10 +7,10 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/loser" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/loser" ) // EntryIterator iterates over entries in time-order. diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index c900f898f1be4..3c64c01e296e8 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) const ( diff --git a/pkg/iter/iterator.go b/pkg/iter/iterator.go index 8d0bde08ec652..61c727428c71a 100644 --- a/pkg/iter/iterator.go +++ b/pkg/iter/iterator.go @@ -1,6 +1,6 @@ package iter -import "github.com/grafana/loki/pkg/logproto" +import "github.com/grafana/loki/v3/pkg/logproto" // Iterator iterates over data in time-order. 
type Iterator interface { diff --git a/pkg/iter/sample_iterator.go b/pkg/iter/sample_iterator.go index 632ed9106df15..261b75a0b33c9 100644 --- a/pkg/iter/sample_iterator.go +++ b/pkg/iter/sample_iterator.go @@ -6,9 +6,9 @@ import ( "io" "sync" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" ) // SampleIterator iterates over samples in time-order. diff --git a/pkg/iter/sample_iterator_test.go b/pkg/iter/sample_iterator_test.go index ec739e4d5a290..da3113c547e66 100644 --- a/pkg/iter/sample_iterator_test.go +++ b/pkg/iter/sample_iterator_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" ) func TestNewPeekingSampleIterator(t *testing.T) { diff --git a/pkg/logcli/client/client.go b/pkg/logcli/client/client.go index 964abc13d30bd..73ddccd7efd17 100644 --- a/pkg/logcli/client/client.go +++ b/pkg/logcli/client/client.go @@ -19,12 +19,12 @@ import ( "github.com/grafana/dskit/backoff" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/build" ) const ( diff --git a/pkg/logcli/client/file.go b/pkg/logcli/client/file.go index 82274ef79fb8d..dd0432a79e172 100644 --- a/pkg/logcli/client/file.go +++ b/pkg/logcli/client/file.go @@ -11,15 +11,15 @@ import ( "github.com/gorilla/websocket" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - logqllog "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + logqllog "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/util/validation" "github.com/grafana/dskit/user" "github.com/prometheus/prometheus/model/labels" diff --git a/pkg/logcli/client/file_test.go b/pkg/logcli/client/file_test.go index 1e5a2ab77c630..1a1eac9fa7a7d 100644 --- a/pkg/logcli/client/file_test.go +++ b/pkg/logcli/client/file_test.go @@ -9,8 +9,8 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/pkg/logcli/index/stats.go b/pkg/logcli/index/stats.go index 
f67c6283ba556..723513a26cf25 100644 --- a/pkg/logcli/index/stats.go +++ b/pkg/logcli/index/stats.go @@ -7,8 +7,8 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logproto" ) type StatsQuery struct { diff --git a/pkg/logcli/index/volume.go b/pkg/logcli/index/volume.go index 85a378d20d255..b6a3205706912 100644 --- a/pkg/logcli/index/volume.go +++ b/pkg/logcli/index/volume.go @@ -3,11 +3,11 @@ package index import ( "log" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/print" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/print" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" ) // GetVolume executes a volume query and prints the results diff --git a/pkg/logcli/labelquery/labels.go b/pkg/logcli/labelquery/labels.go index 38e0a500df861..2759a2bb8cf22 100644 --- a/pkg/logcli/labelquery/labels.go +++ b/pkg/logcli/labelquery/labels.go @@ -5,8 +5,8 @@ import ( "log" "time" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/loghttp" ) // LabelQuery contains all necessary fields to execute label queries and print out the results diff --git a/pkg/logcli/output/default.go b/pkg/logcli/output/default.go index d6edf3c30d34a..cfa9f2ad34a1e 100644 --- a/pkg/logcli/output/default.go +++ b/pkg/logcli/output/default.go @@ -8,7 +8,7 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // DefaultOutput provides logs and metadata in human readable format diff --git a/pkg/logcli/output/default_test.go b/pkg/logcli/output/default_test.go index 61e87633362c6..121b6d4816007 100644 --- a/pkg/logcli/output/default_test.go +++ b/pkg/logcli/output/default_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestDefaultOutput_Format(t *testing.T) { diff --git a/pkg/logcli/output/jsonl.go b/pkg/logcli/output/jsonl.go index 6c9372cf2194f..793c47eeafb39 100644 --- a/pkg/logcli/output/jsonl.go +++ b/pkg/logcli/output/jsonl.go @@ -7,7 +7,7 @@ import ( "log" "time" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // JSONLOutput prints logs and metadata as JSON Lines, suitable for scripts diff --git a/pkg/logcli/output/jsonl_test.go b/pkg/logcli/output/jsonl_test.go index bd984e912b87d..22e81fd29ea9a 100644 --- a/pkg/logcli/output/jsonl_test.go +++ b/pkg/logcli/output/jsonl_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestJSONLOutput_Format(t *testing.T) { diff --git a/pkg/logcli/output/output.go b/pkg/logcli/output/output.go index 92d3a214c230e..be82e6e97e69a 100644 --- a/pkg/logcli/output/output.go +++ b/pkg/logcli/output/output.go @@ -8,7 +8,7 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // Blue color is excluded since we are already printing timestamp diff 
--git a/pkg/logcli/output/raw.go b/pkg/logcli/output/raw.go index 22aba03e35914..4431858efb7ea 100644 --- a/pkg/logcli/output/raw.go +++ b/pkg/logcli/output/raw.go @@ -5,7 +5,7 @@ import ( "io" "time" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // RawOutput prints logs in their original form, without any metadata diff --git a/pkg/logcli/output/raw_test.go b/pkg/logcli/output/raw_test.go index 3c45708374639..844e8e811afc5 100644 --- a/pkg/logcli/output/raw_test.go +++ b/pkg/logcli/output/raw_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestRawOutput_Format(t *testing.T) { diff --git a/pkg/logcli/print/print.go b/pkg/logcli/print/print.go index 6528b2c7ec4b3..0f7d5d131151e 100644 --- a/pkg/logcli/print/print.go +++ b/pkg/logcli/print/print.go @@ -11,11 +11,11 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/util" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type QueryResultPrinter struct { diff --git a/pkg/logcli/print/print_test.go b/pkg/logcli/print/print_test.go index 91ada1c6e5687..737e494545c6f 100644 --- a/pkg/logcli/print/print_test.go +++ b/pkg/logcli/print/print_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func Test_commonLabels(t *testing.T) { diff --git a/pkg/logcli/query/query.go b/pkg/logcli/query/query.go index 7e8c86e08ad15..84934e57730cc 100644 --- a/pkg/logcli/query/query.go +++ b/pkg/logcli/query/query.go @@ -16,22 +16,22 @@ import ( "github.com/prometheus/client_golang/prometheus" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/print" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - chunk "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/util/cfg" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/print" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + chunk "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/util/cfg" + 
"github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/validation" ) const schemaConfigFilename = "schemaconfig" diff --git a/pkg/logcli/query/query_test.go b/pkg/logcli/query/query_test.go index 605155bca3db2..32a6538558cae 100644 --- a/pkg/logcli/query/query_test.go +++ b/pkg/logcli/query/query_test.go @@ -17,17 +17,17 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/volume" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/volume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func Test_batch(t *testing.T) { diff --git a/pkg/logcli/query/tail.go b/pkg/logcli/query/tail.go index c1c092a9e1166..b65e546b904e3 100644 --- a/pkg/logcli/query/tail.go +++ b/pkg/logcli/query/tail.go @@ -13,11 +13,11 @@ import ( "github.com/gorilla/websocket" "github.com/grafana/dskit/backoff" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/logcli/output" - "github.com/grafana/loki/pkg/logcli/util" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/unmarshal" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/logcli/output" + "github.com/grafana/loki/v3/pkg/logcli/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/unmarshal" ) // TailQuery connects to the Loki websocket endpoint and tails logs diff --git a/pkg/logcli/seriesquery/series.go b/pkg/logcli/seriesquery/series.go index 94bd3cc842a05..c5d639ca6af1c 100644 --- a/pkg/logcli/seriesquery/series.go +++ b/pkg/logcli/seriesquery/series.go @@ -8,8 +8,8 @@ import ( "text/tabwriter" "time" - "github.com/grafana/loki/pkg/logcli/client" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logcli/client" + "github.com/grafana/loki/v3/pkg/loghttp" ) // SeriesQuery contains all necessary fields to execute label queries and print out the results diff --git a/pkg/logcli/util/util.go b/pkg/logcli/util/util.go index cb3a70f924a98..cf276d5ab365a 100644 --- a/pkg/logcli/util/util.go +++ b/pkg/logcli/util/util.go @@ -1,6 +1,6 @@ package util -import "github.com/grafana/loki/pkg/loghttp" +import "github.com/grafana/loki/v3/pkg/loghttp" func MatchLabels(on bool, l loghttp.LabelSet, names []string) loghttp.LabelSet { ret := loghttp.LabelSet{} diff --git a/pkg/loghttp/labels.go b/pkg/loghttp/labels.go index efa059fc9709c..b725340880262 100644 --- a/pkg/loghttp/labels.go +++ b/pkg/loghttp/labels.go @@ -9,7 +9,7 @@ import ( "github.com/gorilla/mux" "github.com/grafana/jsonparser" - 
"github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // LabelResponse represents the http json response to a label query diff --git a/pkg/loghttp/labels_test.go b/pkg/loghttp/labels_test.go index c2f0f0315b30b..c5b5c837f057b 100644 --- a/pkg/loghttp/labels_test.go +++ b/pkg/loghttp/labels_test.go @@ -9,7 +9,7 @@ import ( "github.com/gorilla/mux" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestParseLabelQuery(t *testing.T) { diff --git a/pkg/loghttp/legacy/tail.go b/pkg/loghttp/legacy/tail.go index a51629cb7c2ac..06e4b8b1b181a 100644 --- a/pkg/loghttp/legacy/tail.go +++ b/pkg/loghttp/legacy/tail.go @@ -3,7 +3,7 @@ package loghttp import ( "time" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // DroppedEntry represents a dropped entry in a tail call diff --git a/pkg/loghttp/params.go b/pkg/loghttp/params.go index 74597a1970d4f..7b9a7cb1f597a 100644 --- a/pkg/loghttp/params.go +++ b/pkg/loghttp/params.go @@ -12,9 +12,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) const ( diff --git a/pkg/loghttp/params_test.go b/pkg/loghttp/params_test.go index 873fdff36bb98..3456fdc2ed802 100644 --- a/pkg/loghttp/params_test.go +++ b/pkg/loghttp/params_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestHttp_defaultQueryRangeStep(t *testing.T) { diff --git a/pkg/loghttp/push/otlp.go b/pkg/loghttp/push/otlp.go index 8bd206fce29ac..2fa645ff4581d 100644 --- a/pkg/loghttp/push/otlp.go +++ b/pkg/loghttp/push/otlp.go @@ -18,9 +18,10 @@ import ( "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/plog/plogotlp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/push" - loki_util "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/push" + + "github.com/grafana/loki/v3/pkg/logproto" + loki_util "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/loghttp/push/otlp_test.go b/pkg/loghttp/push/otlp_test.go index c711c85905cf2..d9147e061db3a 100644 --- a/pkg/loghttp/push/otlp_test.go +++ b/pkg/loghttp/push/otlp_test.go @@ -13,8 +13,8 @@ import ( "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/push" ) func TestOTLPToLokiPushRequest(t *testing.T) { diff --git a/pkg/loghttp/push/push.go b/pkg/loghttp/push/push.go index f4b8771e5fec8..74c8a20198f2c 100644 --- a/pkg/loghttp/push/push.go +++ b/pkg/loghttp/push/push.go @@ -12,7 +12,7 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/push" "github.com/dustin/go-humanize" "github.com/go-kit/log" @@ -20,15 +20,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" - loki_util "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/unmarshal" - unmarshal2 "github.com/grafana/loki/pkg/util/unmarshal/legacy" + loki_util "github.com/grafana/loki/v3/pkg/util" + + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/unmarshal" + unmarshal2 "github.com/grafana/loki/v3/pkg/util/unmarshal/legacy" ) var ( diff --git a/pkg/loghttp/push/push_test.go b/pkg/loghttp/push/push_test.go index 8c4768615ce6c..ac83492d62eba 100644 --- a/pkg/loghttp/push/push_test.go +++ b/pkg/loghttp/push/push_test.go @@ -17,7 +17,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // GZip source string and return compressed string diff --git a/pkg/loghttp/query.go b/pkg/loghttp/query.go index 75f75c60ccc03..dcc07e427f1a9 100644 --- a/pkg/loghttp/query.go +++ b/pkg/loghttp/query.go @@ -15,12 +15,12 @@ import ( "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" ) var ( diff --git a/pkg/loghttp/query_test.go b/pkg/loghttp/query_test.go index e94199352f12e..889a8900eac76 100644 --- a/pkg/loghttp/query_test.go +++ b/pkg/loghttp/query_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) func TestParseRangeQuery(t *testing.T) { diff --git a/pkg/loghttp/series.go b/pkg/loghttp/series.go index 2cb22ec6cfeca..1faef6d6e540a 100644 --- a/pkg/loghttp/series.go +++ b/pkg/loghttp/series.go @@ -5,8 +5,8 @@ import ( "sort" "strings" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" ) type SeriesResponse struct { diff --git a/pkg/loghttp/series_test.go b/pkg/loghttp/series_test.go index 403b0c19af981..928e5350ccb52 100644 --- a/pkg/loghttp/series_test.go +++ b/pkg/loghttp/series_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestParseAndValidateSeriesQuery(t *testing.T) { diff --git a/pkg/loghttp/tail.go b/pkg/loghttp/tail.go index 9ad2219b10979..658ae112cce07 100644 --- a/pkg/loghttp/tail.go +++ b/pkg/loghttp/tail.go @@ -10,9 +10,9 @@ import ( "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) const ( diff --git a/pkg/loghttp/tail_test.go b/pkg/loghttp/tail_test.go index 6fe7163116675..06fd23f4f5f9d 100644 --- a/pkg/loghttp/tail_test.go +++ b/pkg/loghttp/tail_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) func TestParseTailQuery(t *testing.T) { diff --git a/pkg/logproto/alias.go b/pkg/logproto/alias.go index ab378fcbd0e81..294b19e85dcad 100644 --- a/pkg/logproto/alias.go +++ b/pkg/logproto/alias.go @@ -3,7 +3,7 @@ package logproto import ( "google.golang.org/grpc" - "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/push" ) // Aliases to avoid renaming all the imports of logproto diff --git a/pkg/logproto/bloomgateway.pb.go b/pkg/logproto/bloomgateway.pb.go index 98a22fd13168f..1ca062e722b33 100644 --- a/pkg/logproto/bloomgateway.pb.go +++ b/pkg/logproto/bloomgateway.pb.go @@ -8,8 +8,8 @@ import ( fmt "fmt" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" - github_com_grafana_loki_pkg_logql_syntax "github.com/grafana/loki/pkg/logql/syntax" - github_com_grafana_loki_pkg_querier_plan "github.com/grafana/loki/pkg/querier/plan" + github_com_grafana_loki_v3_pkg_logql_syntax "github.com/grafana/loki/v3/pkg/logql/syntax" + github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -37,8 +37,8 @@ type FilterChunkRefRequest struct { Through github_com_prometheus_common_model.Time `protobuf:"varint,2,opt,name=through,proto3,customtype=github.com/prometheus/common/model.Time" json:"through"` Refs []*GroupedChunkRefs `protobuf:"bytes,3,rep,name=refs,proto3" json:"refs,omitempty"` // TODO(salvacorts): Delete this field once the weekly release is done. 
- Filters []github_com_grafana_loki_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/pkg/logql/syntax.LineFilter" json:"filters"` - Plan github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan"` + Filters []github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter" json:"filters"` + Plan github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan"` } func (m *FilterChunkRefRequest) Reset() { *m = FilterChunkRefRequest{} } @@ -237,40 +237,41 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/bloomgateway.proto", fileDescriptor_a50b5dd1dbcd1415) } var fileDescriptor_a50b5dd1dbcd1415 = []byte{ - // 525 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x53, 0xbb, 0x6e, 0x13, 0x41, - 0x14, 0xdd, 0xc1, 0x26, 0x8f, 0x31, 0x2f, 0x8d, 0x42, 0xb4, 0x32, 0xd2, 0x78, 0x65, 0x21, 0x70, - 0xb5, 0x2b, 0x39, 0x0d, 0x82, 0xce, 0x91, 0x88, 0x90, 0x28, 0x60, 0x40, 0x14, 0x29, 0x90, 0xd6, - 0xce, 0xdd, 0x87, 0xbc, 0x3b, 0xb3, 0x9e, 0x99, 0x15, 0xb8, 0xe3, 0x13, 0xf8, 0x08, 0x0a, 0xbe, - 0x80, 0x6f, 0x48, 0xe9, 0x32, 0xa2, 0x88, 0xf0, 0xba, 0xa1, 0xcc, 0x27, 0x20, 0xcf, 0x7a, 0xb3, - 0x76, 0x04, 0x44, 0xa2, 0xa2, 0x9a, 0xc7, 0xbd, 0xe7, 0x9e, 0x7b, 0xee, 0x03, 0x77, 0xb2, 0x71, - 0xe8, 0x25, 0x22, 0xcc, 0xa4, 0xd0, 0xc2, 0x1b, 0x26, 0x42, 0xa4, 0xa1, 0xaf, 0xe1, 0x83, 0x3f, - 0x75, 0xcd, 0x17, 0xd9, 0xa9, 0x8c, 0xed, 0xbd, 0x50, 0x84, 0xa2, 0xf4, 0x5b, 0xde, 0x4a, 0x7b, - 0xfb, 0xc1, 0x46, 0x80, 0xea, 0x52, 0x1a, 0xbb, 0x5f, 0x1a, 0xf8, 0xfe, 0xf3, 0x38, 0xd1, 0x20, - 0x0f, 0xa3, 0x9c, 0x8f, 0x19, 0x04, 0x0c, 0x26, 0x39, 0x28, 0x4d, 0x0e, 0x71, 0x33, 0x90, 0x22, - 0xb5, 0x91, 0x83, 0x7a, 0x8d, 0x81, 0x77, 0x7a, 0xde, 0xb1, 0xbe, 0x9f, 0x77, 0x1e, 0x87, 0xb1, - 0x8e, 0xf2, 0xa1, 0x3b, 0x12, 0xa9, 0x97, 0x49, 0x91, 0x82, 0x8e, 0x20, 0x57, 0xde, 0x48, 0xa4, - 0xa9, 0xe0, 0x5e, 0x2a, 0x4e, 0x20, 0x71, 0xdf, 0xc6, 0x29, 0x30, 0x03, 0x26, 0x2f, 0xf0, 0xb6, - 0x8e, 0xa4, 0xc8, 0xc3, 0xc8, 0xbe, 0xf1, 0x6f, 0x71, 0x2a, 0x3c, 0x71, 0x71, 0x53, 0x42, 0xa0, - 0xec, 0x86, 0xd3, 0xe8, 0xb5, 0xfa, 0x6d, 0xf7, 0x52, 0xc8, 0x91, 0x14, 0x79, 0x06, 0x27, 0x55, - 0xfe, 0x8a, 0x19, 0x3f, 0x32, 0xc6, 0xdb, 0x81, 0x11, 0xa6, 0xec, 0xa6, 0x81, 0xec, 0xd5, 0x90, - 0x97, 0x31, 0x87, 0x52, 0xf5, 0xe0, 0xd9, 0x2a, 0xa1, 0x83, 0xb5, 0x84, 0x42, 0xe9, 0x07, 0x3e, - 0xf7, 0xbd, 0x44, 0x8c, 0x63, 0x6f, 0x55, 0xbd, 0x49, 0xe2, 0xa9, 0x29, 0xd7, 0xfe, 0xc7, 0x35, - 0x30, 0xab, 0x18, 0xc8, 0x7b, 0xdc, 0xcc, 0x12, 0x9f, 0xdb, 0x37, 0x1d, 0xd4, 0x6b, 0xf5, 0xef, - 0xd4, 0x4c, 0xaf, 0x12, 0x9f, 0x0f, 0x9e, 0xae, 0x38, 0xfa, 0x7f, 0xe3, 0x98, 0xe4, 0x20, 0x63, - 0x90, 0xde, 0x32, 0x8e, 0xfb, 0x3a, 0x07, 0x39, 0x5d, 0x62, 0x99, 0x89, 0xdb, 0x65, 0x78, 0xff, - 0x6a, 0x97, 0x54, 0x26, 0xb8, 0x02, 0xf2, 0x04, 0xef, 0x8e, 0x2a, 0xe5, 0x36, 0xba, 0xb6, 0x36, - 0xb5, 0x73, 0xf7, 0x1b, 0xc2, 0x3b, 0x6f, 0x22, 0x21, 0x35, 0x83, 0xe0, 0xbf, 0xeb, 0x76, 0x1b, - 0xef, 0x8c, 0x22, 0x18, 0x8d, 0x55, 0x9e, 0xda, 0x0d, 0x07, 0xf5, 0x6e, 0xb3, 0xcb, 0x77, 0x57, - 0xe3, 0x7b, 0x57, 0x75, 0x11, 0x07, 0xb7, 0x82, 0x98, 0x87, 0x20, 0x33, 0x19, 0x73, 0x6d, 0x64, - 0x34, 0xd9, 0xfa, 0x17, 0xd9, 0xc7, 0x5b, 0x1a, 0xb8, 0xcf, 
0xb5, 0xc9, 0x6d, 0x97, 0xad, 0x5e, - 0xe4, 0xd1, 0xc6, 0x5c, 0x91, 0xba, 0x76, 0x55, 0x6d, 0xca, 0x79, 0xea, 0x07, 0xf8, 0xd6, 0x60, - 0xb9, 0x7c, 0x47, 0xe5, 0xf2, 0x91, 0x77, 0xf8, 0xee, 0x66, 0x4b, 0x14, 0xe9, 0xd4, 0xe0, 0xdf, - 0xee, 0x54, 0xdb, 0xf9, 0xb3, 0x43, 0xd9, 0xce, 0xae, 0x35, 0x38, 0x9e, 0xcd, 0xa9, 0x75, 0x36, - 0xa7, 0xd6, 0xc5, 0x9c, 0xa2, 0x4f, 0x05, 0x45, 0x5f, 0x0b, 0x8a, 0x4e, 0x0b, 0x8a, 0x66, 0x05, - 0x45, 0x3f, 0x0a, 0x8a, 0x7e, 0x16, 0xd4, 0xba, 0x28, 0x28, 0xfa, 0xbc, 0xa0, 0xd6, 0x6c, 0x41, - 0xad, 0xb3, 0x05, 0xb5, 0x8e, 0x1f, 0x5e, 0x33, 0xbe, 0x86, 0x74, 0xb8, 0x65, 0x8e, 0x83, 0x5f, - 0x01, 0x00, 0x00, 0xff, 0xff, 0xbe, 0xe2, 0x64, 0x8a, 0x54, 0x04, 0x00, 0x00, + // 529 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x53, 0xbb, 0x6e, 0x13, 0x4d, + 0x14, 0xde, 0xf9, 0xed, 0x3f, 0x71, 0xc6, 0xdc, 0x34, 0x0a, 0xd1, 0xca, 0x48, 0xe3, 0x95, 0x0b, + 0xe2, 0x6a, 0x47, 0x72, 0x04, 0xa2, 0x81, 0xc2, 0x91, 0x88, 0x90, 0x28, 0x60, 0x40, 0x14, 0x48, + 0x14, 0x6b, 0xe7, 0xec, 0x45, 0xde, 0x9d, 0x59, 0xcf, 0xce, 0x02, 0xee, 0x78, 0x04, 0x1e, 0x03, + 0xf1, 0x00, 0x3c, 0x43, 0x4a, 0x97, 0x11, 0x45, 0x84, 0xd7, 0x0d, 0x65, 0x1e, 0x01, 0x79, 0xd6, + 0x1b, 0xdb, 0x11, 0xc8, 0x12, 0x15, 0xd5, 0x5c, 0xce, 0xf9, 0xce, 0x77, 0xbe, 0x73, 0xc1, 0xed, + 0x74, 0x14, 0xb0, 0x58, 0x06, 0xa9, 0x92, 0x5a, 0xb2, 0x41, 0x2c, 0x65, 0x12, 0x78, 0x1a, 0x3e, + 0x78, 0x13, 0xd7, 0x7c, 0x91, 0x46, 0x65, 0x6c, 0xed, 0x07, 0x32, 0x90, 0xa5, 0xdf, 0xe2, 0x56, + 0xda, 0x5b, 0xf7, 0x36, 0x02, 0x54, 0x97, 0xd2, 0xd8, 0xf9, 0x5a, 0xc3, 0x77, 0x9f, 0x46, 0xb1, + 0x06, 0x75, 0x1c, 0xe6, 0x62, 0xc4, 0xc1, 0xe7, 0x30, 0xce, 0x21, 0xd3, 0xe4, 0x18, 0xd7, 0x7d, + 0x25, 0x13, 0x1b, 0x39, 0xa8, 0x5b, 0xeb, 0xb3, 0xb3, 0x8b, 0xb6, 0xf5, 0xfd, 0xa2, 0x7d, 0x18, + 0x44, 0x3a, 0xcc, 0x07, 0xee, 0x50, 0x26, 0x2c, 0x55, 0x32, 0x01, 0x1d, 0x42, 0x9e, 0xb1, 0xa1, + 0x4c, 0x12, 0x29, 0x58, 0x22, 0x4f, 0x21, 0x76, 0x5f, 0x47, 0x09, 0x70, 0x03, 0x26, 0xcf, 0xf0, + 0xae, 0x0e, 0x95, 0xcc, 0x83, 0xd0, 0xfe, 0xef, 0xef, 0xe2, 0x54, 0x78, 0xe2, 0xe2, 0xba, 0x02, + 0x3f, 0xb3, 0x6b, 0x4e, 0xad, 0xdb, 0xec, 0xb5, 0xdc, 0x2b, 0x21, 0x27, 0x4a, 0xe6, 0x29, 0x9c, + 0x56, 0xf9, 0x67, 0xdc, 0xf8, 0x11, 0x81, 0x77, 0x7d, 0x23, 0x2c, 0xb3, 0xeb, 0x06, 0xb2, 0xbf, + 0x82, 0x3c, 0x8f, 0x04, 0x94, 0xaa, 0xfb, 0x4f, 0x96, 0x09, 0x3d, 0x5c, 0x4b, 0x28, 0x50, 0x9e, + 0xef, 0x09, 0x8f, 0xc5, 0x72, 0x14, 0xb1, 0xf7, 0x47, 0x6c, 0x59, 0xc0, 0x71, 0xcc, 0xb2, 0x89, + 0xd0, 0xde, 0xc7, 0x35, 0x3c, 0xaf, 0x48, 0x88, 0x87, 0xeb, 0x69, 0xec, 0x09, 0xfb, 0x7f, 0x07, + 0x75, 0x9b, 0xbd, 0x5b, 0x2b, 0xb2, 0x17, 0xb1, 0x27, 0xfa, 0x8f, 0x97, 0x34, 0x0f, 0xb6, 0xd0, + 0x8c, 0x73, 0x50, 0x11, 0x28, 0xb6, 0x08, 0xe5, 0xbe, 0xcc, 0x41, 0x4d, 0x16, 0x70, 0x6e, 0x42, + 0x77, 0x38, 0x3e, 0xb8, 0xde, 0xab, 0x2c, 0x95, 0x22, 0x03, 0xf2, 0x08, 0xef, 0x0d, 0x2b, 0xfd, + 0x36, 0xda, 0x5a, 0xa1, 0x95, 0x73, 0xe7, 0x1b, 0xc2, 0x8d, 0x57, 0xa1, 0x54, 0x9a, 0x83, 0xff, + 0xcf, 0xf5, 0xbc, 0x85, 0x1b, 0xc3, 0x10, 0x86, 0xa3, 0x2c, 0x4f, 0xec, 0x9a, 0x83, 0xba, 0x37, + 0xf9, 0xd5, 0xbb, 0xa3, 0xf1, 0x9d, 0xeb, 0xba, 0x88, 0x83, 0x9b, 0x7e, 0x24, 0x02, 0x50, 0xa9, + 0x8a, 0x84, 0x36, 0x32, 0xea, 0x7c, 0xfd, 0x8b, 0x1c, 0xe0, 0x1d, 0x0d, 0xc2, 0x13, 0xda, 0xe4, + 0xb6, 0xc7, 0x97, 0x2f, 0x72, 0x7f, 0x63, 0xba, 0xc8, 0xaa, 0x76, 0x55, 0x6d, 0xca, 0xa9, 0xea, + 0xf9, 0xf8, 0x46, 0x7f, 0xb1, 0x82, 0x27, 0xe5, 0x0a, 0x92, 0x37, 0xf8, 0xf6, 0x66, 0x4b, 0x32, + 0xd2, 0x5e, 0x81, 0x7f, 0xbb, 0x59, 0x2d, 0xe7, 0xcf, 
0x0e, 0x65, 0x3b, 0x3b, 0x56, 0xff, 0xdd, + 0x74, 0x46, 0xad, 0xf3, 0x19, 0xb5, 0x2e, 0x67, 0x14, 0x7d, 0x2a, 0x28, 0xfa, 0x52, 0x50, 0x74, + 0x56, 0x50, 0x34, 0x2d, 0x28, 0xfa, 0x51, 0x50, 0xf4, 0xb3, 0xa0, 0xd6, 0x65, 0x41, 0xd1, 0xe7, + 0x39, 0xb5, 0xa6, 0x73, 0x6a, 0x9d, 0xcf, 0xa9, 0xf5, 0xf6, 0x70, 0xfb, 0x10, 0x1b, 0xde, 0xc1, + 0x8e, 0x39, 0x8e, 0x7e, 0x05, 0x00, 0x00, 0xff, 0xff, 0x1b, 0x54, 0xbe, 0xd4, 0x5d, 0x04, 0x00, + 0x00, } func (this *FilterChunkRefRequest) Equal(that interface{}) bool { @@ -1053,7 +1054,7 @@ func (m *FilterChunkRefRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Filters = append(m.Filters, github_com_grafana_loki_pkg_logql_syntax.LineFilter{}) + m.Filters = append(m.Filters, github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter{}) if err := m.Filters[len(m.Filters)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/logproto/bloomgateway.proto b/pkg/logproto/bloomgateway.proto index 13d5c25e763f6..ffef97efe7ccd 100644 --- a/pkg/logproto/bloomgateway.proto +++ b/pkg/logproto/bloomgateway.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; message FilterChunkRefRequest { int64 from = 1 [ @@ -19,11 +19,11 @@ message FilterChunkRefRequest { repeated GroupedChunkRefs refs = 3; // TODO(salvacorts): Delete this field once the weekly release is done. repeated LineFilter filters = 4 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logql/syntax.LineFilter", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter", (gogoproto.nullable) = false ]; Plan plan = 5 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan", (gogoproto.nullable) = false ]; } diff --git a/pkg/logproto/compat.go b/pkg/logproto/compat.go index 25b5269e1ae6c..82979824a5f57 100644 --- a/pkg/logproto/compat.go +++ b/pkg/logproto/compat.go @@ -20,9 +20,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/timestamp" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" ) // ToWriteRequest converts matched slices of Labels, Samples and Metadata into a WriteRequest proto. diff --git a/pkg/logproto/compat_test.go b/pkg/logproto/compat_test.go index c4e3307d03e39..83b2e61787fa0 100644 --- a/pkg/logproto/compat_test.go +++ b/pkg/logproto/compat_test.go @@ -16,8 +16,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) // This test verifies that jsoninter uses our custom method for marshalling. 
diff --git a/pkg/logproto/extensions.go b/pkg/logproto/extensions.go index 9c0e5c3d432d5..19e1f7be3b0c5 100644 --- a/pkg/logproto/extensions.go +++ b/pkg/logproto/extensions.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // This is the separator define in the Prometheus Labels.Hash function. diff --git a/pkg/logproto/indexgateway.pb.go b/pkg/logproto/indexgateway.pb.go index 1229caebbb604..bd2650fbc01a9 100644 --- a/pkg/logproto/indexgateway.pb.go +++ b/pkg/logproto/indexgateway.pb.go @@ -8,7 +8,7 @@ import ( fmt "fmt" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" - stats "github.com/grafana/loki/pkg/logqlmodel/stats" + stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -249,53 +249,54 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/indexgateway.proto", fileDescriptor_d27585148d0a52c8) } var fileDescriptor_d27585148d0a52c8 = []byte{ - // 734 bytes of a gzipped FileDescriptorProto + // 737 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xbf, 0x4f, 0xdb, 0x4c, - 0x18, 0xf6, 0x91, 0x84, 0x0f, 0x8e, 0x0f, 0x86, 0xfb, 0x7e, 0x60, 0x05, 0xb0, 0xa3, 0xa8, 0x52, - 0xd3, 0xc5, 0xae, 0xe8, 0x50, 0xb5, 0x12, 0x12, 0x75, 0xa5, 0x44, 0x88, 0xb4, 0xa2, 0x06, 0x31, - 0x30, 0x94, 0x3a, 0xe1, 0x70, 0x2c, 0x6c, 0x5f, 0xb8, 0x3b, 0xab, 0x61, 0xeb, 0x58, 0x75, 0xaa, - 0xfa, 0x1f, 0x54, 0xea, 0xd0, 0x3f, 0x85, 0x91, 0x11, 0x75, 0xb0, 0x4a, 0x58, 0xaa, 0x4c, 0xcc, - 0x9d, 0x2a, 0xdf, 0xd9, 0x89, 0x09, 0x41, 0xa2, 0x5d, 0x72, 0xe7, 0xe7, 0x7d, 0xde, 0xe7, 0xfd, - 0x79, 0x0a, 0xd4, 0xbb, 0x47, 0xae, 0xe9, 0x13, 0xb7, 0x4b, 0x09, 0x27, 0xa6, 0x17, 0x1e, 0xe0, - 0x9e, 0xeb, 0x70, 0xfc, 0xd6, 0x39, 0x31, 0x04, 0x84, 0x16, 0xf2, 0x58, 0xb7, 0x55, 0xfe, 0xd7, - 0x25, 0x2e, 0x91, 0xec, 0xe4, 0x26, 0x59, 0xe5, 0xa5, 0x6b, 0x32, 0xd9, 0x25, 0x35, 0x56, 0x52, - 0xe3, 0xb1, 0x1f, 0x90, 0x03, 0xec, 0x9b, 0x8c, 0x3b, 0x9c, 0xc9, 0x5f, 0xc9, 0xa8, 0x7e, 0x9e, - 0x82, 0xf3, 0xdb, 0x1d, 0x87, 0x1e, 0x30, 0x1b, 0x1f, 0x47, 0x98, 0x71, 0xb4, 0x09, 0x8b, 0x87, - 0x94, 0x04, 0x2a, 0xa8, 0x80, 0x5a, 0xc1, 0x7a, 0x7c, 0x1a, 0xeb, 0xca, 0xb7, 0x58, 0xbf, 0xef, - 0x7a, 0xbc, 0x13, 0xb5, 0x8c, 0x36, 0x09, 0xcc, 0x2e, 0x25, 0x01, 0xe6, 0x1d, 0x1c, 0x31, 0xb3, - 0x4d, 0x82, 0x80, 0x84, 0xa6, 0x50, 0x37, 0x76, 0xbc, 0x00, 0x0f, 0x62, 0x5d, 0xb8, 0xdb, 0xe2, - 0x17, 0xed, 0xc0, 0xbf, 0x78, 0x87, 0x92, 0xc8, 0xed, 0xa8, 0x53, 0x42, 0xef, 0xe9, 0xef, 0xeb, - 0x65, 0x0a, 0x76, 0x76, 0x41, 0x3a, 0x2c, 0x1d, 0x47, 0x98, 0x9e, 0xa8, 0x85, 0x0a, 0xa8, 0xcd, - 0x5a, 0xb3, 0x83, 0x58, 0x97, 0x80, 0x2d, 0x0f, 0xd4, 0x84, 0xff, 0x73, 0x87, 0xba, 0x98, 0xef, - 0xb7, 0x4e, 0x38, 0x66, 0xfb, 0x5d, 0x4c, 0xf7, 0x59, 0x52, 0xa5, 0x5a, 0xac, 0x80, 0x5a, 0xd1, - 0x5a, 0x1c, 0xc4, 0xfa, 0x3f, 0x92, 0x61, 0x25, 0x84, 0x2d, 0x4c, 0x45, 0x13, 0xec, 0x49, 0x60, - 0xf5, 0x13, 0x80, 0x0b, 0x59, 0x8f, 0x58, 0x97, 0x84, 0x0c, 0xa3, 0x35, 0x38, 0x2d, 0xf4, 0x98, - 0x0a, 0x2a, 0x85, 0xda, 0xdc, 0xea, 0x7f, 0xc6, 0xf5, 0x61, 0x19, 0x82, 0x6f, 0x2d, 0x24, 0xd5, - 0x0e, 0x62, 0x3d, 0x25, 0xdb, 0xe9, 0x89, 0x9e, 0x41, 0x98, 0x0c, 0xc1, 0x63, 0xdc, 0x6b, 0x33, - 0xd1, 0x99, 0xb9, 0xd5, 0x79, 
0x43, 0xce, 0xc5, 0xc6, 0x2c, 0xf2, 0xb9, 0x85, 0x52, 0xd7, 0x1c, - 0xd1, 0xce, 0xdd, 0xab, 0xef, 0x01, 0x2c, 0x89, 0x20, 0x68, 0x1d, 0x4e, 0xb7, 0x48, 0x14, 0x8a, - 0x5c, 0x12, 0x21, 0x75, 0x3c, 0x97, 0xfa, 0x96, 0x25, 0xec, 0xa3, 0x74, 0x24, 0xdf, 0x4e, 0x4f, - 0xb4, 0x06, 0x4b, 0x22, 0x76, 0x9a, 0xc9, 0xb2, 0x31, 0x5c, 0xa3, 0x8d, 0x44, 0x69, 0x3b, 0xb1, - 0x65, 0xa5, 0xcb, 0x6e, 0x0b, 0xba, 0x2d, 0x8f, 0xea, 0x17, 0x00, 0x67, 0xb2, 0x18, 0x68, 0x13, - 0x16, 0x02, 0x2f, 0x14, 0xa9, 0x14, 0xad, 0x27, 0x83, 0x58, 0x4f, 0x3e, 0x7f, 0xc6, 0xba, 0x71, - 0x87, 0x81, 0xd7, 0xbd, 0xd0, 0xc5, 0xb4, 0x4b, 0xbd, 0x90, 0xdb, 0x89, 0x9b, 0x10, 0x73, 0x7a, - 0x22, 0xad, 0x4c, 0xcc, 0xe9, 0xfd, 0x91, 0x98, 0xd3, 0x5b, 0xfd, 0x50, 0x82, 0x7f, 0x8b, 0x7a, - 0x1a, 0xb2, 0x33, 0x68, 0x03, 0xc2, 0x57, 0xc9, 0xba, 0x08, 0x10, 0x2d, 0x8d, 0xaa, 0x1e, 0xa1, - 0xe9, 0xa3, 0x28, 0x2f, 0x4f, 0x36, 0xca, 0x96, 0x3c, 0x04, 0xa8, 0x09, 0xe7, 0x1a, 0x98, 0x3f, - 0xef, 0x44, 0xe1, 0x91, 0x8d, 0x0f, 0x51, 0x8e, 0x9e, 0x83, 0x33, 0xb1, 0x95, 0x5b, 0xac, 0x52, - 0xad, 0xaa, 0xa0, 0x3a, 0x9c, 0x6d, 0x60, 0xbe, 0x8d, 0xa9, 0x87, 0x19, 0x2a, 0x5f, 0x63, 0x4b, - 0x30, 0x53, 0x5a, 0x9a, 0x68, 0x1b, 0xea, 0xbc, 0x86, 0x8b, 0x4d, 0xa7, 0x85, 0xfd, 0x97, 0x4e, - 0x80, 0x59, 0x9d, 0xd0, 0x17, 0x98, 0x53, 0xaf, 0x9d, 0x7c, 0xa1, 0xda, 0xc8, 0xf3, 0x16, 0x4a, - 0x16, 0x63, 0x71, 0x8c, 0x99, 0xd3, 0x7f, 0x03, 0x55, 0x01, 0xed, 0x3a, 0x7e, 0x34, 0x1e, 0xe0, - 0xc1, 0x98, 0xdb, 0x04, 0xce, 0x1d, 0x22, 0x34, 0xe0, 0x4c, 0x52, 0x58, 0xb2, 0x66, 0xf9, 0x01, - 0xe5, 0xd7, 0xf2, 0xc6, 0x80, 0x6e, 0xee, 0x6c, 0x55, 0x41, 0xeb, 0xa2, 0xa5, 0xbb, 0xc4, 0x8f, - 0x02, 0x8c, 0x72, 0x01, 0x25, 0x92, 0xa9, 0xa8, 0x37, 0x0d, 0x43, 0x85, 0xa6, 0x1c, 0x8a, 0x7c, - 0xc0, 0x2b, 0x13, 0xdf, 0xfb, 0x30, 0x1b, 0xed, 0x36, 0x73, 0xb6, 0x30, 0xd6, 0xde, 0xd9, 0x85, - 0xa6, 0x9c, 0x5f, 0x68, 0xca, 0xd5, 0x85, 0x06, 0xde, 0xf5, 0x35, 0xf0, 0xb5, 0xaf, 0x81, 0xd3, - 0xbe, 0x06, 0xce, 0xfa, 0x1a, 0xf8, 0xde, 0xd7, 0xc0, 0x8f, 0xbe, 0xa6, 0x5c, 0xf5, 0x35, 0xf0, - 0xf1, 0x52, 0x53, 0xce, 0x2e, 0x35, 0xe5, 0xfc, 0x52, 0x53, 0xf6, 0xee, 0xe5, 0xf6, 0xde, 0xa5, - 0xce, 0xa1, 0x13, 0x3a, 0xa6, 0x4f, 0x8e, 0x3c, 0x33, 0xff, 0x27, 0xd0, 0x9a, 0x16, 0xc7, 0xa3, - 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xbd, 0xfc, 0xaf, 0x62, 0x06, 0x00, 0x00, + 0x18, 0xf6, 0x91, 0x84, 0x0f, 0x8e, 0x0f, 0x86, 0xfb, 0x7e, 0x60, 0x05, 0xb0, 0xa3, 0x2c, 0x5f, + 0xbe, 0xc5, 0xae, 0x60, 0xa8, 0x5a, 0x09, 0x89, 0xba, 0x52, 0x22, 0x44, 0x5a, 0x51, 0x83, 0x18, + 0x2a, 0xb5, 0xd4, 0x09, 0x87, 0x63, 0x61, 0xfb, 0xc2, 0xdd, 0xb9, 0x0d, 0x5b, 0xc7, 0xaa, 0x53, + 0xd5, 0xff, 0xa0, 0x52, 0x87, 0xfe, 0x29, 0x8c, 0x8c, 0xa8, 0x83, 0x55, 0xc2, 0x52, 0x65, 0x62, + 0xee, 0x54, 0xf9, 0xce, 0x4e, 0x4c, 0x08, 0x12, 0xed, 0xe2, 0x3b, 0x3f, 0xef, 0xf3, 0x3e, 0xef, + 0x4f, 0x27, 0x50, 0xef, 0x1e, 0xb9, 0xa6, 0x4f, 0xdc, 0x2e, 0x25, 0x9c, 0x98, 0x5e, 0x78, 0x80, + 0x7b, 0xae, 0xc3, 0xf1, 0x1b, 0xe7, 0xc4, 0x10, 0x10, 0x5a, 0xc8, 0x63, 0xdd, 0x56, 0xf9, 0x6f, + 0x97, 0xb8, 0x44, 0xb2, 0x93, 0x9b, 0x64, 0x95, 0x97, 0xae, 0xc9, 0x64, 0x97, 0xd4, 0x58, 0x49, + 0x8d, 0xc7, 0x7e, 0x40, 0x0e, 0xb0, 0x6f, 0x32, 0xee, 0x70, 0x26, 0x9f, 0x92, 0x51, 0xfd, 0x34, + 0x05, 0xe7, 0x77, 0x3a, 0x0e, 0x3d, 0x60, 0x36, 0x3e, 0x8e, 0x30, 0xe3, 0x68, 0x0b, 0x16, 0x0f, + 0x29, 0x09, 0x54, 0x50, 0x01, 0xb5, 0x82, 0x75, 0xff, 0x34, 0xd6, 0x95, 0xaf, 0xb1, 0xfe, 0x9f, + 0xeb, 0xf1, 0x4e, 0xd4, 0x32, 0xda, 0x24, 0x30, 0xbb, 0x94, 0x04, 0x98, 0x77, 0x70, 0xc4, 0xcc, + 0x36, 0x09, 0x02, 0x12, 0x9a, 0x42, 0xdd, 0xd8, 0xf5, 0x02, 0x3c, 
0x88, 0x75, 0xe1, 0x6e, 0x8b, + 0x27, 0xda, 0x85, 0x7f, 0xf0, 0x0e, 0x25, 0x91, 0xdb, 0x51, 0xa7, 0x84, 0xde, 0xc3, 0x5f, 0xd7, + 0xcb, 0x14, 0xec, 0xec, 0x82, 0x74, 0x58, 0x3a, 0x8e, 0x30, 0x3d, 0x51, 0x0b, 0x15, 0x50, 0x9b, + 0xb5, 0x66, 0x07, 0xb1, 0x2e, 0x01, 0x5b, 0x1e, 0xa8, 0x09, 0xff, 0xe5, 0x0e, 0x75, 0x31, 0xdf, + 0x6f, 0x9d, 0x70, 0xcc, 0xf6, 0xbb, 0x98, 0xee, 0xb3, 0xa4, 0x4a, 0xb5, 0x58, 0x01, 0xb5, 0xa2, + 0xb5, 0x38, 0x88, 0xf5, 0xbf, 0x24, 0xc3, 0x4a, 0x08, 0xdb, 0x98, 0x8a, 0x26, 0xd8, 0x93, 0xc0, + 0xea, 0x47, 0x00, 0x17, 0xb2, 0x1e, 0xb1, 0x2e, 0x09, 0x19, 0x46, 0xeb, 0x70, 0x5a, 0xe8, 0x31, + 0x15, 0x54, 0x0a, 0xb5, 0xb9, 0xd5, 0x7f, 0x8c, 0xeb, 0xc3, 0x32, 0x04, 0xdf, 0x5a, 0x48, 0xaa, + 0x1d, 0xc4, 0x7a, 0x4a, 0xb6, 0xd3, 0x13, 0x3d, 0x82, 0x30, 0x19, 0x82, 0xc7, 0xb8, 0xd7, 0x66, + 0xa2, 0x33, 0x73, 0xab, 0xf3, 0x86, 0x9c, 0x8b, 0x8d, 0x59, 0xe4, 0x73, 0x0b, 0xa5, 0xae, 0x39, + 0xa2, 0x9d, 0xbb, 0x57, 0xdf, 0x01, 0x58, 0x12, 0x41, 0xd0, 0x06, 0x9c, 0x6e, 0x91, 0x28, 0x14, + 0xb9, 0x24, 0x42, 0xea, 0x78, 0x2e, 0xf5, 0x6d, 0x4b, 0xd8, 0x47, 0xe9, 0x48, 0xbe, 0x9d, 0x9e, + 0x68, 0x1d, 0x96, 0x44, 0xec, 0x34, 0x93, 0x65, 0x63, 0xb8, 0x46, 0x9b, 0x89, 0xd2, 0x4e, 0x62, + 0xcb, 0x4a, 0x97, 0xdd, 0x16, 0x74, 0x5b, 0x1e, 0xd5, 0xcf, 0x00, 0xce, 0x64, 0x31, 0xd0, 0x16, + 0x2c, 0x04, 0x5e, 0x28, 0x52, 0x29, 0x5a, 0x0f, 0x06, 0xb1, 0x9e, 0xbc, 0xfe, 0x88, 0x75, 0xe3, + 0x0e, 0x03, 0xaf, 0x7b, 0xa1, 0x8b, 0x69, 0x97, 0x7a, 0x21, 0xb7, 0x13, 0x37, 0x21, 0xe6, 0xf4, + 0x44, 0x5a, 0x99, 0x98, 0xd3, 0xfb, 0x2d, 0x31, 0xa7, 0xb7, 0xfa, 0xbe, 0x04, 0xff, 0x14, 0xf5, + 0x34, 0x64, 0x67, 0xd0, 0x26, 0x84, 0xcf, 0x92, 0x75, 0x11, 0x20, 0x5a, 0x1a, 0x55, 0x3d, 0x42, + 0xd3, 0x8f, 0xa2, 0xbc, 0x3c, 0xd9, 0x28, 0x5b, 0x72, 0x0f, 0xa0, 0x26, 0x9c, 0x6b, 0x60, 0xfe, + 0xb8, 0x13, 0x85, 0x47, 0x36, 0x3e, 0x44, 0x39, 0x7a, 0x0e, 0xce, 0xc4, 0x56, 0x6e, 0xb1, 0x4a, + 0xb5, 0xaa, 0x82, 0xea, 0x70, 0xb6, 0x81, 0xf9, 0x0e, 0xa6, 0x1e, 0x66, 0xa8, 0x7c, 0x8d, 0x2d, + 0xc1, 0x4c, 0x69, 0x69, 0xa2, 0x6d, 0xa8, 0xf3, 0x12, 0x2e, 0x36, 0x9d, 0x16, 0xf6, 0x9f, 0x3a, + 0x01, 0x66, 0x75, 0x42, 0x9f, 0x60, 0x4e, 0xbd, 0x76, 0xf2, 0x86, 0x6a, 0x23, 0xcf, 0x5b, 0x28, + 0x59, 0x8c, 0xc5, 0x31, 0x66, 0x4e, 0xff, 0x15, 0x54, 0x05, 0xb4, 0xe7, 0xf8, 0xd1, 0x78, 0x80, + 0xff, 0xc7, 0xdc, 0x26, 0x70, 0xee, 0x10, 0xa1, 0x01, 0x67, 0x92, 0xc2, 0x92, 0x35, 0xcb, 0x0f, + 0x28, 0xbf, 0x96, 0x37, 0x06, 0x74, 0x73, 0x67, 0xab, 0x0a, 0xda, 0x10, 0x2d, 0xdd, 0x23, 0x7e, + 0x14, 0x60, 0x94, 0x0b, 0x28, 0x91, 0x4c, 0x45, 0xbd, 0x69, 0x18, 0x2a, 0x34, 0xe5, 0x50, 0xe4, + 0x07, 0xbc, 0x32, 0xf1, 0x7b, 0x1f, 0x66, 0xa3, 0xdd, 0x66, 0xce, 0x16, 0xc6, 0x7a, 0x71, 0x76, + 0xa1, 0x29, 0xe7, 0x17, 0x9a, 0x72, 0x75, 0xa1, 0x81, 0xb7, 0x7d, 0x0d, 0x7c, 0xe9, 0x6b, 0xe0, + 0xb4, 0xaf, 0x81, 0xb3, 0xbe, 0x06, 0xbe, 0xf5, 0x35, 0xf0, 0xbd, 0xaf, 0x29, 0x57, 0x7d, 0x0d, + 0x7c, 0xb8, 0xd4, 0x94, 0xb3, 0x4b, 0x4d, 0x39, 0xbf, 0xd4, 0x94, 0xe7, 0xf9, 0x5f, 0x4d, 0x97, + 0x3a, 0x87, 0x4e, 0xe8, 0x98, 0x3e, 0x39, 0xf2, 0xcc, 0xd7, 0x6b, 0x66, 0xfe, 0x7f, 0xa0, 0x35, + 0x2d, 0x8e, 0xb5, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x1e, 0x17, 0x36, 0xcf, 0x65, 0x06, 0x00, + 0x00, } func (this *ShardsRequest) Equal(that interface{}) bool { diff --git a/pkg/logproto/indexgateway.proto b/pkg/logproto/indexgateway.proto index 33cfbec317d57..1b54542d8f5a9 100644 --- a/pkg/logproto/indexgateway.proto +++ b/pkg/logproto/indexgateway.proto @@ -6,7 +6,7 @@ import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; import 
"pkg/logqlmodel/stats/stats.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; // This exists in a different file to retain proto namespacing compatibility with it's prior definition, but has been relocated to the logproto go pkg. service IndexGateway { diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 11482676c0efb..bf8d45cef9058 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -12,11 +12,11 @@ import ( proto "github.com/gogo/protobuf/proto" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - github_com_grafana_loki_pkg_logql_syntax "github.com/grafana/loki/pkg/logql/syntax" - stats "github.com/grafana/loki/pkg/logqlmodel/stats" - _ "github.com/grafana/loki/pkg/push" - github_com_grafana_loki_pkg_push "github.com/grafana/loki/pkg/push" - github_com_grafana_loki_pkg_querier_plan "github.com/grafana/loki/pkg/querier/plan" + github_com_grafana_loki_v3_pkg_logql_syntax "github.com/grafana/loki/v3/pkg/logql/syntax" + stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + _ "github.com/grafana/loki/v3/pkg/push" + github_com_grafana_loki_v3_pkg_push "github.com/grafana/loki/v3/pkg/push" + github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -217,14 +217,14 @@ func (m *StreamRate) GetPushes() uint32 { } type QueryRequest struct { - Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. - Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` - Start time.Time `protobuf:"bytes,3,opt,name=start,proto3,stdtime" json:"start"` - End time.Time `protobuf:"bytes,4,opt,name=end,proto3,stdtime" json:"end"` - Direction Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Shards []string `protobuf:"bytes,7,rep,name=shards,proto3" json:"shards,omitempty"` - Deletes []*Delete `protobuf:"bytes,8,rep,name=deletes,proto3" json:"deletes,omitempty"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,9,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. 
+ Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + Start time.Time `protobuf:"bytes,3,opt,name=start,proto3,stdtime" json:"start"` + End time.Time `protobuf:"bytes,4,opt,name=end,proto3,stdtime" json:"end"` + Direction Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Shards []string `protobuf:"bytes,7,rep,name=shards,proto3" json:"shards,omitempty"` + Deletes []*Delete `protobuf:"bytes,8,rep,name=deletes,proto3" json:"deletes,omitempty"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,9,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *QueryRequest) Reset() { *m = QueryRequest{} } @@ -310,12 +310,12 @@ func (m *QueryRequest) GetDeletes() []*Delete { } type SampleQueryRequest struct { - Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. - Start time.Time `protobuf:"bytes,2,opt,name=start,proto3,stdtime" json:"start"` - End time.Time `protobuf:"bytes,3,opt,name=end,proto3,stdtime" json:"end"` - Shards []string `protobuf:"bytes,4,rep,name=shards,proto3" json:"shards,omitempty"` - Deletes []*Delete `protobuf:"bytes,5,rep,name=deletes,proto3" json:"deletes,omitempty"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` // Deprecated: Do not use. + Start time.Time `protobuf:"bytes,2,opt,name=start,proto3,stdtime" json:"start"` + End time.Time `protobuf:"bytes,3,opt,name=end,proto3,stdtime" json:"end"` + Shards []string `protobuf:"bytes,4,rep,name=shards,proto3" json:"shards,omitempty"` + Deletes []*Delete `protobuf:"bytes,5,rep,name=deletes,proto3" json:"deletes,omitempty"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *SampleQueryRequest) Reset() { *m = SampleQueryRequest{} } @@ -490,8 +490,8 @@ func (m *Delete) GetEnd() int64 { } type QueryResponse struct { - Streams []github_com_grafana_loki_pkg_push.Stream `protobuf:"bytes,1,rep,name=streams,proto3,customtype=github.com/grafana/loki/pkg/push.Stream" json:"streams,omitempty"` - Stats stats.Ingester `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats"` + Streams []github_com_grafana_loki_v3_pkg_push.Stream `protobuf:"bytes,1,rep,name=streams,proto3,customtype=github.com/grafana/loki/v3/pkg/push.Stream" json:"streams,omitempty"` + Stats stats.Ingester `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } @@ -866,11 +866,11 @@ func (m *Series) GetStreamHash() uint64 { } type TailRequest struct { - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` // Deprecated: Do not use. 
- DelayFor uint32 `protobuf:"varint,3,opt,name=delayFor,proto3" json:"delayFor,omitempty"` - Limit uint32 `protobuf:"varint,4,opt,name=limit,proto3" json:"limit,omitempty"` - Start time.Time `protobuf:"bytes,5,opt,name=start,proto3,stdtime" json:"start"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` // Deprecated: Do not use. + DelayFor uint32 `protobuf:"varint,3,opt,name=delayFor,proto3" json:"delayFor,omitempty"` + Limit uint32 `protobuf:"varint,4,opt,name=limit,proto3" json:"limit,omitempty"` + Start time.Time `protobuf:"bytes,5,opt,name=start,proto3,stdtime" json:"start"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,6,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *TailRequest) Reset() { *m = TailRequest{} } @@ -935,8 +935,8 @@ func (m *TailRequest) GetStart() time.Time { } type TailResponse struct { - Stream *github_com_grafana_loki_pkg_push.Stream `protobuf:"bytes,1,opt,name=stream,proto3,customtype=github.com/grafana/loki/pkg/push.Stream" json:"stream,omitempty"` - DroppedStreams []*DroppedStream `protobuf:"bytes,2,rep,name=droppedStreams,proto3" json:"droppedStreams,omitempty"` + Stream *github_com_grafana_loki_v3_pkg_push.Stream `protobuf:"bytes,1,opt,name=stream,proto3,customtype=github.com/grafana/loki/v3/pkg/push.Stream" json:"stream,omitempty"` + DroppedStreams []*DroppedStream `protobuf:"bytes,2,rep,name=droppedStreams,proto3" json:"droppedStreams,omitempty"` } func (m *TailResponse) Reset() { *m = TailResponse{} } @@ -1790,8 +1790,8 @@ type GetChunkRefRequest struct { Through github_com_prometheus_common_model.Time `protobuf:"varint,2,opt,name=through,proto3,customtype=github.com/prometheus/common/model.Time" json:"through"` Matchers string `protobuf:"bytes,3,opt,name=matchers,proto3" json:"matchers,omitempty"` // TODO(salvacorts): Delete this field once the weekly release is done. 
- Filters []github_com_grafana_loki_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/pkg/logql/syntax.LineFilter" json:"filters"` - Plan github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan"` + Filters []github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter `protobuf:"bytes,4,rep,name=filters,proto3,customtype=github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter" json:"filters"` + Plan github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,5,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan"` } func (m *GetChunkRefRequest) Reset() { *m = GetChunkRefRequest{} } @@ -2877,159 +2877,160 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2431 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4b, 0x6f, 0x1b, 0xc7, - 0x99, 0x4b, 0x2e, 0x5f, 0x1f, 0x29, 0x59, 0x1e, 0xd1, 0x32, 0xc1, 0xd8, 0xa4, 0x3c, 0x48, 0x1c, - 0xd5, 0x71, 0xc4, 0x58, 0x6e, 0xdc, 0xd4, 0x6e, 0x90, 0x9a, 0x52, 0xec, 0xc8, 0x96, 0x1f, 0x19, - 0xb9, 0x6e, 0x6a, 0xb4, 0x35, 0x56, 0xe4, 0x88, 0x5a, 0x88, 0xdc, 0xa5, 0x77, 0x87, 0xb1, 0x05, - 0xf4, 0xd0, 0x3f, 0x10, 0x34, 0xb7, 0xa2, 0x97, 0xa2, 0x05, 0x0a, 0xa4, 0x40, 0xd1, 0x4b, 0x7f, - 0x40, 0x7b, 0xe9, 0xc1, 0xbd, 0x39, 0xb7, 0x20, 0x07, 0xb6, 0x96, 0x2f, 0x85, 0x4e, 0xb9, 0x15, - 0xe8, 0xa9, 0x98, 0xd7, 0xbe, 0x44, 0xb9, 0xa1, 0xea, 0xa2, 0xf5, 0x85, 0x3b, 0xf3, 0xcd, 0x37, - 0xdf, 0x7c, 0xaf, 0xf9, 0x1e, 0x43, 0x78, 0x65, 0xb0, 0xdd, 0x6d, 0xf6, 0xdc, 0xee, 0xc0, 0x73, - 0x99, 0x1b, 0x0c, 0x16, 0xc5, 0x2f, 0x2a, 0xe8, 0x79, 0xad, 0xd2, 0x75, 0xbb, 0xae, 0xc4, 0xe1, - 0x23, 0xb9, 0x5e, 0x6b, 0x74, 0x5d, 0xb7, 0xdb, 0xa3, 0x4d, 0x31, 0xdb, 0x18, 0x6e, 0x36, 0x99, - 0xdd, 0xa7, 0x3e, 0xb3, 0xfa, 0x03, 0x85, 0x30, 0xaf, 0xa8, 0x3f, 0xe8, 0xf5, 0xdd, 0x0e, 0xed, - 0x35, 0x7d, 0x66, 0x31, 0x5f, 0xfe, 0x2a, 0x8c, 0x59, 0x8e, 0x31, 0x18, 0xfa, 0x5b, 0xe2, 0x47, - 0x02, 0x71, 0x05, 0xd0, 0x3a, 0xf3, 0xa8, 0xd5, 0x27, 0x16, 0xa3, 0x3e, 0xa1, 0x0f, 0x86, 0xd4, - 0x67, 0xf8, 0x06, 0xcc, 0xc6, 0xa0, 0xfe, 0xc0, 0x75, 0x7c, 0x8a, 0x2e, 0x40, 0xc9, 0x0f, 0xc1, - 0x55, 0x63, 0x3e, 0xb3, 0x50, 0x5a, 0xaa, 0x2c, 0x06, 0xa2, 0x84, 0x7b, 0x48, 0x14, 0x11, 0xff, - 0xd2, 0x00, 0x08, 0xd7, 0x50, 0x1d, 0x40, 0xae, 0x7e, 0x60, 0xf9, 0x5b, 0x55, 0x63, 0xde, 0x58, - 0x30, 0x49, 0x04, 0x82, 0xce, 0xc2, 0xd1, 0x70, 0x76, 0xd3, 0x5d, 0xdf, 0xb2, 0xbc, 0x4e, 0x35, - 0x2d, 0xd0, 0xf6, 0x2f, 0x20, 0x04, 0xa6, 0x67, 0x31, 0x5a, 0xcd, 0xcc, 0x1b, 0x0b, 0x19, 0x22, - 0xc6, 0x68, 0x0e, 0x72, 0x8c, 0x3a, 0x96, 0xc3, 0xaa, 0xe6, 0xbc, 0xb1, 0x50, 0x24, 0x6a, 0xc6, - 0xe1, 0x5c, 0x76, 0xea, 0x57, 0xb3, 0xf3, 0xc6, 0xc2, 0x14, 0x51, 0x33, 0xfc, 0x59, 0x06, 0xca, - 0x1f, 0x0e, 0xa9, 0xb7, 0xa3, 0x14, 0x80, 0xea, 0x50, 0xf0, 0x69, 0x8f, 0xb6, 0x99, 0xeb, 0x09, - 0x06, 0x8b, 0xad, 0x74, 0xd5, 0x20, 0x01, 0x0c, 0x55, 0x20, 0xdb, 0xb3, 0xfb, 0x36, 0x13, 0x6c, - 0x4d, 0x11, 0x39, 0x41, 0x17, 0x21, 0xeb, 0x33, 0xcb, 0x63, 0x82, 0x97, 0xd2, 0x52, 0x6d, 0x51, - 0x1a, 0x6d, 0x51, 0x1b, 0x6d, 0xf1, 0x8e, 0x36, 0x5a, 0xab, 0xf0, 0x78, 0xd4, 0x48, 0x7d, 0xfa, - 0xd7, 0x86, 0x41, 0xe4, 0x16, 0x74, 0x01, 0x32, 0xd4, 0xe9, 0x08, 0x7e, 0xbf, 0xee, 0x4e, 0xbe, - 0x01, 0x9d, 0x83, 0x62, 0xc7, 0xf6, 0x68, 0x9b, 0xd9, 0xae, 0x23, 
0xa4, 0x9a, 0x5e, 0x9a, 0x0d, - 0x2d, 0xb2, 0xa2, 0x97, 0x48, 0x88, 0x85, 0xce, 0x42, 0xce, 0xe7, 0xaa, 0xf3, 0xab, 0xf9, 0xf9, - 0xcc, 0x42, 0xb1, 0x55, 0xd9, 0x1b, 0x35, 0x66, 0x24, 0xe4, 0xac, 0xdb, 0xb7, 0x19, 0xed, 0x0f, - 0xd8, 0x0e, 0x51, 0x38, 0xe8, 0x0c, 0xe4, 0x3b, 0xb4, 0x47, 0xb9, 0xc1, 0x0b, 0xc2, 0xe0, 0x33, - 0x11, 0xf2, 0x62, 0x81, 0x68, 0x04, 0x74, 0x0f, 0xcc, 0x41, 0xcf, 0x72, 0xaa, 0x45, 0x21, 0xc5, - 0x74, 0x88, 0x78, 0xbb, 0x67, 0x39, 0xad, 0x0b, 0x5f, 0x8e, 0x1a, 0x4b, 0x5d, 0x9b, 0x6d, 0x0d, - 0x37, 0x16, 0xdb, 0x6e, 0xbf, 0xd9, 0xf5, 0xac, 0x4d, 0xcb, 0xb1, 0x9a, 0x3d, 0x77, 0xdb, 0x6e, - 0x72, 0xe7, 0x7c, 0x30, 0xa4, 0x9e, 0x4d, 0xbd, 0x26, 0xa7, 0xb1, 0x28, 0xec, 0xc1, 0xf7, 0x11, - 0x41, 0xf3, 0x9a, 0x59, 0xc8, 0xcd, 0xe4, 0xf1, 0x28, 0x0d, 0x68, 0xdd, 0xea, 0x0f, 0x7a, 0x74, - 0x22, 0x7b, 0x05, 0x96, 0x49, 0x1f, 0xda, 0x32, 0x99, 0x49, 0x2d, 0x13, 0xaa, 0xd9, 0x9c, 0x4c, - 0xcd, 0xd9, 0xaf, 0xab, 0xe6, 0xdc, 0x8b, 0x57, 0x33, 0xae, 0x82, 0xc9, 0x67, 0x68, 0x06, 0x32, - 0x9e, 0xf5, 0x50, 0x28, 0xb3, 0x4c, 0xf8, 0x10, 0xaf, 0x41, 0x4e, 0x32, 0x82, 0x6a, 0x49, 0x6d, - 0xc7, 0x6f, 0x46, 0xa8, 0xe9, 0x8c, 0xd6, 0xe1, 0x4c, 0xa8, 0xc3, 0x8c, 0xd0, 0x0e, 0xfe, 0x95, - 0x01, 0x53, 0xca, 0x84, 0x2a, 0xba, 0x6c, 0x40, 0x5e, 0xde, 0x6e, 0x1d, 0x59, 0x8e, 0x27, 0x23, - 0xcb, 0xe5, 0x8e, 0x35, 0x60, 0xd4, 0x6b, 0x35, 0x1f, 0x8f, 0x1a, 0xc6, 0x97, 0xa3, 0xc6, 0xeb, - 0xcf, 0x93, 0x52, 0x04, 0x39, 0x15, 0x75, 0x34, 0x61, 0xf4, 0x86, 0xe0, 0x8e, 0xf9, 0xca, 0x0f, - 0x8e, 0x2c, 0xca, 0x00, 0xb9, 0xea, 0x74, 0xa9, 0xcf, 0x29, 0x9b, 0xdc, 0x84, 0x44, 0xe2, 0xe0, - 0x9f, 0xc0, 0x6c, 0xcc, 0xd5, 0x14, 0x9f, 0xef, 0x40, 0xce, 0xe7, 0x0a, 0xd4, 0x6c, 0x46, 0x0c, - 0xb5, 0x2e, 0xe0, 0xad, 0x69, 0xc5, 0x5f, 0x4e, 0xce, 0x89, 0xc2, 0x9f, 0xec, 0xf4, 0x3f, 0x1b, - 0x50, 0x5e, 0xb3, 0x36, 0x68, 0x4f, 0xfb, 0x38, 0x02, 0xd3, 0xb1, 0xfa, 0x54, 0x69, 0x5c, 0x8c, - 0x79, 0x40, 0xfb, 0xd8, 0xea, 0x0d, 0xa9, 0x24, 0x59, 0x20, 0x6a, 0x36, 0x69, 0x24, 0x32, 0x0e, - 0x1d, 0x89, 0x8c, 0xd0, 0xdf, 0x2b, 0x90, 0xe5, 0x9e, 0xb5, 0x23, 0xa2, 0x50, 0x91, 0xc8, 0x09, - 0x7e, 0x1d, 0xa6, 0x94, 0x14, 0x4a, 0x7d, 0x21, 0xcb, 0x5c, 0x7d, 0x45, 0xcd, 0x32, 0xee, 0x43, - 0x4e, 0x6a, 0x1b, 0xbd, 0x0a, 0xc5, 0x20, 0xbb, 0x09, 0x69, 0x33, 0xad, 0xdc, 0xde, 0xa8, 0x91, - 0x66, 0x3e, 0x09, 0x17, 0x50, 0x03, 0xb2, 0x62, 0xa7, 0x90, 0xdc, 0x68, 0x15, 0xf7, 0x46, 0x0d, - 0x09, 0x20, 0xf2, 0x83, 0x4e, 0x80, 0xb9, 0xc5, 0x13, 0x0c, 0x57, 0x81, 0xd9, 0x2a, 0xec, 0x8d, - 0x1a, 0x62, 0x4e, 0xc4, 0x2f, 0xbe, 0x0a, 0xe5, 0x35, 0xda, 0xb5, 0xda, 0x3b, 0xea, 0xd0, 0x8a, - 0x26, 0xc7, 0x0f, 0x34, 0x34, 0x8d, 0x53, 0x50, 0x0e, 0x4e, 0xbc, 0xdf, 0xf7, 0x95, 0x53, 0x97, - 0x02, 0xd8, 0x0d, 0x1f, 0xff, 0xc2, 0x00, 0x65, 0x67, 0x84, 0x21, 0xd7, 0xe3, 0xb2, 0xfa, 0x2a, - 0x06, 0xc1, 0xde, 0xa8, 0xa1, 0x20, 0x44, 0x7d, 0xd1, 0x25, 0xc8, 0xfb, 0xe2, 0x44, 0x4e, 0x2c, - 0xe9, 0x3e, 0x62, 0xa1, 0x75, 0x84, 0xbb, 0xc1, 0xde, 0xa8, 0xa1, 0x11, 0x89, 0x1e, 0xa0, 0xc5, - 0x58, 0xe6, 0x94, 0x82, 0x4d, 0xef, 0x8d, 0x1a, 0x11, 0x68, 0x34, 0x93, 0xe2, 0x7f, 0x18, 0x50, - 0xba, 0x63, 0xd9, 0x81, 0x0b, 0x55, 0xb5, 0x89, 0xc2, 0x18, 0x29, 0x01, 0xfc, 0x4a, 0x77, 0x68, - 0xcf, 0xda, 0xb9, 0xe2, 0x7a, 0x82, 0xee, 0x14, 0x09, 0xe6, 0x61, 0xb2, 0x33, 0xc7, 0x26, 0xbb, - 0xec, 0xe4, 0x21, 0xf5, 0xbf, 0x18, 0xc0, 0xae, 0x99, 0x85, 0xf4, 0x4c, 0x06, 0xff, 0xde, 0x80, - 0xb2, 0x94, 0x5c, 0xb9, 0xdd, 0x0f, 0x21, 0x27, 0x15, 0x23, 0x64, 0x7f, 0x4e, 0x70, 0x79, 0x63, - 0x92, 0xc0, 0xa2, 0x68, 0xa2, 0xf7, 0x60, 0xba, 0xe3, 0xb9, 0x83, 0x01, 0xed, 0xac, 0xab, 
0x10, - 0x96, 0x4e, 0x86, 0xb0, 0x95, 0xe8, 0x3a, 0x49, 0xa0, 0xe3, 0xbf, 0x18, 0x30, 0xa5, 0xa2, 0x85, - 0xb2, 0x55, 0xa0, 0x5f, 0xe3, 0xd0, 0x29, 0x2b, 0x3d, 0x69, 0xca, 0x9a, 0x83, 0x5c, 0xd7, 0x73, - 0x87, 0x03, 0xbf, 0x9a, 0x91, 0x77, 0x53, 0xce, 0x26, 0x4b, 0x65, 0xf8, 0x1a, 0x4c, 0x6b, 0x51, - 0x0e, 0x08, 0x99, 0xb5, 0x64, 0xc8, 0x5c, 0xed, 0x50, 0x87, 0xd9, 0x9b, 0x76, 0x10, 0x04, 0x15, - 0x3e, 0xfe, 0x99, 0x01, 0x33, 0x49, 0x14, 0xb4, 0x12, 0xb9, 0x67, 0x9c, 0xdc, 0xe9, 0x83, 0xc9, - 0x2d, 0x8a, 0xe0, 0xe3, 0xbf, 0xef, 0x30, 0x6f, 0x47, 0x93, 0x96, 0x7b, 0x6b, 0x6f, 0x43, 0x29, - 0xb2, 0xc8, 0x53, 0xd4, 0x36, 0x55, 0x37, 0x83, 0xf0, 0x61, 0x18, 0x12, 0xd2, 0x32, 0xa0, 0x89, - 0x09, 0xfe, 0xb9, 0x01, 0x53, 0x31, 0x5b, 0xa2, 0x77, 0xc0, 0xdc, 0xf4, 0xdc, 0xfe, 0x44, 0x86, - 0x12, 0x3b, 0xd0, 0x37, 0x21, 0xcd, 0xdc, 0x89, 0xcc, 0x94, 0x66, 0x2e, 0xb7, 0x92, 0x12, 0x3f, - 0x23, 0xab, 0x5b, 0x39, 0xc3, 0x6f, 0x43, 0x51, 0x08, 0x74, 0xdb, 0xb2, 0xbd, 0xb1, 0xd9, 0x62, - 0xbc, 0x40, 0x97, 0xe0, 0x88, 0x8c, 0x84, 0xe3, 0x37, 0x97, 0xc7, 0x6d, 0x2e, 0xeb, 0xcd, 0xaf, - 0x40, 0x76, 0x79, 0x6b, 0xe8, 0x6c, 0xf3, 0x2d, 0x1d, 0x8b, 0x59, 0x7a, 0x0b, 0x1f, 0xe3, 0x63, - 0x30, 0xcb, 0xef, 0x20, 0xf5, 0xfc, 0x65, 0x77, 0xe8, 0x30, 0xdd, 0x5d, 0x9c, 0x85, 0x4a, 0x1c, - 0xac, 0xbc, 0xa4, 0x02, 0xd9, 0x36, 0x07, 0x08, 0x1a, 0x53, 0x44, 0x4e, 0xf0, 0x6f, 0x0c, 0x40, - 0x57, 0x29, 0x13, 0xa7, 0xac, 0xae, 0x04, 0xd7, 0xa3, 0x06, 0x85, 0xbe, 0xc5, 0xda, 0x5b, 0xd4, - 0xf3, 0x75, 0x0d, 0xa2, 0xe7, 0xff, 0x8b, 0x6a, 0x0f, 0x9f, 0x83, 0xd9, 0x18, 0x97, 0x4a, 0xa6, - 0x1a, 0x14, 0xda, 0x0a, 0xa6, 0xf2, 0x5d, 0x30, 0xc7, 0x7f, 0x48, 0x43, 0x41, 0x6c, 0x20, 0x74, - 0x13, 0x9d, 0x83, 0xd2, 0xa6, 0xed, 0x74, 0xa9, 0x37, 0xf0, 0x6c, 0xa5, 0x02, 0xb3, 0x75, 0x64, - 0x6f, 0xd4, 0x88, 0x82, 0x49, 0x74, 0x82, 0xde, 0x84, 0xfc, 0xd0, 0xa7, 0xde, 0x7d, 0x5b, 0xde, - 0xf4, 0x62, 0xab, 0xb2, 0x3b, 0x6a, 0xe4, 0xbe, 0xe7, 0x53, 0x6f, 0x75, 0x85, 0x67, 0x9e, 0xa1, - 0x18, 0x11, 0xf9, 0xed, 0xa0, 0xeb, 0xca, 0x4d, 0x45, 0x11, 0xd6, 0xfa, 0x16, 0x67, 0x3f, 0x11, - 0xea, 0x06, 0x9e, 0xdb, 0xa7, 0x6c, 0x8b, 0x0e, 0xfd, 0x66, 0xdb, 0xed, 0xf7, 0x5d, 0xa7, 0x29, - 0x7a, 0x49, 0x21, 0x34, 0x4f, 0x9f, 0x7c, 0xbb, 0xf2, 0xdc, 0x3b, 0x90, 0x67, 0x5b, 0x9e, 0x3b, - 0xec, 0x6e, 0x89, 0xac, 0x90, 0x69, 0x5d, 0x9c, 0x9c, 0x9e, 0xa6, 0x40, 0xf4, 0x00, 0x9d, 0xe2, - 0xda, 0xa2, 0xed, 0x6d, 0x7f, 0xd8, 0x97, 0x1d, 0x5a, 0x2b, 0xbb, 0x37, 0x6a, 0x18, 0x6f, 0x92, - 0x00, 0x8c, 0x3f, 0x49, 0x43, 0x43, 0x38, 0xea, 0x5d, 0x51, 0x36, 0x5c, 0x71, 0xbd, 0x1b, 0x94, - 0x79, 0x76, 0xfb, 0xa6, 0xd5, 0xa7, 0xda, 0x37, 0x1a, 0x50, 0xea, 0x0b, 0xe0, 0xfd, 0xc8, 0x15, - 0x80, 0x7e, 0x80, 0x87, 0x4e, 0x02, 0x88, 0x3b, 0x23, 0xd7, 0xe5, 0x6d, 0x28, 0x0a, 0x88, 0x58, - 0x5e, 0x8e, 0x69, 0xaa, 0x39, 0xa1, 0x64, 0x4a, 0x43, 0xab, 0x49, 0x0d, 0x4d, 0x4c, 0x27, 0x50, - 0x4b, 0xd4, 0xd7, 0xb3, 0x71, 0x5f, 0xc7, 0x9f, 0x1b, 0x50, 0x5f, 0xd3, 0x9c, 0x1f, 0x52, 0x1d, - 0x5a, 0xde, 0xf4, 0x0b, 0x92, 0x37, 0xf3, 0x9f, 0xc9, 0x8b, 0xeb, 0x00, 0x6b, 0xb6, 0x43, 0xaf, - 0xd8, 0x3d, 0x46, 0xbd, 0x31, 0x9d, 0xc8, 0x27, 0x99, 0x30, 0x24, 0x10, 0xba, 0xa9, 0xe5, 0x5c, - 0x8e, 0xc4, 0xe1, 0x17, 0x21, 0x46, 0xfa, 0x05, 0x9a, 0x2d, 0x93, 0x08, 0x51, 0xdb, 0x90, 0xdf, - 0x14, 0xe2, 0xc9, 0x94, 0x1a, 0x7b, 0x46, 0x09, 0x65, 0x6f, 0x5d, 0x52, 0x87, 0x9f, 0x7f, 0x5e, - 0x41, 0x22, 0x5e, 0x7d, 0x9a, 0xfe, 0x8e, 0xc3, 0xac, 0x47, 0x91, 0xcd, 0x44, 0x9f, 0x80, 0x7e, - 0xac, 0xca, 0xad, 0xec, 0xd8, 0x72, 0x4b, 0xdf, 0xdc, 0xc3, 0xf7, 0x8c, 0xef, 0x86, 0xb1, 0x4f, - 0x98, 0x43, 0xc5, 
0xbe, 0xd3, 0x60, 0x7a, 0x74, 0x53, 0x27, 0x69, 0x14, 0x1e, 0x1b, 0x60, 0x8a, - 0x75, 0xfc, 0x47, 0x03, 0x66, 0xae, 0x52, 0x16, 0x2f, 0x7f, 0x5e, 0x22, 0x63, 0xe2, 0x0f, 0xe0, - 0x68, 0x84, 0x7f, 0x25, 0xfd, 0xf9, 0x44, 0xcd, 0x73, 0x2c, 0x94, 0x7f, 0xd5, 0xe9, 0xd0, 0x47, - 0xaa, 0x57, 0x8c, 0x97, 0x3b, 0xb7, 0xa1, 0x14, 0x59, 0x44, 0x97, 0x13, 0x85, 0x4e, 0xe4, 0x65, - 0x27, 0x48, 0xd6, 0xad, 0x8a, 0x92, 0x49, 0x76, 0x8b, 0xaa, 0x8c, 0x0d, 0x8a, 0x82, 0x75, 0x40, - 0xc2, 0x5c, 0x82, 0x6c, 0x34, 0x2d, 0x09, 0xe8, 0xf5, 0xa0, 0xe2, 0x09, 0xe6, 0xe8, 0x14, 0x98, - 0x9e, 0xfb, 0x50, 0x57, 0xb0, 0x53, 0xe1, 0x91, 0xc4, 0x7d, 0x48, 0xc4, 0x12, 0xbe, 0x04, 0x19, - 0xe2, 0x3e, 0x44, 0x75, 0x00, 0xcf, 0x72, 0xba, 0xf4, 0x6e, 0xd0, 0x38, 0x95, 0x49, 0x04, 0x72, - 0x40, 0xc9, 0xb0, 0x0c, 0x47, 0xa3, 0x1c, 0x49, 0x73, 0x2f, 0x42, 0xfe, 0xc3, 0x61, 0x54, 0x5d, - 0x95, 0x84, 0xba, 0x64, 0x0f, 0xae, 0x91, 0xb8, 0xcf, 0x40, 0x08, 0x47, 0x27, 0xa0, 0xc8, 0xac, - 0x8d, 0x1e, 0xbd, 0x19, 0x06, 0xb8, 0x10, 0xc0, 0x57, 0x79, 0xcf, 0x77, 0x37, 0x52, 0xfb, 0x84, - 0x00, 0x74, 0x06, 0x66, 0x42, 0x9e, 0x6f, 0x7b, 0x74, 0xd3, 0x7e, 0x24, 0x2c, 0x5c, 0x26, 0xfb, - 0xe0, 0x68, 0x01, 0x8e, 0x84, 0xb0, 0x75, 0x51, 0x63, 0x98, 0x02, 0x35, 0x09, 0xe6, 0xba, 0x11, - 0xe2, 0xbe, 0xff, 0x60, 0x68, 0xf5, 0xc4, 0xcd, 0x2b, 0x93, 0x08, 0x04, 0xff, 0xc9, 0x80, 0xa3, - 0xd2, 0xd4, 0xbc, 0xdb, 0x7f, 0x19, 0xbd, 0xfe, 0x33, 0x03, 0x50, 0x54, 0x02, 0xe5, 0x5a, 0xaf, - 0x45, 0x9f, 0x71, 0x78, 0x11, 0x53, 0x12, 0xad, 0xac, 0x04, 0x85, 0x2f, 0x31, 0x18, 0x72, 0xa2, - 0x10, 0x92, 0x3d, 0xb5, 0x29, 0x7b, 0x65, 0x09, 0x21, 0xea, 0xcb, 0x5b, 0xfc, 0x8d, 0x1d, 0x46, - 0x7d, 0xd5, 0xe9, 0x8a, 0x16, 0x5f, 0x00, 0x88, 0xfc, 0xf0, 0xb3, 0xa8, 0xc3, 0x84, 0xd7, 0x98, - 0xe1, 0x59, 0x0a, 0x44, 0xf4, 0x00, 0xff, 0x2e, 0x0d, 0x53, 0x77, 0xdd, 0xde, 0x30, 0x4c, 0x89, - 0x2f, 0x53, 0xaa, 0x88, 0xb5, 0xdf, 0x59, 0xdd, 0x7e, 0x23, 0x30, 0x7d, 0x46, 0x07, 0xc2, 0xb3, - 0x32, 0x44, 0x8c, 0x11, 0x86, 0x32, 0xb3, 0xbc, 0x2e, 0x65, 0xb2, 0xaf, 0xa9, 0xe6, 0x44, 0xc1, - 0x19, 0x83, 0xa1, 0x79, 0x28, 0x59, 0xdd, 0xae, 0x47, 0xbb, 0x16, 0xa3, 0xad, 0x9d, 0x6a, 0x5e, - 0x1c, 0x16, 0x05, 0xe1, 0x8f, 0x60, 0x5a, 0x2b, 0x4b, 0x99, 0xf4, 0x2d, 0xc8, 0x7f, 0x2c, 0x20, - 0x63, 0x9e, 0xbc, 0x24, 0xaa, 0x0a, 0x63, 0x1a, 0x2d, 0xfe, 0x3e, 0xae, 0x79, 0xc6, 0xd7, 0x20, - 0x27, 0xd1, 0xd1, 0x89, 0x68, 0x77, 0x22, 0xdf, 0x66, 0xf8, 0x5c, 0xb5, 0x1a, 0x18, 0x72, 0x92, - 0x90, 0x32, 0xbc, 0xf0, 0x0d, 0x09, 0x21, 0xea, 0x8b, 0x7f, 0x6d, 0xc0, 0xb1, 0x15, 0xca, 0x68, - 0x9b, 0xd1, 0xce, 0x15, 0x9b, 0xf6, 0x3a, 0x87, 0x6d, 0x9c, 0x8d, 0x43, 0x37, 0xce, 0xe3, 0xde, - 0xbe, 0x32, 0xd1, 0xb7, 0xaf, 0x55, 0x98, 0x4b, 0xb2, 0xa8, 0x34, 0xda, 0x84, 0xdc, 0xa6, 0x80, - 0xec, 0x7f, 0xea, 0x8c, 0xed, 0x20, 0x0a, 0x0d, 0x7b, 0x30, 0x15, 0x5b, 0x10, 0x1a, 0xe6, 0x16, - 0x55, 0xd1, 0x4e, 0x4e, 0xd0, 0x37, 0xc0, 0x64, 0x3b, 0x03, 0x15, 0xe4, 0x5a, 0xc7, 0xfe, 0x39, - 0x6a, 0x1c, 0x8d, 0x6d, 0xbb, 0xb3, 0x33, 0xa0, 0x44, 0xa0, 0x70, 0x47, 0x68, 0x5b, 0x5e, 0xc7, - 0x76, 0xac, 0x9e, 0xcd, 0x24, 0xe3, 0x26, 0x89, 0x82, 0x62, 0x2a, 0x96, 0xde, 0xf3, 0xff, 0xa7, - 0xe2, 0x1f, 0x84, 0x2a, 0xd6, 0x2c, 0x2a, 0x15, 0xbf, 0x07, 0xd3, 0x9d, 0xd8, 0xca, 0xc1, 0xaa, - 0x96, 0x0f, 0x94, 0x09, 0x74, 0xfc, 0x5a, 0xa8, 0x72, 0x01, 0x19, 0xaf, 0xf2, 0x33, 0xa7, 0xa1, - 0x18, 0xfc, 0xcb, 0x82, 0x4a, 0x90, 0xbf, 0x72, 0x8b, 0x7c, 0xff, 0x32, 0x59, 0x99, 0x49, 0xa1, - 0x32, 0x14, 0x5a, 0x97, 0x97, 0xaf, 0x8b, 0x99, 0xb1, 0xf4, 0x79, 0x56, 0xa7, 0x38, 0x0f, 0x7d, - 0x07, 0xb2, 0x32, 0x6f, 0xcd, 0x85, 0xcc, 
0x44, 0xff, 0xcf, 0xa8, 0x1d, 0xdf, 0x07, 0x97, 0x52, - 0xe1, 0xd4, 0x5b, 0x06, 0xba, 0x09, 0x25, 0x01, 0x54, 0x2f, 0x97, 0x27, 0x92, 0x0f, 0x88, 0x31, - 0x4a, 0x27, 0x0f, 0x58, 0x8d, 0xd0, 0xbb, 0x08, 0x59, 0x29, 0xe0, 0x5c, 0xa2, 0xbc, 0x18, 0xc3, - 0x4d, 0xec, 0x2d, 0x17, 0xa7, 0xd0, 0xb7, 0xc1, 0xe4, 0xbd, 0x3c, 0x8a, 0x54, 0x37, 0x91, 0x07, - 0xc7, 0xda, 0x5c, 0x12, 0x1c, 0x39, 0xf6, 0xdd, 0xe0, 0xdd, 0xf4, 0x78, 0xf2, 0xfd, 0x46, 0x6f, - 0xaf, 0xee, 0x5f, 0x08, 0x4e, 0xbe, 0x25, 0x1f, 0xf8, 0xf4, 0x2b, 0x02, 0x3a, 0x19, 0x3f, 0x2a, - 0xf1, 0xe8, 0x50, 0xab, 0x1f, 0xb4, 0x1c, 0x10, 0x5c, 0x83, 0x52, 0xa4, 0x83, 0x8f, 0xaa, 0x75, - 0xff, 0xf3, 0x43, 0x54, 0xad, 0x63, 0xda, 0x7e, 0x9c, 0x42, 0x57, 0xa1, 0xc0, 0x6b, 0x42, 0x9e, - 0x1a, 0xd1, 0x2b, 0xc9, 0xd2, 0x2f, 0x92, 0xf2, 0x6b, 0x27, 0xc6, 0x2f, 0x06, 0x84, 0xbe, 0x0b, - 0xc5, 0xab, 0x94, 0xa9, 0xb8, 0x79, 0x3c, 0x19, 0x78, 0xc7, 0x68, 0x2a, 0x1e, 0xbc, 0x71, 0x0a, - 0x7d, 0x24, 0xca, 0xd3, 0x78, 0x24, 0x42, 0x8d, 0x03, 0x22, 0x4e, 0xc0, 0xd7, 0xfc, 0xc1, 0x08, - 0x9a, 0xf2, 0xd2, 0x8f, 0xf4, 0xff, 0xba, 0x2b, 0x16, 0xb3, 0xd0, 0x2d, 0x98, 0x16, 0x22, 0x07, - 0x7f, 0xfc, 0xc6, 0x5c, 0x73, 0xdf, 0xbf, 0xcc, 0x31, 0xd7, 0xdc, 0xff, 0x6f, 0x33, 0x4e, 0xb5, - 0xee, 0x3d, 0x79, 0x5a, 0x4f, 0x7d, 0xf1, 0xb4, 0x9e, 0xfa, 0xea, 0x69, 0xdd, 0xf8, 0xe9, 0x6e, - 0xdd, 0xf8, 0xed, 0x6e, 0xdd, 0x78, 0xbc, 0x5b, 0x37, 0x9e, 0xec, 0xd6, 0x8d, 0xbf, 0xed, 0xd6, - 0x8d, 0xbf, 0xef, 0xd6, 0x53, 0x5f, 0xed, 0xd6, 0x8d, 0x4f, 0x9f, 0xd5, 0x53, 0x4f, 0x9e, 0xd5, - 0x53, 0x5f, 0x3c, 0xab, 0xa7, 0xee, 0xbd, 0xfa, 0x6f, 0x7a, 0x25, 0x19, 0x6a, 0x72, 0xe2, 0x73, - 0xfe, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xa2, 0x75, 0x76, 0x03, 0x96, 0x1f, 0x00, 0x00, + // 2439 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4b, 0x8f, 0x13, 0xc9, + 0xd9, 0x6d, 0xb7, 0x5f, 0x9f, 0x3d, 0xc3, 0x50, 0x63, 0x06, 0xcb, 0xcb, 0xda, 0x43, 0x29, 0x0b, + 0x13, 0xc2, 0xda, 0xcb, 0x10, 0xc8, 0x2e, 0x84, 0x6c, 0xf0, 0xcc, 0xc2, 0x0e, 0x0c, 0x8f, 0xad, + 0x21, 0x64, 0x13, 0x09, 0xa1, 0xc6, 0xae, 0xf1, 0xb4, 0xb0, 0xbb, 0x4d, 0x77, 0x19, 0xb0, 0x94, + 0x43, 0xfe, 0x40, 0x94, 0x95, 0x72, 0x88, 0x72, 0x89, 0x14, 0x25, 0x52, 0xa2, 0xe4, 0x12, 0xe5, + 0x07, 0x24, 0x97, 0x1c, 0xc8, 0x8d, 0xbd, 0xad, 0x38, 0x38, 0x61, 0xb8, 0x44, 0x73, 0xda, 0xf3, + 0x9e, 0xa2, 0x7a, 0xf5, 0xc3, 0xe3, 0x09, 0xeb, 0x59, 0x56, 0x09, 0x17, 0x77, 0xd5, 0x57, 0x5f, + 0x7d, 0xf5, 0xbd, 0xea, 0x7b, 0x94, 0xe1, 0x8d, 0xfe, 0xfd, 0x4e, 0xa3, 0xeb, 0x76, 0xfa, 0x9e, + 0xcb, 0xdc, 0x60, 0x50, 0x17, 0xbf, 0x28, 0xa7, 0xe7, 0x95, 0x52, 0xc7, 0xed, 0xb8, 0x12, 0x87, + 0x8f, 0xe4, 0x7a, 0xa5, 0xd6, 0x71, 0xdd, 0x4e, 0x97, 0x36, 0xc4, 0xec, 0xde, 0x60, 0xb3, 0xc1, + 0xec, 0x1e, 0xf5, 0x99, 0xd5, 0xeb, 0x2b, 0x84, 0x45, 0x45, 0xfd, 0x41, 0xb7, 0xe7, 0xb6, 0x69, + 0xb7, 0xe1, 0x33, 0x8b, 0xf9, 0xf2, 0x57, 0x61, 0xcc, 0x73, 0x8c, 0xfe, 0xc0, 0xdf, 0x12, 0x3f, + 0x12, 0x88, 0x4b, 0x80, 0x36, 0x98, 0x47, 0xad, 0x1e, 0xb1, 0x18, 0xf5, 0x09, 0x7d, 0x30, 0xa0, + 0x3e, 0xc3, 0xd7, 0x60, 0x3e, 0x06, 0xf5, 0xfb, 0xae, 0xe3, 0x53, 0x74, 0x16, 0x0a, 0x7e, 0x08, + 0x2e, 0x1b, 0x8b, 0xa9, 0xa5, 0xc2, 0x72, 0xa9, 0x1e, 0x88, 0x12, 0xee, 0x21, 0x51, 0x44, 0xfc, + 0x6b, 0x03, 0x20, 0x5c, 0x43, 0x55, 0x00, 0xb9, 0xfa, 0xa1, 0xe5, 0x6f, 0x95, 0x8d, 0x45, 0x63, + 0xc9, 0x24, 0x11, 0x08, 0x3a, 0x09, 0x07, 0xc3, 0xd9, 0x75, 0x77, 0x63, 0xcb, 0xf2, 0xda, 0xe5, + 0xa4, 0x40, 0xdb, 0xbd, 0x80, 0x10, 0x98, 0x9e, 0xc5, 0x68, 0x39, 0xb5, 0x68, 0x2c, 0xa5, 0x88, + 0x18, 0xa3, 0x05, 0xc8, 
0x30, 0xea, 0x58, 0x0e, 0x2b, 0x9b, 0x8b, 0xc6, 0x52, 0x9e, 0xa8, 0x19, + 0x87, 0x73, 0xd9, 0xa9, 0x5f, 0x4e, 0x2f, 0x1a, 0x4b, 0x33, 0x44, 0xcd, 0xf0, 0x1f, 0x53, 0x50, + 0xfc, 0x68, 0x40, 0xbd, 0xa1, 0x52, 0x00, 0xaa, 0x42, 0xce, 0xa7, 0x5d, 0xda, 0x62, 0xae, 0x27, + 0x18, 0xcc, 0x37, 0x93, 0x65, 0x83, 0x04, 0x30, 0x54, 0x82, 0x74, 0xd7, 0xee, 0xd9, 0x4c, 0xb0, + 0x35, 0x43, 0xe4, 0x04, 0x9d, 0x83, 0xb4, 0xcf, 0x2c, 0x8f, 0x09, 0x5e, 0x0a, 0xcb, 0x95, 0xba, + 0x34, 0x5a, 0x5d, 0x1b, 0xad, 0x7e, 0x4b, 0x1b, 0xad, 0x99, 0x7b, 0x32, 0xaa, 0x25, 0x3e, 0xf9, + 0x67, 0xcd, 0x20, 0x72, 0x0b, 0x3a, 0x0b, 0x29, 0xea, 0xb4, 0x05, 0xbf, 0x5f, 0x76, 0x27, 0xdf, + 0x80, 0x4e, 0x41, 0xbe, 0x6d, 0x7b, 0xb4, 0xc5, 0x6c, 0xd7, 0x11, 0x52, 0xcd, 0x2e, 0xcf, 0x87, + 0x16, 0x59, 0xd5, 0x4b, 0x24, 0xc4, 0x42, 0x27, 0x21, 0xe3, 0x73, 0xd5, 0xf9, 0xe5, 0xec, 0x62, + 0x6a, 0x29, 0xdf, 0x2c, 0xed, 0x8c, 0x6a, 0x73, 0x12, 0x72, 0xd2, 0xed, 0xd9, 0x8c, 0xf6, 0xfa, + 0x6c, 0x48, 0x14, 0x0e, 0x3a, 0x01, 0xd9, 0x36, 0xed, 0x52, 0x6e, 0xf0, 0x9c, 0x30, 0xf8, 0x5c, + 0x84, 0xbc, 0x58, 0x20, 0x1a, 0x01, 0xdd, 0x01, 0xb3, 0xdf, 0xb5, 0x9c, 0x72, 0x5e, 0x48, 0x31, + 0x1b, 0x22, 0xde, 0xec, 0x5a, 0x4e, 0xf3, 0xbd, 0x67, 0xa3, 0xda, 0x99, 0x8e, 0xcd, 0xb6, 0x06, + 0xf7, 0xea, 0x2d, 0xb7, 0xd7, 0xe8, 0x78, 0xd6, 0xa6, 0xe5, 0x58, 0x8d, 0xae, 0x7b, 0xdf, 0x6e, + 0x3c, 0x3c, 0xdd, 0xe0, 0xfe, 0xf9, 0x60, 0x40, 0x3d, 0x9b, 0x7a, 0x0d, 0x4e, 0xa6, 0x2e, 0x4c, + 0xc2, 0xb7, 0x12, 0x41, 0xf6, 0x8a, 0x99, 0xcb, 0xcc, 0x65, 0xf1, 0xf3, 0x24, 0xa0, 0x0d, 0xab, + 0xd7, 0xef, 0xd2, 0xa9, 0x4c, 0x16, 0x18, 0x27, 0xb9, 0x6f, 0xe3, 0xa4, 0xa6, 0x35, 0x4e, 0xa8, + 0x69, 0x73, 0x3a, 0x4d, 0xa7, 0xbf, 0xac, 0xa6, 0x33, 0x5f, 0x8b, 0xa6, 0x71, 0x19, 0x4c, 0x3e, + 0x43, 0x73, 0x90, 0xf2, 0xac, 0x47, 0x42, 0x9f, 0x45, 0xc2, 0x87, 0x78, 0x1d, 0x32, 0x92, 0x17, + 0x54, 0x19, 0x57, 0x78, 0xfc, 0x7e, 0x84, 0xca, 0x4e, 0x69, 0x35, 0xce, 0x85, 0x6a, 0x4c, 0x09, + 0x05, 0xe1, 0xdf, 0x1a, 0x30, 0xa3, 0xac, 0xa8, 0x62, 0x0c, 0x85, 0xac, 0xbc, 0xe3, 0x3a, 0xbe, + 0x1c, 0x1e, 0x8f, 0x2f, 0x17, 0xdb, 0x56, 0x9f, 0x51, 0xaf, 0xb9, 0xfc, 0x64, 0x54, 0x33, 0x9e, + 0x8d, 0x6a, 0x27, 0x5e, 0x22, 0xa8, 0x88, 0x76, 0x2a, 0xfc, 0x68, 0xda, 0xe8, 0x5b, 0x82, 0x41, + 0xe6, 0x2b, 0x6f, 0x38, 0x50, 0x97, 0x91, 0x72, 0xcd, 0xe9, 0x50, 0x9f, 0x13, 0x37, 0xb9, 0x21, + 0x89, 0xc4, 0xc1, 0x3f, 0x81, 0xf9, 0x98, 0xc3, 0x29, 0x56, 0xdf, 0x85, 0x8c, 0xcf, 0x75, 0xa8, + 0x39, 0x8d, 0x98, 0x6b, 0x43, 0xc0, 0x9b, 0xb3, 0x8a, 0xc5, 0x8c, 0x9c, 0x13, 0x85, 0x3f, 0xdd, + 0xe9, 0x7f, 0x37, 0xa0, 0xb8, 0x6e, 0xdd, 0xa3, 0x5d, 0xed, 0xe9, 0x08, 0x4c, 0xc7, 0xea, 0x51, + 0xa5, 0x74, 0x31, 0xe6, 0x91, 0xed, 0xa1, 0xd5, 0x1d, 0x50, 0x49, 0x32, 0x47, 0xd4, 0x6c, 0xda, + 0x90, 0x64, 0xec, 0x3b, 0x24, 0x19, 0xa1, 0xd7, 0x97, 0x20, 0xcd, 0x9d, 0x6b, 0x28, 0xc2, 0x51, + 0x9e, 0xc8, 0x09, 0x3e, 0x0e, 0x33, 0x4a, 0x0a, 0xa5, 0xbe, 0x90, 0x65, 0xae, 0xbe, 0xbc, 0x66, + 0x19, 0xf7, 0x20, 0x23, 0xb5, 0x8d, 0xbe, 0x01, 0xf9, 0x20, 0xcd, 0x09, 0x69, 0x53, 0xcd, 0xcc, + 0xce, 0xa8, 0x96, 0x64, 0x3e, 0x09, 0x17, 0x50, 0x0d, 0xd2, 0x62, 0xa7, 0x90, 0xdc, 0x68, 0xe6, + 0x77, 0x46, 0x35, 0x09, 0x20, 0xf2, 0x83, 0x8e, 0x80, 0xb9, 0xc5, 0x33, 0x0d, 0x57, 0x81, 0xd9, + 0xcc, 0xed, 0x8c, 0x6a, 0x62, 0x4e, 0xc4, 0x2f, 0xbe, 0x0c, 0xc5, 0x75, 0xda, 0xb1, 0x5a, 0x43, + 0x75, 0x68, 0x49, 0x93, 0xe3, 0x07, 0x1a, 0x9a, 0xc6, 0x51, 0x28, 0x06, 0x27, 0xde, 0xed, 0xf9, + 0xca, 0xaf, 0x0b, 0x01, 0xec, 0x9a, 0x8f, 0x7f, 0x65, 0x80, 0xb2, 0x33, 0xc2, 0x90, 0xe9, 0x72, + 0x59, 0x7d, 0x15, 0x89, 0x60, 0x67, 0x54, 0x53, 
0x10, 0xa2, 0xbe, 0xe8, 0x3c, 0x64, 0x7d, 0x71, + 0x22, 0x27, 0x36, 0xee, 0x3e, 0x62, 0xa1, 0x79, 0x80, 0xbb, 0xc1, 0xce, 0xa8, 0xa6, 0x11, 0x89, + 0x1e, 0xa0, 0x7a, 0x2c, 0x85, 0x4a, 0xc1, 0x66, 0x77, 0x46, 0xb5, 0x08, 0x34, 0x9a, 0x52, 0xf1, + 0x17, 0x06, 0x14, 0x6e, 0x59, 0x76, 0xe0, 0x42, 0x65, 0x6d, 0xa2, 0x30, 0x52, 0x4a, 0x00, 0xbf, + 0xd5, 0x6d, 0xda, 0xb5, 0x86, 0x97, 0x5c, 0x4f, 0xd0, 0x9d, 0x21, 0xc1, 0x3c, 0xcc, 0x7a, 0xe6, + 0xc4, 0xac, 0x97, 0x9e, 0x3e, 0xb0, 0x7e, 0xbd, 0x61, 0xec, 0x8a, 0x99, 0x4b, 0xce, 0xa5, 0xf0, + 0x9f, 0x0d, 0x28, 0x4a, 0xe1, 0x95, 0xe7, 0xdd, 0x85, 0x8c, 0xd4, 0x8d, 0x10, 0xff, 0xbf, 0x84, + 0x98, 0xfa, 0x94, 0xe1, 0x45, 0x91, 0x45, 0xef, 0xc3, 0x6c, 0xdb, 0x73, 0xfb, 0x7d, 0xda, 0xde, + 0x50, 0xb1, 0x2c, 0x39, 0x1e, 0xcb, 0x56, 0xa3, 0xeb, 0x64, 0x0c, 0x1d, 0xff, 0xc3, 0x80, 0x19, + 0x15, 0x33, 0x94, 0xc5, 0x02, 0x2d, 0x1b, 0xfb, 0x4e, 0x5f, 0xc9, 0x69, 0xd3, 0xd7, 0x02, 0x64, + 0x3a, 0x9e, 0x3b, 0xe8, 0xfb, 0xe5, 0x94, 0xbc, 0xa1, 0x72, 0x36, 0x5d, 0x5a, 0xc3, 0x57, 0x60, + 0x56, 0x8b, 0xb2, 0x47, 0xe0, 0xac, 0x8c, 0x07, 0xce, 0xb5, 0x36, 0x75, 0x98, 0xbd, 0x69, 0x07, + 0xa1, 0x50, 0xe1, 0xe3, 0x9f, 0x1b, 0x30, 0x37, 0x8e, 0x82, 0x56, 0x23, 0xb7, 0x8d, 0x93, 0x3b, + 0xb6, 0x37, 0xb9, 0xba, 0x08, 0x41, 0xfe, 0x07, 0x0e, 0xf3, 0x86, 0x9a, 0xb4, 0xdc, 0x5b, 0x39, + 0x03, 0x85, 0xc8, 0x22, 0xcf, 0x55, 0xf7, 0xa9, 0xba, 0x1f, 0x84, 0x0f, 0xc3, 0xc0, 0x90, 0x94, + 0x61, 0x4d, 0x4c, 0xf0, 0x2f, 0x0d, 0x98, 0x89, 0xd9, 0x12, 0xbd, 0x0b, 0xe6, 0xa6, 0xe7, 0xf6, + 0xa6, 0x32, 0x94, 0xd8, 0x81, 0xbe, 0x0d, 0x49, 0xe6, 0x4e, 0x65, 0xa6, 0x24, 0x73, 0xb9, 0x95, + 0x94, 0xf8, 0x29, 0x59, 0xec, 0xca, 0x19, 0x3e, 0x03, 0x79, 0x21, 0xd0, 0x4d, 0xcb, 0xf6, 0x26, + 0xe6, 0x8c, 0xc9, 0x02, 0x9d, 0x87, 0x03, 0x32, 0x1e, 0x4e, 0xde, 0x5c, 0x9c, 0xb4, 0xb9, 0xa8, + 0x37, 0xbf, 0x01, 0xe9, 0x95, 0xad, 0x81, 0x73, 0x9f, 0x6f, 0x69, 0x5b, 0xcc, 0xd2, 0x5b, 0xf8, + 0x18, 0x1f, 0x82, 0x79, 0x7e, 0x0d, 0xa9, 0xe7, 0xaf, 0xb8, 0x03, 0x87, 0xe9, 0x66, 0xe3, 0x24, + 0x94, 0xe2, 0x60, 0xe5, 0x25, 0x25, 0x48, 0xb7, 0x38, 0x40, 0xd0, 0x98, 0x21, 0x72, 0x82, 0x7f, + 0x67, 0x00, 0xba, 0x4c, 0x99, 0x38, 0x65, 0x6d, 0x35, 0xb8, 0x1e, 0x15, 0xc8, 0xf5, 0x2c, 0xd6, + 0xda, 0xa2, 0x9e, 0xaf, 0x8b, 0x11, 0x3d, 0xff, 0x5f, 0x54, 0x7e, 0xf8, 0x14, 0xcc, 0xc7, 0xb8, + 0x54, 0x32, 0x55, 0x20, 0xd7, 0x52, 0x30, 0x95, 0xf5, 0x82, 0x39, 0xfe, 0x4b, 0x12, 0x72, 0x62, + 0x03, 0xa1, 0x9b, 0xe8, 0x14, 0x14, 0x36, 0x6d, 0xa7, 0x43, 0xbd, 0xbe, 0x67, 0x2b, 0x15, 0x98, + 0xcd, 0x03, 0x3b, 0xa3, 0x5a, 0x14, 0x4c, 0xa2, 0x13, 0xf4, 0x36, 0x64, 0x07, 0x3e, 0xf5, 0xee, + 0xda, 0xf2, 0xa6, 0xe7, 0x9b, 0xa5, 0xed, 0x51, 0x2d, 0xf3, 0x03, 0x9f, 0x7a, 0x6b, 0xab, 0x3c, + 0xff, 0x0c, 0xc4, 0x88, 0xc8, 0x6f, 0x1b, 0x5d, 0x55, 0x6e, 0x2a, 0xaa, 0xb1, 0xe6, 0x77, 0x38, + 0xfb, 0xcf, 0x46, 0xb5, 0xe3, 0x91, 0x68, 0xd7, 0xf7, 0xdc, 0x1e, 0x65, 0x5b, 0x74, 0xe0, 0x37, + 0x5a, 0x6e, 0xaf, 0xe7, 0x3a, 0x0d, 0xd1, 0x5a, 0x0a, 0xa1, 0x79, 0x12, 0xe5, 0xdb, 0x95, 0xe7, + 0xde, 0x82, 0x2c, 0xdb, 0xf2, 0xdc, 0x41, 0x67, 0x4b, 0xe4, 0x86, 0x54, 0xf3, 0xdc, 0xf4, 0xf4, + 0x34, 0x05, 0xa2, 0x07, 0xe8, 0x28, 0xd7, 0x16, 0x6d, 0xdd, 0xf7, 0x07, 0x3d, 0xd9, 0xb0, 0x35, + 0xd3, 0x3b, 0xa3, 0x9a, 0xf1, 0x36, 0x09, 0xc0, 0xf8, 0x67, 0x49, 0xa8, 0x09, 0x47, 0xbd, 0x2d, + 0x8a, 0x87, 0x4b, 0xae, 0x77, 0x8d, 0x32, 0xcf, 0x6e, 0x5d, 0xb7, 0x7a, 0x54, 0xfb, 0x46, 0x0d, + 0x0a, 0x3d, 0x01, 0xbc, 0x1b, 0xb9, 0x02, 0xd0, 0x0b, 0xf0, 0xd0, 0x9b, 0x00, 0xe2, 0xce, 0xc8, + 0x75, 0x79, 0x1b, 0xf2, 0x02, 0x22, 0x96, 0x57, 0x62, 0x9a, 0x6a, 0x4c, 
0x29, 0x99, 0xd2, 0xd0, + 0xda, 0xb8, 0x86, 0xa6, 0xa6, 0x13, 0xa8, 0x25, 0xea, 0xeb, 0xe9, 0xb8, 0xaf, 0xe3, 0x4f, 0x0d, + 0xa8, 0xae, 0x6b, 0xce, 0xf7, 0xa9, 0x0e, 0x2d, 0x6f, 0xf2, 0x15, 0xc9, 0x9b, 0xfa, 0x6a, 0xf2, + 0xe2, 0x2a, 0xc0, 0xba, 0xed, 0xd0, 0x4b, 0x76, 0x97, 0x51, 0x6f, 0x42, 0x4b, 0xf2, 0x8b, 0x54, + 0x18, 0x12, 0x08, 0xdd, 0xd4, 0x72, 0xae, 0x44, 0xe2, 0xf0, 0xab, 0x10, 0x23, 0xf9, 0x0a, 0xcd, + 0x96, 0x1a, 0x0b, 0x51, 0x0e, 0x64, 0x37, 0x85, 0x78, 0x32, 0xa5, 0xc6, 0x5e, 0x55, 0x42, 0xd9, + 0x9b, 0xdf, 0x53, 0x87, 0x9f, 0x7d, 0x49, 0x4d, 0x22, 0xde, 0x81, 0x1a, 0xfe, 0xd0, 0x61, 0xd6, + 0xe3, 0xc8, 0x7e, 0xa2, 0x0f, 0x41, 0x96, 0xaa, 0xbb, 0xd2, 0x13, 0xeb, 0xae, 0x0b, 0xea, 0x98, + 0xaf, 0xd4, 0x42, 0x5e, 0x08, 0x23, 0xa0, 0x30, 0x8a, 0x8a, 0x80, 0xc7, 0xc0, 0xf4, 0xe8, 0xa6, + 0x4e, 0xd5, 0x28, 0x3c, 0x39, 0xc0, 0x14, 0xeb, 0xf8, 0xaf, 0x06, 0xcc, 0x5d, 0xa6, 0x2c, 0x5e, + 0x04, 0xbd, 0x46, 0x26, 0xc5, 0x1f, 0xc2, 0xc1, 0x08, 0xff, 0x4a, 0xfa, 0xd3, 0x63, 0x95, 0xcf, + 0xa1, 0x50, 0xfe, 0x35, 0xa7, 0x4d, 0x1f, 0xab, 0xbe, 0x31, 0x5e, 0xf4, 0xdc, 0x84, 0x42, 0x64, + 0x11, 0x5d, 0x1c, 0x2b, 0x77, 0x22, 0xcf, 0x3d, 0x41, 0xca, 0x6e, 0x96, 0x94, 0x4c, 0xb2, 0x73, + 0x54, 0xf5, 0x6c, 0x50, 0x1a, 0x6c, 0x00, 0x12, 0xe6, 0x12, 0x64, 0xa3, 0xc9, 0x49, 0x40, 0xaf, + 0x06, 0x75, 0x4f, 0x30, 0x47, 0x47, 0xc1, 0xf4, 0xdc, 0x47, 0xba, 0x8e, 0x9d, 0x09, 0x8f, 0x24, + 0xee, 0x23, 0x22, 0x96, 0xf0, 0x79, 0x48, 0x11, 0xf7, 0x11, 0xaa, 0x02, 0x78, 0x96, 0xd3, 0xa1, + 0xb7, 0x83, 0x26, 0xaa, 0x48, 0x22, 0x90, 0x3d, 0x0a, 0x87, 0x15, 0x38, 0x18, 0xe5, 0x48, 0x9a, + 0xbb, 0x0e, 0xd9, 0x8f, 0x06, 0x51, 0x75, 0x95, 0xc6, 0xd4, 0x25, 0xfb, 0x71, 0x8d, 0xc4, 0x7d, + 0x06, 0x42, 0x38, 0x3a, 0x02, 0x79, 0x66, 0xdd, 0xeb, 0xd2, 0xeb, 0x61, 0x98, 0x0b, 0x01, 0x7c, + 0x95, 0xf7, 0x7f, 0xb7, 0x23, 0x15, 0x50, 0x08, 0x40, 0x27, 0x60, 0x2e, 0xe4, 0xf9, 0xa6, 0x47, + 0x37, 0xed, 0xc7, 0xc2, 0xc2, 0x45, 0xb2, 0x0b, 0x8e, 0x96, 0xe0, 0x40, 0x08, 0xdb, 0x10, 0x95, + 0x86, 0x29, 0x50, 0xc7, 0xc1, 0x5c, 0x37, 0x42, 0xdc, 0x0f, 0x1e, 0x0c, 0xac, 0xae, 0xb8, 0x7c, + 0x45, 0x12, 0x81, 0xe0, 0xbf, 0x19, 0x70, 0x50, 0x9a, 0x9a, 0x77, 0xfe, 0xaf, 0xa3, 0xd7, 0xff, + 0xde, 0x00, 0x14, 0x95, 0x40, 0xb9, 0xd6, 0x5b, 0xd1, 0x57, 0x1d, 0x5e, 0xca, 0x14, 0x44, 0x5b, + 0x2b, 0x41, 0xe1, 0xab, 0x0c, 0x86, 0x8c, 0x28, 0x87, 0x64, 0x7f, 0x6d, 0xca, 0xbe, 0x59, 0x42, + 0x88, 0xfa, 0xf2, 0x76, 0xff, 0xde, 0x90, 0x51, 0x5f, 0x75, 0xbd, 0xa2, 0xdd, 0x17, 0x00, 0x22, + 0x3f, 0xfc, 0x2c, 0xea, 0x30, 0xe1, 0x35, 0x66, 0x78, 0x96, 0x02, 0x11, 0x3d, 0xc0, 0x7f, 0x4a, + 0xc2, 0xcc, 0x6d, 0xb7, 0x3b, 0x08, 0x13, 0xe3, 0xeb, 0x94, 0x30, 0x62, 0xad, 0x78, 0x5a, 0xb7, + 0xe2, 0x08, 0x4c, 0x9f, 0xd1, 0xbe, 0xf0, 0xac, 0x14, 0x11, 0x63, 0x84, 0xa1, 0xc8, 0x2c, 0xaf, + 0x43, 0x99, 0xec, 0x6e, 0xca, 0x19, 0x51, 0x76, 0xc6, 0x60, 0x68, 0x11, 0x0a, 0x56, 0xa7, 0xe3, + 0xd1, 0x8e, 0xc5, 0x68, 0x73, 0x58, 0xce, 0x8a, 0xc3, 0xa2, 0x20, 0xfc, 0x31, 0xcc, 0x6a, 0x65, + 0x29, 0x93, 0xbe, 0x03, 0xd9, 0x87, 0x02, 0x32, 0xe1, 0xf9, 0x4b, 0xa2, 0xaa, 0x30, 0xa6, 0xd1, + 0xe2, 0x8f, 0xe6, 0x9a, 0x67, 0x7c, 0x05, 0x32, 0x12, 0x1d, 0x1d, 0x89, 0xf6, 0x28, 0xf2, 0x9d, + 0x86, 0xcf, 0x55, 0xc3, 0x81, 0x21, 0x23, 0x09, 0x29, 0xc3, 0x0b, 0xdf, 0x90, 0x10, 0xa2, 0xbe, + 0xf8, 0x37, 0x06, 0x1c, 0x5a, 0xa5, 0x8c, 0xb6, 0x18, 0x6d, 0x5f, 0xb2, 0x69, 0xb7, 0xbd, 0xdf, + 0xf6, 0xd9, 0xd8, 0x77, 0xfb, 0x3c, 0xe9, 0x1d, 0x2c, 0x15, 0x7d, 0x07, 0x5b, 0x83, 0x85, 0x71, + 0x16, 0x95, 0x46, 0x1b, 0x90, 0xd9, 0x14, 0x90, 0xdd, 0x2f, 0x9f, 0xb1, 0x1d, 0x44, 0xa1, 0x61, + 
0x0f, 0x66, 0x62, 0x0b, 0x42, 0xc3, 0xdc, 0xa2, 0x2a, 0xda, 0xc9, 0x09, 0xfa, 0x26, 0x98, 0x6c, + 0xd8, 0x57, 0x41, 0xae, 0x79, 0xe8, 0x8b, 0x51, 0xed, 0x60, 0x6c, 0xdb, 0xad, 0x61, 0x9f, 0x12, + 0x81, 0xc2, 0x1d, 0xa1, 0x65, 0x79, 0x6d, 0xdb, 0xb1, 0xba, 0x36, 0x93, 0x8c, 0x9b, 0x24, 0x0a, + 0x8a, 0xa9, 0x58, 0x7a, 0xcf, 0xff, 0x9f, 0x8a, 0x7f, 0x14, 0xaa, 0x58, 0xb3, 0xa8, 0x54, 0xfc, + 0x3e, 0xcc, 0xb6, 0x63, 0x2b, 0x7b, 0xab, 0x5a, 0x3e, 0x56, 0x8e, 0xa1, 0xe3, 0xb7, 0x42, 0x95, + 0x0b, 0xc8, 0x64, 0x95, 0x9f, 0x38, 0x06, 0xf9, 0xe0, 0xaf, 0x17, 0x54, 0x80, 0xec, 0xa5, 0x1b, + 0xe4, 0x87, 0x17, 0xc9, 0xea, 0x5c, 0x02, 0x15, 0x21, 0xd7, 0xbc, 0xb8, 0x72, 0x55, 0xcc, 0x8c, + 0xe5, 0x4f, 0xd3, 0x3a, 0xc5, 0x79, 0xe8, 0xbb, 0x90, 0x96, 0x79, 0x6b, 0x21, 0x64, 0x26, 0xfa, + 0x0f, 0x47, 0xe5, 0xf0, 0x2e, 0xb8, 0x94, 0x0a, 0x27, 0xde, 0x31, 0xd0, 0x75, 0x28, 0x08, 0xa0, + 0x7a, 0xc5, 0x3c, 0x32, 0xfe, 0x98, 0x18, 0xa3, 0xf4, 0xe6, 0x1e, 0xab, 0x11, 0x7a, 0xe7, 0x20, + 0x2d, 0x05, 0x5c, 0x18, 0x2b, 0x2f, 0x26, 0x70, 0x13, 0x7b, 0xd7, 0xc5, 0x09, 0xf4, 0x1e, 0x98, + 0xbc, 0xa3, 0x47, 0x91, 0xea, 0x26, 0xf2, 0xf8, 0x58, 0x59, 0x18, 0x07, 0x47, 0x8e, 0xbd, 0x10, + 0xbc, 0xa1, 0x1e, 0x1e, 0x7f, 0xc5, 0xd1, 0xdb, 0xcb, 0xbb, 0x17, 0x82, 0x93, 0x6f, 0xc8, 0x97, + 0x3e, 0xfd, 0x96, 0x80, 0xde, 0x8c, 0x1f, 0x35, 0xf6, 0xf4, 0x50, 0xa9, 0xee, 0xb5, 0x1c, 0x10, + 0x5c, 0x87, 0x42, 0xa4, 0x8f, 0x8f, 0xaa, 0x75, 0xf7, 0x23, 0x44, 0x54, 0xad, 0x13, 0x9a, 0x7f, + 0x9c, 0x40, 0x97, 0x21, 0xc7, 0x6b, 0x42, 0x9e, 0x1a, 0xd1, 0x1b, 0xe3, 0xa5, 0x5f, 0x24, 0xe5, + 0x57, 0x8e, 0x4c, 0x5e, 0x0c, 0x08, 0x7d, 0x1f, 0xf2, 0x97, 0x29, 0x53, 0x71, 0xf3, 0xf0, 0x78, + 0xe0, 0x9d, 0xa0, 0xa9, 0x78, 0xf0, 0xc6, 0x09, 0xf4, 0xb1, 0x28, 0x4f, 0xe3, 0x91, 0x08, 0xd5, + 0xf6, 0x88, 0x38, 0x01, 0x5f, 0x8b, 0x7b, 0x23, 0x68, 0xca, 0xcb, 0x77, 0xf4, 0x9f, 0xbd, 0xab, + 0x16, 0xb3, 0xd0, 0x0d, 0x98, 0x15, 0x22, 0x07, 0xff, 0x06, 0xc7, 0x5c, 0x73, 0xd7, 0x5f, 0xcf, + 0x31, 0xd7, 0xdc, 0xfd, 0x17, 0x34, 0x4e, 0x34, 0xef, 0x3c, 0x7d, 0x5e, 0x4d, 0x7c, 0xf6, 0xbc, + 0x9a, 0xf8, 0xfc, 0x79, 0xd5, 0xf8, 0xe9, 0x76, 0xd5, 0xf8, 0xc3, 0x76, 0xd5, 0x78, 0xb2, 0x5d, + 0x35, 0x9e, 0x6e, 0x57, 0x8d, 0x7f, 0x6d, 0x57, 0x8d, 0x7f, 0x6f, 0x57, 0x13, 0x9f, 0x6f, 0x57, + 0x8d, 0x4f, 0x5e, 0x54, 0x13, 0x4f, 0x5f, 0x54, 0x13, 0x9f, 0xbd, 0xa8, 0x26, 0x7e, 0x7c, 0xfc, + 0xe5, 0x1d, 0x93, 0x8c, 0x36, 0x19, 0xf1, 0x39, 0xfd, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x15, + 0x81, 0x7e, 0x4a, 0xae, 0x1f, 0x00, 0x00, } func (x Direction) String() string { @@ -10434,7 +10435,7 @@ func (m *QueryRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -10687,7 +10688,7 @@ func (m *SampleQueryRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -10985,7 +10986,7 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Streams = append(m.Streams, github_com_grafana_loki_pkg_push.Stream{}) + m.Streams = append(m.Streams, github_com_grafana_loki_v3_pkg_push.Stream{}) if err := 
m.Streams[len(m.Streams)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -11946,7 +11947,7 @@ func (m *TailRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -12035,7 +12036,7 @@ func (m *TailResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Stream == nil { - m.Stream = &github_com_grafana_loki_pkg_push.Stream{} + m.Stream = &github_com_grafana_loki_v3_pkg_push.Stream{} } if err := m.Stream.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -14096,7 +14097,7 @@ func (m *GetChunkRefRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Filters = append(m.Filters, github_com_grafana_loki_pkg_logql_syntax.LineFilter{}) + m.Filters = append(m.Filters, github_com_grafana_loki_v3_pkg_logql_syntax.LineFilter{}) if err := m.Filters[len(m.Filters)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index 9dd58a8b5f3a4..2bd45890dd447 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -7,7 +7,7 @@ import "google/protobuf/timestamp.proto"; import "pkg/logqlmodel/stats/stats.proto"; import "pkg/push/push.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; service Querier { rpc Query(QueryRequest) returns (stream QueryResponse) {} @@ -67,7 +67,7 @@ message QueryRequest { reserved 6; repeated string shards = 7 [(gogoproto.jsontag) = "shards,omitempty"]; repeated Delete deletes = 8; - Plan plan = 9 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 9 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message SampleQueryRequest { @@ -82,7 +82,7 @@ message SampleQueryRequest { ]; repeated string shards = 4 [(gogoproto.jsontag) = "shards,omitempty"]; repeated Delete deletes = 5; - Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } // TODO(owen-d): fix. This will break rollouts as soon as the internal repr is changed. @@ -98,7 +98,7 @@ message Delete { message QueryResponse { repeated StreamAdapter streams = 1 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/push.Stream", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/push.Stream", (gogoproto.nullable) = true ]; stats.Ingester stats = 2 [(gogoproto.nullable) = false]; @@ -165,11 +165,11 @@ message TailRequest { (gogoproto.stdtime) = true, (gogoproto.nullable) = false ]; - Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 6 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message TailResponse { - StreamAdapter stream = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/push.Stream"]; + StreamAdapter stream = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/push.Stream"]; repeated DroppedStream droppedStreams = 2; } @@ -317,11 +317,11 @@ message GetChunkRefRequest { string matchers = 3; // TODO(salvacorts): Delete this field once the weekly release is done. 
repeated LineFilter filters = 4 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logql/syntax.LineFilter", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logql/syntax.LineFilter", (gogoproto.nullable) = false ]; Plan plan = 5 [ - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan", + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan", (gogoproto.nullable) = false ]; } diff --git a/pkg/logproto/metrics.pb.go b/pkg/logproto/metrics.pb.go index 49e45e03a9b40..facb92220d05c 100644 --- a/pkg/logproto/metrics.pb.go +++ b/pkg/logproto/metrics.pb.go @@ -342,46 +342,47 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/metrics.proto", fileDescriptor_d2388e514bd0aa0e) } var fileDescriptor_d2388e514bd0aa0e = []byte{ - // 623 bytes of a gzipped FileDescriptorProto + // 627 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0xcf, 0x6e, 0xd3, 0x4e, - 0x10, 0xf6, 0x26, 0x69, 0x92, 0x4e, 0xff, 0xfc, 0xac, 0x55, 0xf5, 0xc3, 0x04, 0x69, 0x53, 0x0c, - 0x87, 0x1e, 0x50, 0x22, 0x15, 0x09, 0x04, 0xe2, 0x92, 0xa0, 0x34, 0x54, 0x34, 0x7f, 0x58, 0x3b, - 0x54, 0xf4, 0x12, 0x6d, 0xd3, 0xad, 0xbb, 0xaa, 0x1d, 0x1b, 0xdb, 0x41, 0xca, 0x8d, 0x17, 0x40, - 0xe2, 0xcc, 0x13, 0xf0, 0x04, 0x3c, 0x43, 0x8f, 0x3d, 0x56, 0x1c, 0x2a, 0xea, 0x5e, 0x7a, 0xec, - 0x23, 0x20, 0xaf, 0x9d, 0x38, 0x45, 0xe2, 0xc6, 0xc9, 0x33, 0xf3, 0x7d, 0xdf, 0xcc, 0xe8, 0x1b, - 0x2f, 0x54, 0xbc, 0x53, 0xab, 0x6e, 0xbb, 0x96, 0xe7, 0xbb, 0xa1, 0x5b, 0x77, 0x78, 0xe8, 0x8b, - 0x51, 0x50, 0x93, 0x19, 0x2e, 0xcf, 0xea, 0x95, 0x0d, 0xcb, 0xb5, 0xdc, 0x84, 0x12, 0x47, 0x09, - 0x5e, 0x79, 0x70, 0x47, 0x3b, 0x0b, 0x12, 0x50, 0xff, 0x91, 0x83, 0xd5, 0x7d, 0x5f, 0x84, 0x9c, - 0xf2, 0x8f, 0x13, 0x1e, 0x84, 0xb8, 0x0f, 0x10, 0x0a, 0x87, 0x07, 0xdc, 0x17, 0x3c, 0xd0, 0xd0, - 0x66, 0x7e, 0x6b, 0x65, 0x7b, 0xa3, 0x36, 0x57, 0x99, 0xc2, 0xe1, 0x86, 0xc4, 0x9a, 0x95, 0xb3, - 0xcb, 0xaa, 0xf2, 0xf3, 0xb2, 0x8a, 0xfb, 0x3e, 0x67, 0xb6, 0xed, 0x8e, 0xcc, 0xb9, 0x8e, 0x2e, - 0xf4, 0xc0, 0x2f, 0xa0, 0x68, 0xb8, 0x13, 0x7f, 0xc4, 0xb5, 0xdc, 0x26, 0xda, 0x5a, 0xdf, 0x7e, - 0x98, 0x75, 0x5b, 0x9c, 0x5c, 0x4b, 0x48, 0xad, 0xf1, 0xc4, 0xa1, 0xa9, 0x00, 0xbf, 0x84, 0xb2, - 0xc3, 0x43, 0x76, 0xc4, 0x42, 0xa6, 0xe5, 0xe5, 0x2a, 0x5a, 0x26, 0xee, 0x48, 0x17, 0x3a, 0x29, - 0xde, 0x2c, 0x9c, 0x5d, 0x56, 0x11, 0x9d, 0xf3, 0xf1, 0x2b, 0xa8, 0x04, 0xa7, 0xc2, 0x1b, 0xda, - 0xec, 0x90, 0xdb, 0xc3, 0x31, 0x73, 0xf8, 0xf0, 0x13, 0xb3, 0xc5, 0x11, 0x0b, 0x85, 0x3b, 0xd6, - 0x6e, 0x4a, 0x9b, 0x68, 0xab, 0x4c, 0xef, 0xc5, 0x94, 0xbd, 0x98, 0xd1, 0x65, 0x0e, 0x7f, 0x3f, - 0xc7, 0xf5, 0x2a, 0x40, 0xb6, 0x0f, 0x2e, 0x41, 0xbe, 0xd1, 0xdf, 0x55, 0x15, 0x5c, 0x86, 0x02, - 0x1d, 0xec, 0xb5, 0x54, 0xa4, 0xff, 0x07, 0x6b, 0xe9, 0xf6, 0x81, 0xe7, 0x8e, 0x03, 0xae, 0x7f, - 0x41, 0x00, 0x99, 0x3b, 0xb8, 0x0d, 0x45, 0x39, 0x79, 0xe6, 0xe1, 0xfd, 0x6c, 0xf1, 0x3d, 0x6e, - 0xb1, 0xd1, 0x54, 0x4e, 0xed, 0x33, 0xe1, 0x37, 0x37, 0x52, 0x23, 0x57, 0x65, 0xa9, 0x71, 0xc4, - 0xbc, 0x90, 0xfb, 0x34, 0x95, 0xe3, 0x67, 0x50, 0x0a, 0x98, 0xe3, 0xd9, 0x3c, 0xd0, 0x72, 0xb2, - 0xd3, 0xff, 0x7f, 0x76, 0x32, 0x24, 0x2c, 0x0d, 0x50, 0xe8, 0x8c, 0xac, 0x7f, 0xcb, 0xc1, 0xfa, - 0x5d, 0x8b, 0xf0, 0x73, 0x28, 0x84, 0x53, 0x8f, 0x6b, 0x48, 0xde, 0xe1, 0xd1, 0xdf, 0xac, 0x4c, - 0x53, 0x73, 0xea, 0x71, 0x2a, 0x05, 0xf8, 0x09, 0xe0, 0xe4, 0x9f, 0x1b, 0x1e, 0x33, 0x47, 0xd8, - 0x53, 0x69, 0xa7, 0x3c, 0xe7, 0x32, 0x55, 0x13, 0x64, 0x47, 0x02, 0xb1, 0x8b, 0x18, 0x43, 0xe1, - 0x84, 
0xdb, 0x9e, 0x56, 0x90, 0xb8, 0x8c, 0xe3, 0xda, 0x64, 0x2c, 0x42, 0x6d, 0x29, 0xa9, 0xc5, - 0xb1, 0x3e, 0x05, 0xc8, 0x26, 0xe1, 0x15, 0x28, 0x0d, 0xba, 0x6f, 0xbb, 0xbd, 0xfd, 0xae, 0xaa, - 0xc4, 0xc9, 0xeb, 0xde, 0xa0, 0x6b, 0xb6, 0xa8, 0x8a, 0xf0, 0x32, 0x2c, 0xb5, 0x1b, 0x83, 0x76, - 0x4b, 0xcd, 0xe1, 0x35, 0x58, 0x7e, 0xb3, 0x6b, 0x98, 0xbd, 0x36, 0x6d, 0x74, 0xd4, 0x3c, 0xc6, - 0xb0, 0x2e, 0x91, 0xac, 0x56, 0x88, 0xa5, 0xc6, 0xa0, 0xd3, 0x69, 0xd0, 0x0f, 0xea, 0x52, 0x7c, - 0xaf, 0xdd, 0xee, 0x4e, 0x4f, 0x2d, 0xe2, 0x55, 0x28, 0x1b, 0x66, 0xc3, 0x6c, 0x19, 0x2d, 0x53, - 0x2d, 0xe9, 0xef, 0xa0, 0x98, 0x8c, 0xfe, 0x67, 0x77, 0x6a, 0x1e, 0x9c, 0x5f, 0x11, 0xe5, 0xe2, - 0x8a, 0x28, 0xb7, 0x57, 0x04, 0x7d, 0x8e, 0x08, 0xfa, 0x1e, 0x11, 0x74, 0x16, 0x11, 0x74, 0x1e, - 0x11, 0xf4, 0x2b, 0x22, 0xe8, 0x26, 0x22, 0xca, 0x6d, 0x44, 0xd0, 0xd7, 0x6b, 0xa2, 0x9c, 0x5f, - 0x13, 0xe5, 0xe2, 0x9a, 0x28, 0x07, 0x8f, 0x2d, 0x11, 0x9e, 0x4c, 0x0e, 0x6b, 0x23, 0xd7, 0xa9, - 0x5b, 0x3e, 0x3b, 0x66, 0x63, 0x56, 0xb7, 0xdd, 0x53, 0x51, 0x5f, 0x7c, 0xb4, 0x87, 0x45, 0xf9, - 0x79, 0xfa, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x8b, 0x68, 0x28, 0x2b, 0x07, 0x04, 0x00, 0x00, + 0x10, 0xf6, 0x26, 0x69, 0x92, 0x4e, 0xff, 0xfc, 0xac, 0x55, 0xf5, 0xc3, 0x04, 0x69, 0x53, 0xcc, + 0x81, 0x1e, 0x50, 0x22, 0xb5, 0x12, 0x08, 0xc4, 0x25, 0x41, 0x69, 0xa8, 0x68, 0xfe, 0xb0, 0x76, + 0xa8, 0x40, 0x42, 0xd1, 0x36, 0xdd, 0xba, 0xab, 0xda, 0xb1, 0xb1, 0x9d, 0x4a, 0xb9, 0xf1, 0x02, + 0x48, 0x9c, 0x79, 0x02, 0x9e, 0x80, 0x67, 0xe8, 0xb1, 0xc7, 0x8a, 0x43, 0x45, 0xdd, 0x4b, 0x8f, + 0x7d, 0x04, 0xe4, 0xb5, 0x13, 0xb7, 0x48, 0xdc, 0x38, 0x79, 0x66, 0xbe, 0xef, 0x9b, 0x19, 0x7d, + 0xe3, 0x85, 0x8a, 0x77, 0x6c, 0xd5, 0x6d, 0xd7, 0xf2, 0x7c, 0x37, 0x74, 0xeb, 0x0e, 0x0f, 0x7d, + 0x31, 0x0a, 0x6a, 0x32, 0xc3, 0xe5, 0x59, 0xbd, 0xb2, 0x66, 0xb9, 0x96, 0x9b, 0x50, 0xe2, 0x28, + 0xc1, 0x2b, 0x0f, 0xee, 0x68, 0x67, 0x41, 0x02, 0xea, 0x3f, 0x72, 0xb0, 0xbc, 0xe7, 0x8b, 0x90, + 0x53, 0xfe, 0x69, 0xc2, 0x83, 0x10, 0xf7, 0x01, 0x42, 0xe1, 0xf0, 0x80, 0xfb, 0x82, 0x07, 0x1a, + 0x5a, 0xcf, 0x6f, 0x2c, 0x6d, 0xae, 0xd5, 0xe6, 0x2a, 0x53, 0x38, 0xdc, 0x90, 0x58, 0xb3, 0x72, + 0x7a, 0x51, 0x55, 0x7e, 0x5e, 0x54, 0x71, 0xdf, 0xe7, 0xcc, 0xb6, 0xdd, 0x91, 0x39, 0xd7, 0xd1, + 0x5b, 0x3d, 0xf0, 0x73, 0x28, 0x1a, 0xee, 0xc4, 0x1f, 0x71, 0x2d, 0xb7, 0x8e, 0x36, 0x56, 0x37, + 0x1f, 0x66, 0xdd, 0x6e, 0x4f, 0xae, 0x25, 0xa4, 0xd6, 0x78, 0xe2, 0xd0, 0x54, 0x80, 0x5f, 0x40, + 0xd9, 0xe1, 0x21, 0x3b, 0x60, 0x21, 0xd3, 0xf2, 0x72, 0x15, 0x2d, 0x13, 0x77, 0xa4, 0x0b, 0x9d, + 0x14, 0x6f, 0x16, 0x4e, 0x2f, 0xaa, 0x88, 0xce, 0xf9, 0xf8, 0x25, 0x54, 0x82, 0x63, 0xe1, 0x0d, + 0x6d, 0xb6, 0xcf, 0xed, 0xe1, 0x98, 0x39, 0x7c, 0x78, 0xc2, 0x6c, 0x71, 0xc0, 0x42, 0xe1, 0x8e, + 0xb5, 0xeb, 0xd2, 0x3a, 0xda, 0x28, 0xd3, 0x7b, 0x31, 0x65, 0x37, 0x66, 0x74, 0x99, 0xc3, 0xdf, + 0xcd, 0x71, 0xbd, 0x0a, 0x90, 0xed, 0x83, 0x4b, 0x90, 0x6f, 0xf4, 0x77, 0x54, 0x05, 0x97, 0xa1, + 0x40, 0x07, 0xbb, 0x2d, 0x15, 0xe9, 0xff, 0xc1, 0x4a, 0xba, 0x7d, 0xe0, 0xb9, 0xe3, 0x80, 0xeb, + 0x5f, 0x10, 0x40, 0xe6, 0x0e, 0x6e, 0x43, 0x51, 0x4e, 0x9e, 0x79, 0x78, 0x3f, 0x5b, 0x7c, 0x97, + 0x5b, 0x6c, 0x34, 0x95, 0x53, 0xfb, 0x4c, 0xf8, 0xcd, 0xb5, 0xd4, 0xc8, 0x65, 0x59, 0x6a, 0x1c, + 0x30, 0x2f, 0xe4, 0x3e, 0x4d, 0xe5, 0xf8, 0x29, 0x94, 0x02, 0xe6, 0x78, 0x36, 0x0f, 0xb4, 0x9c, + 0xec, 0xf4, 0xff, 0x9f, 0x9d, 0x0c, 0x09, 0x4b, 0x03, 0x14, 0x3a, 0x23, 0xeb, 0xdf, 0x72, 0xb0, + 0x7a, 0xd7, 0x22, 0xfc, 0x0c, 0x0a, 0xe1, 0xd4, 0xe3, 0x1a, 0x92, 0x77, 0x78, 0xf4, 0x37, 0x2b, + 0xd3, 0xd4, 0x9c, 0x7a, 0x9c, 0x4a, 
0x01, 0x7e, 0x02, 0x38, 0xf9, 0xe7, 0x86, 0x87, 0xcc, 0x11, + 0xf6, 0x54, 0xda, 0x29, 0xcf, 0xb9, 0x48, 0xd5, 0x04, 0xd9, 0x96, 0x40, 0xec, 0x22, 0xc6, 0x50, + 0x38, 0xe2, 0xb6, 0xa7, 0x15, 0x24, 0x2e, 0xe3, 0xb8, 0x36, 0x19, 0x8b, 0x50, 0x5b, 0x48, 0x6a, + 0x71, 0xac, 0x4f, 0x01, 0xb2, 0x49, 0x78, 0x09, 0x4a, 0x83, 0xee, 0x9b, 0x6e, 0x6f, 0xaf, 0xab, + 0x2a, 0x71, 0xf2, 0xaa, 0x37, 0xe8, 0x9a, 0x2d, 0xaa, 0x22, 0xbc, 0x08, 0x0b, 0xed, 0xc6, 0xa0, + 0xdd, 0x52, 0x73, 0x78, 0x05, 0x16, 0x5f, 0xef, 0x18, 0x66, 0xaf, 0x4d, 0x1b, 0x1d, 0x35, 0x8f, + 0x31, 0xac, 0x4a, 0x24, 0xab, 0x15, 0x62, 0xa9, 0x31, 0xe8, 0x74, 0x1a, 0xf4, 0xbd, 0xba, 0x10, + 0xdf, 0x6b, 0xa7, 0xbb, 0xdd, 0x53, 0x8b, 0x78, 0x19, 0xca, 0x86, 0xd9, 0x30, 0x5b, 0x46, 0xcb, + 0x54, 0x4b, 0xfa, 0x5b, 0x28, 0x26, 0xa3, 0xff, 0xd9, 0x9d, 0x9a, 0x1f, 0xcf, 0x2e, 0x89, 0x72, + 0x7e, 0x49, 0x94, 0x9b, 0x4b, 0x82, 0x3e, 0x47, 0x04, 0x7d, 0x8f, 0x08, 0x3a, 0x8d, 0x08, 0x3a, + 0x8b, 0x08, 0xfa, 0x15, 0x11, 0x74, 0x1d, 0x11, 0xe5, 0x26, 0x22, 0xe8, 0xeb, 0x15, 0x51, 0xce, + 0xae, 0x88, 0x72, 0x7e, 0x45, 0x94, 0x0f, 0x8f, 0x2d, 0x11, 0x1e, 0x4d, 0xf6, 0x6b, 0x23, 0xd7, + 0xa9, 0x5b, 0x3e, 0x3b, 0x64, 0x63, 0x56, 0xb7, 0xdd, 0x63, 0x51, 0x3f, 0xd9, 0xaa, 0xdf, 0x7e, + 0xb7, 0xfb, 0x45, 0xf9, 0xd9, 0xfa, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x39, 0xa1, 0x2c, 0xef, 0x0a, + 0x04, 0x00, 0x00, } func (x WriteRequest_SourceEnum) String() string { diff --git a/pkg/logproto/metrics.proto b/pkg/logproto/metrics.proto index d0cfef0a69f58..1fc92f836312d 100644 --- a/pkg/logproto/metrics.proto +++ b/pkg/logproto/metrics.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/logproto/sketch.pb.go b/pkg/logproto/sketch.pb.go index c555d64d55970..ef0fa986b1e33 100644 --- a/pkg/logproto/sketch.pb.go +++ b/pkg/logproto/sketch.pb.go @@ -656,46 +656,47 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/sketch.proto", fileDescriptor_7f9fd40e59b87ff3) } var fileDescriptor_7f9fd40e59b87ff3 = []byte{ - // 623 bytes of a gzipped FileDescriptorProto + // 626 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0x41, 0x4f, 0xd4, 0x4e, - 0x14, 0xef, 0xfc, 0x77, 0xff, 0xcb, 0xf2, 0x16, 0x88, 0x8e, 0xc4, 0xd4, 0xc5, 0x4c, 0xd6, 0xc6, - 0x28, 0xd1, 0xb8, 0x9b, 0x40, 0x42, 0x38, 0x83, 0x07, 0x12, 0x45, 0x71, 0x20, 0xc6, 0x70, 0x31, - 0x43, 0x3b, 0x74, 0x27, 0xdb, 0x76, 0x9a, 0xce, 0x2c, 0xe0, 0xcd, 0x4f, 0x60, 0x8c, 0x9f, 0xc2, - 0xab, 0x1f, 0xc1, 0x9b, 0x47, 0x8e, 0x1c, 0xa5, 0x5c, 0x3c, 0xf2, 0x11, 0xcc, 0x4c, 0xdb, 0x85, - 0x2e, 0x31, 0x7a, 0xda, 0x79, 0xbf, 0xf7, 0x7b, 0xbf, 0xf9, 0xcd, 0x7b, 0x7d, 0x0b, 0xf7, 0xd2, - 0x51, 0x38, 0x88, 0x64, 0x98, 0x66, 0x52, 0xcb, 0x81, 0x1a, 0x71, 0xed, 0x0f, 0xfb, 0x36, 0xc0, - 0xed, 0x0a, 0xee, 0x2e, 0xd5, 0x48, 0xd5, 0xa1, 0xa0, 0x79, 0xaf, 0x60, 0xf1, 0xcd, 0x98, 0x25, - 0x5a, 0x44, 0x7c, 0xd7, 0x96, 0x6f, 0x33, 0x9d, 0x89, 0x13, 0xbc, 0x06, 0xad, 0x23, 0x16, 0x8d, - 0xb9, 0x72, 0x51, 0xaf, 0xb1, 0xdc, 0x59, 0x21, 0xfd, 0x49, 0x61, 0x9d, 0xff, 0x96, 0xfb, 0x5a, - 0x66, 0xb4, 0x64, 0x7b, 0x3b, 0xd3, 0x7a, 0x45, 0x1e, 0xaf, 0xc3, 0x8c, 0x62, 0x71, 0x1a, 0xfd, - 0x5d, 0x70, 0xd7, 0xd2, 0x68, 0x45, 0xf7, 0x3e, 0xa1, 0x69, 0xc9, 0x82, 0x81, 0x1f, 0x01, 0x3a, - 0x74, 0x51, 0x0f, 0x2d, 0x77, 
0x56, 0xdc, 0x3f, 0x89, 0x51, 0x74, 0x88, 0x1f, 0xc0, 0x9c, 0x16, - 0x31, 0x57, 0x9a, 0xc5, 0xe9, 0xfb, 0x58, 0xb9, 0xff, 0xf5, 0xd0, 0x72, 0x83, 0x76, 0x26, 0xd8, - 0xb6, 0xc2, 0x4f, 0xa1, 0x15, 0x73, 0x9d, 0x09, 0xdf, 0x6d, 0x58, 0x73, 0x77, 0xae, 0xf4, 0x5e, - 0xb2, 0x03, 0x1e, 0xed, 0x30, 0x91, 0xd1, 0x92, 0xe2, 0x85, 0xb0, 0x50, 0xbf, 0x04, 0x3f, 0x83, - 0x19, 0x1d, 0x88, 0x90, 0x2b, 0x5d, 0xfa, 0xb9, 0x7d, 0x55, 0xbf, 0xf7, 0xdc, 0x26, 0xb6, 0x1c, - 0x5a, 0x71, 0xf0, 0x7d, 0x68, 0x07, 0x41, 0x31, 0x2c, 0x6b, 0x66, 0x6e, 0xcb, 0xa1, 0x13, 0x64, - 0xa3, 0x0d, 0xad, 0xe2, 0xe4, 0x7d, 0x47, 0x30, 0x53, 0x96, 0xe3, 0x5b, 0xd0, 0x88, 0x45, 0x62, - 0xe5, 0x11, 0x35, 0x47, 0x8b, 0xb0, 0x13, 0x2b, 0x60, 0x10, 0x76, 0x82, 0x7b, 0xd0, 0xf1, 0x65, - 0x9c, 0x66, 0x5c, 0x29, 0x21, 0x13, 0xb7, 0x61, 0x33, 0xd7, 0x21, 0xbc, 0x0e, 0xb3, 0x69, 0x26, - 0x7d, 0xae, 0x14, 0x0f, 0xdc, 0xa6, 0x7d, 0x6a, 0xf7, 0x86, 0xd5, 0xfe, 0x26, 0x4f, 0x74, 0x26, - 0x45, 0x40, 0xaf, 0xc8, 0xdd, 0x35, 0x68, 0x57, 0x30, 0xc6, 0xd0, 0x8c, 0x39, 0xab, 0xcc, 0xd8, - 0x33, 0xbe, 0x0b, 0xad, 0x63, 0x2e, 0xc2, 0xa1, 0x2e, 0x0d, 0x95, 0x91, 0xf7, 0x0e, 0x16, 0x36, - 0xe5, 0x38, 0xd1, 0xdb, 0x22, 0x29, 0x9b, 0xb5, 0x08, 0xff, 0x07, 0x3c, 0xd5, 0x43, 0x5b, 0x3e, - 0x4f, 0x8b, 0xc0, 0xa0, 0xc7, 0x22, 0xd0, 0x45, 0x43, 0xe6, 0x69, 0x11, 0xe0, 0x2e, 0xb4, 0x7d, - 0x53, 0xcd, 0x33, 0x65, 0x27, 0x33, 0x4f, 0x27, 0xb1, 0xf7, 0x0d, 0x41, 0x73, 0x4f, 0xa6, 0x2f, - 0xf0, 0x13, 0x68, 0xf8, 0xb1, 0xba, 0xf9, 0x25, 0xd4, 0xef, 0xa5, 0x86, 0x84, 0x1f, 0x43, 0x33, - 0x12, 0xca, 0x98, 0x9c, 0x1a, 0xb3, 0x51, 0xea, 0xdb, 0x31, 0x5b, 0x82, 0xe9, 0xe5, 0xf0, 0x43, - 0xca, 0xb3, 0x48, 0x86, 0x91, 0x0c, 0x6d, 0x2f, 0xe7, 0xe8, 0x75, 0xa8, 0xbb, 0x02, 0x4d, 0xc3, - 0x37, 0xce, 0xf9, 0x11, 0x4f, 0x8a, 0xd1, 0xcf, 0xd2, 0x22, 0x30, 0xa8, 0x75, 0x5a, 0xbd, 0xc7, - 0x06, 0xde, 0x17, 0x04, 0x60, 0x6e, 0x2a, 0x97, 0x6c, 0x75, 0x6a, 0xc9, 0x96, 0xea, 0x7e, 0x0a, - 0x56, 0xbf, 0xbe, 0x61, 0xdd, 0xd7, 0xd0, 0x2a, 0x77, 0xca, 0x83, 0xa6, 0x96, 0xe9, 0xa8, 0x7c, - 0xf9, 0x42, 0xbd, 0x98, 0xda, 0xdc, 0x3f, 0x7c, 0xfc, 0x1b, 0xfb, 0xa7, 0xe7, 0xc4, 0x39, 0x3b, - 0x27, 0xce, 0xe5, 0x39, 0x41, 0x1f, 0x73, 0x82, 0xbe, 0xe6, 0x04, 0xfd, 0xc8, 0x09, 0x3a, 0xcd, - 0x09, 0xfa, 0x99, 0x13, 0xf4, 0x2b, 0x27, 0xce, 0x65, 0x4e, 0xd0, 0xe7, 0x0b, 0xe2, 0x9c, 0x5e, - 0x10, 0xe7, 0xec, 0x82, 0x38, 0xfb, 0x0f, 0x43, 0xa1, 0x87, 0xe3, 0x83, 0xbe, 0x2f, 0xe3, 0x41, - 0x98, 0xb1, 0x43, 0x96, 0xb0, 0x41, 0x24, 0x47, 0x62, 0x70, 0xfd, 0xdf, 0xe6, 0xa0, 0x65, 0x7f, - 0x56, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x24, 0x9c, 0x74, 0xb7, 0xa9, 0x04, 0x00, 0x00, + 0x14, 0xef, 0xfc, 0x77, 0xff, 0xcb, 0xf2, 0x16, 0x88, 0x8e, 0xc4, 0xd4, 0xc5, 0x4c, 0xd6, 0x1e, + 0x84, 0x68, 0xdc, 0x4d, 0x20, 0x21, 0x9c, 0xc1, 0x03, 0x89, 0xa2, 0x38, 0x10, 0x63, 0x4c, 0x8c, + 0x19, 0xda, 0xa1, 0x3b, 0xd9, 0xb6, 0xd3, 0x74, 0x66, 0x01, 0x6f, 0x7e, 0x02, 0x63, 0xfc, 0x14, + 0x5e, 0xfd, 0x08, 0xde, 0x3c, 0x72, 0xe4, 0x28, 0xe5, 0xe2, 0x91, 0x8f, 0x60, 0x66, 0xda, 0x2e, + 0x74, 0x89, 0xd1, 0xd3, 0xce, 0xfb, 0xbd, 0xdf, 0xfb, 0xcd, 0x6f, 0xde, 0xeb, 0x5b, 0xb8, 0x97, + 0x8e, 0xc2, 0x41, 0x24, 0xc3, 0x34, 0x93, 0x5a, 0x0e, 0xd4, 0x88, 0x6b, 0x7f, 0xd8, 0xb7, 0x01, + 0x6e, 0x57, 0x70, 0x77, 0xa9, 0x46, 0xaa, 0x0e, 0x05, 0xcd, 0x7b, 0x01, 0x8b, 0xaf, 0xc6, 0x2c, + 0xd1, 0x22, 0xe2, 0x7b, 0xb6, 0x7c, 0x87, 0xe9, 0x4c, 0x9c, 0xe0, 0x75, 0x68, 0x1d, 0xb1, 0x68, + 0xcc, 0x95, 0x8b, 0x7a, 0x8d, 0x95, 0xce, 0x2a, 0xe9, 0x4f, 0x0a, 0xeb, 0xfc, 0xd7, 0xdc, 0xd7, + 0x32, 0xa3, 0x25, 0xdb, 0xdb, 0x9d, 0xd6, 0x2b, 0xf2, 0x78, 
0x03, 0x66, 0x14, 0x8b, 0xd3, 0xe8, + 0xef, 0x82, 0x7b, 0x96, 0x46, 0x2b, 0xba, 0xf7, 0x09, 0x4d, 0x4b, 0x16, 0x0c, 0xfc, 0x10, 0xd0, + 0xa1, 0x8b, 0x7a, 0x68, 0xa5, 0xb3, 0xea, 0xfe, 0x49, 0x8c, 0xa2, 0x43, 0xfc, 0x00, 0xe6, 0xb4, + 0x88, 0xb9, 0xd2, 0x2c, 0x4e, 0xdf, 0xc7, 0xca, 0xfd, 0xaf, 0x87, 0x56, 0x1a, 0xb4, 0x33, 0xc1, + 0x76, 0x14, 0x7e, 0x0c, 0xad, 0x98, 0xeb, 0x4c, 0xf8, 0x6e, 0xc3, 0x9a, 0xbb, 0x73, 0xa5, 0xf7, + 0x9c, 0x1d, 0xf0, 0x68, 0x97, 0x89, 0x8c, 0x96, 0x14, 0x2f, 0x84, 0x85, 0xfa, 0x25, 0xf8, 0x09, + 0xcc, 0xe8, 0x40, 0x84, 0x5c, 0xe9, 0xd2, 0xcf, 0xed, 0xab, 0xfa, 0xfd, 0xa7, 0x36, 0xb1, 0xed, + 0xd0, 0x8a, 0x83, 0xef, 0x43, 0x3b, 0x08, 0x8a, 0x61, 0x59, 0x33, 0x73, 0xdb, 0x0e, 0x9d, 0x20, + 0x9b, 0x6d, 0x68, 0x15, 0x27, 0xef, 0x3b, 0x82, 0x99, 0xb2, 0x1c, 0xdf, 0x82, 0x46, 0x2c, 0x12, + 0x2b, 0x8f, 0xa8, 0x39, 0x5a, 0x84, 0x9d, 0x58, 0x01, 0x83, 0xb0, 0x13, 0xdc, 0x83, 0x8e, 0x2f, + 0xe3, 0x34, 0xe3, 0x4a, 0x09, 0x99, 0xb8, 0x0d, 0x9b, 0xb9, 0x0e, 0xe1, 0x0d, 0x98, 0x4d, 0x33, + 0xe9, 0x73, 0xa5, 0x78, 0xe0, 0x36, 0xed, 0x53, 0xbb, 0x37, 0xac, 0xf6, 0xb7, 0x78, 0xa2, 0x33, + 0x29, 0x02, 0x7a, 0x45, 0xee, 0xae, 0x43, 0xbb, 0x82, 0x31, 0x86, 0x66, 0xcc, 0x59, 0x65, 0xc6, + 0x9e, 0xf1, 0x5d, 0x68, 0x1d, 0x73, 0x11, 0x0e, 0x75, 0x69, 0xa8, 0x8c, 0xbc, 0x37, 0xb0, 0xb0, + 0x25, 0xc7, 0x89, 0xde, 0x11, 0x49, 0xd9, 0xac, 0x45, 0xf8, 0x3f, 0xe0, 0xa9, 0x1e, 0xda, 0xf2, + 0x79, 0x5a, 0x04, 0x06, 0x3d, 0x16, 0x81, 0x2e, 0x1a, 0x32, 0x4f, 0x8b, 0x00, 0x77, 0xa1, 0xed, + 0x9b, 0x6a, 0x9e, 0x29, 0x3b, 0x99, 0x79, 0x3a, 0x89, 0xbd, 0x6f, 0x08, 0x9a, 0xfb, 0x32, 0x7d, + 0x86, 0x1f, 0x41, 0xc3, 0x8f, 0xd5, 0xcd, 0x2f, 0xa1, 0x7e, 0x2f, 0x35, 0x24, 0xbc, 0x0c, 0xcd, + 0x48, 0x28, 0x63, 0x72, 0x6a, 0xcc, 0x46, 0xa9, 0x6f, 0xc7, 0x6c, 0x09, 0xa6, 0x97, 0xc3, 0x0f, + 0x29, 0xcf, 0x22, 0x19, 0x46, 0x32, 0xb4, 0xbd, 0x9c, 0xa3, 0xd7, 0xa1, 0xee, 0x2a, 0x34, 0x0d, + 0xdf, 0x38, 0xe7, 0x47, 0x3c, 0x29, 0x46, 0x3f, 0x4b, 0x8b, 0xc0, 0xa0, 0xd6, 0x69, 0xf5, 0x1e, + 0x1b, 0x78, 0x5f, 0x10, 0x80, 0xb9, 0xa9, 0x5c, 0xb2, 0xb5, 0xa9, 0x25, 0x5b, 0xaa, 0xfb, 0x29, + 0x58, 0xfd, 0xfa, 0x86, 0x75, 0x5f, 0x42, 0xab, 0xdc, 0x29, 0x0f, 0x9a, 0x5a, 0xa6, 0xa3, 0xf2, + 0xe5, 0x0b, 0xf5, 0x62, 0x6a, 0x73, 0xff, 0xf0, 0xf1, 0x6f, 0xbe, 0x3b, 0x3d, 0x27, 0xce, 0xd9, + 0x39, 0x71, 0x2e, 0xcf, 0x09, 0xfa, 0x98, 0x13, 0xf4, 0x35, 0x27, 0xe8, 0x47, 0x4e, 0xd0, 0x69, + 0x4e, 0xd0, 0xcf, 0x9c, 0xa0, 0x5f, 0x39, 0x71, 0x2e, 0x73, 0x82, 0x3e, 0x5f, 0x10, 0xe7, 0xf4, + 0x82, 0x38, 0x67, 0x17, 0xc4, 0x79, 0xbb, 0x1c, 0x0a, 0x3d, 0x1c, 0x1f, 0xf4, 0x7d, 0x19, 0x0f, + 0xc2, 0x8c, 0x1d, 0xb2, 0x84, 0x0d, 0x22, 0x39, 0x12, 0x83, 0xa3, 0xb5, 0xc1, 0xf5, 0x3f, 0x9c, + 0x83, 0x96, 0xfd, 0x59, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x9e, 0x82, 0x05, 0x2f, 0xac, 0x04, + 0x00, 0x00, } func (this *QuantileSketchMatrix) Equal(that interface{}) bool { diff --git a/pkg/logproto/sketch.proto b/pkg/logproto/sketch.proto index d8ffeb0110340..e551716db3742 100644 --- a/pkg/logproto/sketch.proto +++ b/pkg/logproto/sketch.proto @@ -4,7 +4,7 @@ package logproto; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/logproto"; +option go_package = "github.com/grafana/loki/v3/pkg/logproto"; message QuantileSketchMatrix { repeated QuantileSketchVector values = 1; diff --git a/pkg/logql/accumulator.go b/pkg/logql/accumulator.go index 9e9784cb037ef..613a99fc43fd9 100644 --- a/pkg/logql/accumulator.go +++ b/pkg/logql/accumulator.go @@ -7,12 +7,12 @@ import ( "sort" "time" - "github.com/grafana/loki/pkg/logproto" 
- "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/metadata" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/util/math" ) // NewBufferedAccumulator returns an accumulator which aggregates all query diff --git a/pkg/logql/accumulator_test.go b/pkg/logql/accumulator_test.go index d827e3ea02e71..b9b8b86760d04 100644 --- a/pkg/logql/accumulator_test.go +++ b/pkg/logql/accumulator_test.go @@ -10,9 +10,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestAccumulatedStreams(t *testing.T) { diff --git a/pkg/logql/blocker.go b/pkg/logql/blocker.go index 9a07113c40dd3..eaa6e1d7b1291 100644 --- a/pkg/logql/blocker.go +++ b/pkg/logql/blocker.go @@ -8,9 +8,9 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/regexp" - "github.com/grafana/loki/pkg/util" - logutil "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util" + logutil "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" ) type queryBlocker struct { diff --git a/pkg/logql/blocker_test.go b/pkg/logql/blocker_test.go index 9fa586a02db80..c39d77c074031 100644 --- a/pkg/logql/blocker_test.go +++ b/pkg/logql/blocker_test.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) func TestEngine_ExecWithBlockedQueries(t *testing.T) { diff --git a/pkg/logql/downstream.go b/pkg/logql/downstream.go index 5dea1144d9a18..eea9b28d3059b 100644 --- a/pkg/logql/downstream.go +++ b/pkg/logql/downstream.go @@ -10,14 +10,14 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/prometheus/promql" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/metadata" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) /* diff --git a/pkg/logql/downstream_test.go 
b/pkg/logql/downstream_test.go index 9dbf261668a40..fa179502d6b7b 100644 --- a/pkg/logql/downstream_test.go +++ b/pkg/logql/downstream_test.go @@ -13,9 +13,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var nilShardMetrics = NewShardMapperMetrics(nil) diff --git a/pkg/logql/engine.go b/pkg/logql/engine.go index a9f3dabe14eed..fd89591794359 100644 --- a/pkg/logql/engine.go +++ b/pkg/logql/engine.go @@ -13,7 +13,7 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -26,18 +26,18 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - logutil "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + logutil "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 1391b40ff4248..2e354bdf5b8d3 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -11,9 +11,9 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/logqlmodel/metadata" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/logqlmodel/metadata" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" "github.com/go-kit/log" "github.com/grafana/dskit/user" @@ -24,13 +24,13 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) var ( diff --git 
a/pkg/logql/evaluator.go b/pkg/logql/evaluator.go index 903c4a5555b2d..eb7958691ac64 100644 --- a/pkg/logql/evaluator.go +++ b/pkg/logql/evaluator.go @@ -13,12 +13,12 @@ import ( "github.com/prometheus/prometheus/promql" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/util" ) type QueryRangeType string diff --git a/pkg/logql/evaluator_test.go b/pkg/logql/evaluator_test.go index e31d587252066..7a9eae4e3506f 100644 --- a/pkg/logql/evaluator_test.go +++ b/pkg/logql/evaluator_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestDefaultEvaluator_DivideByZero(t *testing.T) { diff --git a/pkg/logql/explain_test.go b/pkg/logql/explain_test.go index 84364b633c228..d6984683aec1b 100644 --- a/pkg/logql/explain_test.go +++ b/pkg/logql/explain_test.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestExplain(t *testing.T) { diff --git a/pkg/logql/limits.go b/pkg/logql/limits.go index 9075d9320ca43..f9742dac53ef0 100644 --- a/pkg/logql/limits.go +++ b/pkg/logql/limits.go @@ -5,7 +5,7 @@ import ( "math" "time" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util/validation" ) var ( diff --git a/pkg/logql/log/drop_labels.go b/pkg/logql/log/drop_labels.go index 7e6b5e0b14155..0f0fcdee942f0 100644 --- a/pkg/logql/log/drop_labels.go +++ b/pkg/logql/log/drop_labels.go @@ -3,7 +3,7 @@ package log import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) type DropLabels struct { diff --git a/pkg/logql/log/drop_labels_test.go b/pkg/logql/log/drop_labels_test.go index 9eee5f55dd201..bce8487fd93a7 100644 --- a/pkg/logql/log/drop_labels_test.go +++ b/pkg/logql/log/drop_labels_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func Test_DropLabels(t *testing.T) { diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index 8a9a162828638..dbe5c5e99ce29 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log/pattern" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logql/log/pattern" + "github.com/grafana/loki/v3/pkg/util" ) // LineMatchType is an enum for line matching types. 
diff --git a/pkg/logql/log/fmt.go b/pkg/logql/log/fmt.go index 34a1bb32c5e48..c69aa3d40bb01 100644 --- a/pkg/logql/log/fmt.go +++ b/pkg/logql/log/fmt.go @@ -13,7 +13,7 @@ import ( "github.com/Masterminds/sprig/v3" "github.com/grafana/regexp" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const ( diff --git a/pkg/logql/log/fmt_test.go b/pkg/logql/log/fmt_test.go index 637caec29a469..2028d2e00bf8f 100644 --- a/pkg/logql/log/fmt_test.go +++ b/pkg/logql/log/fmt_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func Test_lineFormatter_Format(t *testing.T) { diff --git a/pkg/logql/log/keep_labels.go b/pkg/logql/log/keep_labels.go index 43ed2ab666abd..67c93ecca8fd2 100644 --- a/pkg/logql/log/keep_labels.go +++ b/pkg/logql/log/keep_labels.go @@ -3,7 +3,7 @@ package log import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) type KeepLabels struct { diff --git a/pkg/logql/log/keep_labels_test.go b/pkg/logql/log/keep_labels_test.go index 3f502e76c901c..11d70f0ac6549 100644 --- a/pkg/logql/log/keep_labels_test.go +++ b/pkg/logql/log/keep_labels_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func Test_KeepLabels(t *testing.T) { diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go index e7e10b404d612..49e8cbf092378 100644 --- a/pkg/logql/log/label_filter.go +++ b/pkg/logql/log/label_filter.go @@ -10,7 +10,7 @@ import ( "github.com/dustin/go-humanize" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) var ( diff --git a/pkg/logql/log/label_filter_test.go b/pkg/logql/log/label_filter_test.go index 3a2e2480c33f2..b6364dc0c3fd0 100644 --- a/pkg/logql/log/label_filter_test.go +++ b/pkg/logql/log/label_filter_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestBinary_Filter(t *testing.T) { diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index d93af8a845e83..c68fe1af0e5b5 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const MaxInternedStrings = 1024 diff --git a/pkg/logql/log/labels_test.go b/pkg/logql/log/labels_test.go index e42a330dcbc3a..97c9a8899c223 100644 --- a/pkg/logql/log/labels_test.go +++ b/pkg/logql/log/labels_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestLabelsBuilder_Get(t *testing.T) { diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index afdc7f91a4f9d..9a5ae1395069c 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -8,10 +8,10 @@ import ( "github.com/grafana/jsonparser" - "github.com/grafana/loki/pkg/logql/log/jsonexpr" - "github.com/grafana/loki/pkg/logql/log/logfmt" - 
"github.com/grafana/loki/pkg/logql/log/pattern" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logql/log/jsonexpr" + "github.com/grafana/loki/v3/pkg/logql/log/logfmt" + "github.com/grafana/loki/v3/pkg/logql/log/pattern" + "github.com/grafana/loki/v3/pkg/logqlmodel" "github.com/grafana/regexp" jsoniter "github.com/json-iterator/go" diff --git a/pkg/logql/log/parser_hints.go b/pkg/logql/log/parser_hints.go index 3fd4cff2b3323..32a789250df94 100644 --- a/pkg/logql/log/parser_hints.go +++ b/pkg/logql/log/parser_hints.go @@ -3,7 +3,7 @@ package log import ( "strings" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func NoParserHints() ParserHint { diff --git a/pkg/logql/log/parser_hints_test.go b/pkg/logql/log/parser_hints_test.go index 42d0134bc1d8f..96bfc15b38639 100644 --- a/pkg/logql/log/parser_hints_test.go +++ b/pkg/logql/log/parser_hints_test.go @@ -4,12 +4,12 @@ package log_test import ( "testing" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) var ( diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index f8cf6373a152f..3e5de0f709418 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -5,7 +5,7 @@ import ( "sort" "testing" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" diff --git a/pkg/logql/log/pipeline_test.go b/pkg/logql/log/pipeline_test.go index 9b2aff1332d47..ffa5df0d50b98 100644 --- a/pkg/logql/log/pipeline_test.go +++ b/pkg/logql/log/pipeline_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestNoopPipeline(t *testing.T) { diff --git a/pkg/logql/mapper_metrics.go b/pkg/logql/mapper_metrics.go index 4ec8cb8454c78..33cc406dd1ab3 100644 --- a/pkg/logql/mapper_metrics.go +++ b/pkg/logql/mapper_metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // expression type used in metrics diff --git a/pkg/logql/matchers.go b/pkg/logql/matchers.go index 2fbf14573069c..f0bdef46a9d41 100644 --- a/pkg/logql/matchers.go +++ b/pkg/logql/matchers.go @@ -4,7 +4,7 @@ import ( "github.com/pkg/errors" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) // MatchForSeriesRequest extracts and parses multiple matcher groups from a slice of strings. 
diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index ee23dc8f3f5fa..e9921a07c2944 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -15,16 +15,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - logql_stats "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + logql_stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/logql/metrics_test.go b/pkg/logql/metrics_test.go index c08844eabeabc..44094e27f5d4b 100644 --- a/pkg/logql/metrics_test.go +++ b/pkg/logql/metrics_test.go @@ -15,13 +15,13 @@ import ( "github.com/stretchr/testify/require" "github.com/uber/jaeger-client-go" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestQueryType(t *testing.T) { diff --git a/pkg/logql/optimize.go b/pkg/logql/optimize.go index 9b885b0fd229c..ef930be799664 100644 --- a/pkg/logql/optimize.go +++ b/pkg/logql/optimize.go @@ -1,6 +1,6 @@ package logql -import "github.com/grafana/loki/pkg/logql/syntax" +import "github.com/grafana/loki/v3/pkg/logql/syntax" // optimizeSampleExpr Attempt to optimize the SampleExpr to another that will run faster but will produce the same result. 
func optimizeSampleExpr(expr syntax.SampleExpr) (syntax.SampleExpr, error) { diff --git a/pkg/logql/optimize_test.go b/pkg/logql/optimize_test.go index b4005e6d1f158..a457f180d4fa3 100644 --- a/pkg/logql/optimize_test.go +++ b/pkg/logql/optimize_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func Test_optimizeSampleExpr(t *testing.T) { diff --git a/pkg/logql/quantile_over_time_sketch.go b/pkg/logql/quantile_over_time_sketch.go index 24a8a05d89ede..42288830c2ddc 100644 --- a/pkg/logql/quantile_over_time_sketch.go +++ b/pkg/logql/quantile_over_time_sketch.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/promql" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const ( diff --git a/pkg/logql/quantile_over_time_sketch_test.go b/pkg/logql/quantile_over_time_sketch_test.go index 488ebdec26f06..5692575bd2904 100644 --- a/pkg/logql/quantile_over_time_sketch_test.go +++ b/pkg/logql/quantile_over_time_sketch_test.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func TestProbabilisticMQuantileMatrixSerialization(t *testing.T) { diff --git a/pkg/logql/range_vector.go b/pkg/logql/range_vector.go index 484949718f090..44a8651577549 100644 --- a/pkg/logql/range_vector.go +++ b/pkg/logql/range_vector.go @@ -11,9 +11,9 @@ import ( "github.com/prometheus/prometheus/promql" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logql/vector" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/vector" ) // BatchRangeVectorAggregator aggregates samples for a given range of samples. 
diff --git a/pkg/logql/range_vector_test.go b/pkg/logql/range_vector_test.go index c7176bed2ab90..fb28ea5c9c0c9 100644 --- a/pkg/logql/range_vector_test.go +++ b/pkg/logql/range_vector_test.go @@ -13,11 +13,11 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logql/vector" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/vector" ) var samples = []logproto.Sample{ diff --git a/pkg/logql/rangemapper.go b/pkg/logql/rangemapper.go index f898e19d2ea1e..bec1711226109 100644 --- a/pkg/logql/rangemapper.go +++ b/pkg/logql/rangemapper.go @@ -9,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logql/syntax" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var splittableVectorOp = map[string]struct{}{ diff --git a/pkg/logql/rangemapper_test.go b/pkg/logql/rangemapper_test.go index 5e95486a8c8e2..5365c7b2b73f0 100644 --- a/pkg/logql/rangemapper_test.go +++ b/pkg/logql/rangemapper_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func Test_SplitRangeInterval(t *testing.T) { diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go index 3095fc0a1aafd..fbd0dbaa83ebb 100644 --- a/pkg/logql/shardmapper.go +++ b/pkg/logql/shardmapper.go @@ -7,9 +7,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index 9f5757b7d8eed..355f839bac55f 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestShardedStringer(t *testing.T) { diff --git a/pkg/logql/shards.go b/pkg/logql/shards.go index 9265dac5f0e8a..75281aa3c95b3 100644 --- a/pkg/logql/shards.go +++ b/pkg/logql/shards.go @@ -7,13 +7,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - 
"github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type Shards []Shard diff --git a/pkg/logql/shards_test.go b/pkg/logql/shards_test.go index 1a2d78889cc5a..d9f5b62ebb178 100644 --- a/pkg/logql/shards_test.go +++ b/pkg/logql/shards_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestShardString(t *testing.T) { diff --git a/pkg/logql/sketch/quantile.go b/pkg/logql/sketch/quantile.go index 3b8b0f22fc8e0..093923a591366 100644 --- a/pkg/logql/sketch/quantile.go +++ b/pkg/logql/sketch/quantile.go @@ -10,7 +10,7 @@ import ( "github.com/DataDog/sketches-go/ddsketch/store" "github.com/influxdata/tdigest" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // QuantileSketch estimates quantiles over time. diff --git a/pkg/logql/sketch/series.go b/pkg/logql/sketch/series.go index 684e3080d6df5..0f434bd341c52 100644 --- a/pkg/logql/sketch/series.go +++ b/pkg/logql/sketch/series.go @@ -3,7 +3,7 @@ package sketch import ( "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ValueTypeTopKMatrix = "topk_matrix" diff --git a/pkg/logql/sketch/topk.go b/pkg/logql/sketch/topk.go index 021ab632ab552..e5efad409727b 100644 --- a/pkg/logql/sketch/topk.go +++ b/pkg/logql/sketch/topk.go @@ -6,7 +6,7 @@ import ( "sort" "unsafe" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/axiomhq/hyperloglog" "github.com/go-kit/log" diff --git a/pkg/logql/syntax/ast.go b/pkg/logql/syntax/ast.go index 78b6330809e5d..b0649570e8334 100644 --- a/pkg/logql/syntax/ast.go +++ b/pkg/logql/syntax/ast.go @@ -8,7 +8,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" "github.com/pkg/errors" "github.com/prometheus/common/model" @@ -17,8 +17,8 @@ import ( "github.com/grafana/regexp/syntax" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // Expr is the root expression which can be a SampleExpr or LogSelectorExpr diff --git a/pkg/logql/syntax/ast_test.go b/pkg/logql/syntax/ast_test.go index ce2dca62f9d43..2ba435e0fe2de 100644 --- a/pkg/logql/syntax/ast_test.go +++ b/pkg/logql/syntax/ast_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) var labelBar, _ = ParseLabels("{app=\"bar\"}") diff --git a/pkg/logql/syntax/clone.go b/pkg/logql/syntax/clone.go index a93aa53d599ac..d047218b0b607 100644 
--- a/pkg/logql/syntax/clone.go +++ b/pkg/logql/syntax/clone.go @@ -3,7 +3,7 @@ package syntax import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) type cloneVisitor struct { diff --git a/pkg/logql/syntax/clone_test.go b/pkg/logql/syntax/clone_test.go index 58dc6efb03e2c..cfed2134c6bbc 100644 --- a/pkg/logql/syntax/clone_test.go +++ b/pkg/logql/syntax/clone_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) func TestClone(t *testing.T) { diff --git a/pkg/logql/syntax/expr.y b/pkg/logql/syntax/expr.y index 0386406a87f81..7f443831159bb 100644 --- a/pkg/logql/syntax/expr.y +++ b/pkg/logql/syntax/expr.y @@ -4,7 +4,7 @@ package syntax import ( "time" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) %} diff --git a/pkg/logql/syntax/expr.y.go b/pkg/logql/syntax/expr.y.go index 48ba393ad5e2a..2d322514a75fc 100644 --- a/pkg/logql/syntax/expr.y.go +++ b/pkg/logql/syntax/expr.y.go @@ -5,7 +5,7 @@ package syntax import __yyfmt__ "fmt" import ( - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" "github.com/prometheus/prometheus/model/labels" "time" ) diff --git a/pkg/logql/syntax/extractor.go b/pkg/logql/syntax/extractor.go index 922cd25ce9a70..8d79202968da0 100644 --- a/pkg/logql/syntax/extractor.go +++ b/pkg/logql/syntax/extractor.go @@ -4,7 +4,7 @@ import ( "fmt" "sort" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) const UnsupportedErr = "unsupported range vector aggregation operation: %s" diff --git a/pkg/logql/syntax/lex.go b/pkg/logql/syntax/lex.go index dc806c921090e..dffb05ab0189b 100644 --- a/pkg/logql/syntax/lex.go +++ b/pkg/logql/syntax/lex.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/util/strutil" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) var tokens = map[string]int{ diff --git a/pkg/logql/syntax/linefilter.go b/pkg/logql/syntax/linefilter.go index 9b07e95deb12d..e48c847d79a6f 100644 --- a/pkg/logql/syntax/linefilter.go +++ b/pkg/logql/syntax/linefilter.go @@ -1,8 +1,8 @@ package syntax import ( - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Binary encoding of the LineFilter diff --git a/pkg/logql/syntax/linefilter_test.go b/pkg/logql/syntax/linefilter_test.go index 55fc0fc39179c..129aaac9d8a8f 100644 --- a/pkg/logql/syntax/linefilter_test.go +++ b/pkg/logql/syntax/linefilter_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) func TestLineFilterSerialization(t *testing.T) { diff --git a/pkg/logql/syntax/parser.go b/pkg/logql/syntax/parser.go index 79213049f376c..524c86109afb4 100644 --- a/pkg/logql/syntax/parser.go +++ b/pkg/logql/syntax/parser.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/model/labels" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + 
"github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/logql/syntax/parser_test.go b/pkg/logql/syntax/parser_test.go index faa55015e5838..3851013f4be92 100644 --- a/pkg/logql/syntax/parser_test.go +++ b/pkg/logql/syntax/parser_test.go @@ -9,8 +9,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) func NewStringLabelFilter(s string) *string { diff --git a/pkg/logql/syntax/serialize.go b/pkg/logql/syntax/serialize.go index 84af7e803d0d3..4e4362683543e 100644 --- a/pkg/logql/syntax/serialize.go +++ b/pkg/logql/syntax/serialize.go @@ -8,7 +8,7 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" ) type JSONSerializer struct { diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 8154b18fb691a..d141f39bf0778 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -14,11 +14,11 @@ import ( "github.com/prometheus/prometheus/model/labels" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func NewMockQuerier(shards int, streams []logproto.Stream) MockQuerier { diff --git a/pkg/logqlanalyzer/analyzer.go b/pkg/logqlanalyzer/analyzer.go index 5d32680744289..ceff726507f1b 100644 --- a/pkg/logqlanalyzer/analyzer.go +++ b/pkg/logqlanalyzer/analyzer.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) type logQLAnalyzer struct { diff --git a/pkg/logqlanalyzer/http.go b/pkg/logqlanalyzer/http.go index 6f1324348892a..c3cff9763a3f8 100644 --- a/pkg/logqlanalyzer/http.go +++ b/pkg/logqlanalyzer/http.go @@ -9,7 +9,7 @@ import ( "github.com/go-kit/log/level" "github.com/gorilla/mux" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func CorsMiddleware() mux.MiddlewareFunc { diff --git a/pkg/logqlmodel/logqlmodel.go b/pkg/logqlmodel/logqlmodel.go index 8ba0e198c403a..9de0d5be9f756 100644 --- a/pkg/logqlmodel/logqlmodel.go +++ b/pkg/logqlmodel/logqlmodel.go @@ -3,10 +3,11 @@ package logqlmodel import ( "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/push" + + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) // ValueTypeStreams promql.ValueType for log streams diff --git a/pkg/logqlmodel/metadata/context.go 
b/pkg/logqlmodel/metadata/context.go index b819893679a4f..f4d7dca265da1 100644 --- a/pkg/logqlmodel/metadata/context.go +++ b/pkg/logqlmodel/metadata/context.go @@ -10,7 +10,7 @@ import ( "sort" "sync" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" ) type ( diff --git a/pkg/logqlmodel/metadata/context_test.go b/pkg/logqlmodel/metadata/context_test.go index 256abdb18ef77..2f4e3316ece13 100644 --- a/pkg/logqlmodel/metadata/context_test.go +++ b/pkg/logqlmodel/metadata/context_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" ) func TestHeaders(t *testing.T) { diff --git a/pkg/logqlmodel/stats/context_test.go b/pkg/logqlmodel/stats/context_test.go index b7e37e311718c..55f5b93c70b05 100644 --- a/pkg/logqlmodel/stats/context_test.go +++ b/pkg/logqlmodel/stats/context_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestResult(t *testing.T) { diff --git a/pkg/logqlmodel/stats/stats.pb.go b/pkg/logqlmodel/stats/stats.pb.go index facdcb2a910e9..9a728c1612671 100644 --- a/pkg/logqlmodel/stats/stats.pb.go +++ b/pkg/logqlmodel/stats/stats.pb.go @@ -864,92 +864,92 @@ func init() { func init() { proto.RegisterFile("pkg/logqlmodel/stats/stats.proto", fileDescriptor_6cdfe5d2aea33ebb) } var fileDescriptor_6cdfe5d2aea33ebb = []byte{ - // 1357 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0x4f, 0x6f, 0xdc, 0x44, - 0x14, 0xdf, 0xcd, 0xd6, 0x9b, 0x74, 0xf2, 0xaf, 0x9d, 0xa4, 0x74, 0x4b, 0x2b, 0x3b, 0x2c, 0x54, - 0x14, 0x81, 0x12, 0x15, 0x90, 0x10, 0x88, 0x4a, 0xc8, 0x29, 0x91, 0x2a, 0xa5, 0xa2, 0xbc, 0x80, - 0x40, 0x70, 0xf2, 0xda, 0x93, 0x5d, 0xab, 0x5e, 0x7b, 0x63, 0x8f, 0x4b, 0x23, 0x21, 0xc1, 0x47, - 0xe0, 0xce, 0x1d, 0x71, 0xe1, 0xc4, 0x89, 0x33, 0x97, 0x1e, 0x7b, 0xec, 0xc9, 0xa2, 0xdb, 0x0b, - 0xf2, 0xa9, 0x1f, 0x80, 0x03, 0x9a, 0x37, 0xb3, 0xf6, 0xd8, 0xeb, 0x4d, 0x73, 0x59, 0xcf, 0xfb, - 0xbd, 0xf7, 0x7b, 0x33, 0xf3, 0x66, 0xde, 0x7b, 0xb3, 0x64, 0x67, 0xf2, 0x70, 0xb8, 0x17, 0x44, - 0xc3, 0x93, 0x60, 0x1c, 0x79, 0x2c, 0xd8, 0x4b, 0xb8, 0xc3, 0x13, 0xf9, 0xbb, 0x3b, 0x89, 0x23, - 0x1e, 0x51, 0x03, 0x85, 0xd7, 0xb7, 0x87, 0xd1, 0x30, 0x42, 0x64, 0x4f, 0x8c, 0xa4, 0xb2, 0xff, - 0xdb, 0x12, 0xe9, 0x02, 0x4b, 0xd2, 0x80, 0xd3, 0x8f, 0xc9, 0x72, 0x92, 0x8e, 0xc7, 0x4e, 0x7c, - 0xda, 0x6b, 0xef, 0xb4, 0x6f, 0xad, 0xbe, 0xbf, 0xb1, 0x2b, 0xdd, 0x1c, 0x49, 0xd4, 0xde, 0x7c, - 0x92, 0x59, 0xad, 0x3c, 0xb3, 0x66, 0x66, 0x30, 0x1b, 0x08, 0xea, 0x49, 0xca, 0x62, 0x9f, 0xc5, - 0xbd, 0xa5, 0x0a, 0xf5, 0x4b, 0x89, 0x96, 0x54, 0x65, 0x06, 0xb3, 0x01, 0xbd, 0x43, 0x56, 0xfc, - 0x70, 0xc8, 0x12, 0xce, 0xe2, 0x5e, 0x07, 0xb9, 0x9b, 0x8a, 0x7b, 0x4f, 0xc1, 0xf6, 0x25, 0x45, - 0x2e, 0x0c, 0xa1, 0x18, 0xd1, 0x0f, 0x49, 0xd7, 0x75, 0xdc, 0x11, 0x4b, 0x7a, 0x17, 0x90, 0xbc, - 0xae, 0xc8, 0xfb, 0x08, 0xda, 0xeb, 0x8a, 0x6a, 0xa0, 0x11, 0x28, 0x5b, 0x7a, 0x9b, 0x18, 0x7e, - 0xe8, 0xb1, 0xc7, 0x3d, 0x03, 0x49, 0x6b, 0xc5, 0x8c, 0x1e, 0x7b, 0x5c, 0x72, 0xd0, 0x04, 0xe4, - 0xa7, 0xff, 0xeb, 0x05, 0xd2, 0xdd, 0x2f, 0xd8, 0xee, 0x28, 0x0d, 0x1f, 0xaa, 0x30, 0xad, 0xe9, - 0x53, 0x6a, 0x33, 0x0a, 0x13, 0x90, 0x9f, 0x72, 0xc2, 0xa5, 0xb3, 0x28, 0xfa, 
0x84, 0x62, 0x67, - 0x31, 0x1e, 0x8c, 0x0a, 0x4b, 0x95, 0xb3, 0xa1, 0x38, 0xca, 0x06, 0xd4, 0x97, 0xee, 0x93, 0x55, - 0x34, 0x93, 0x67, 0xaa, 0x82, 0x52, 0xa5, 0x6e, 0x29, 0xaa, 0x6e, 0x08, 0xba, 0x40, 0x0f, 0xc8, - 0xda, 0xa3, 0x28, 0x48, 0xc7, 0x4c, 0x79, 0x31, 0x1a, 0xbc, 0x6c, 0x2b, 0x2f, 0x15, 0x4b, 0xa8, - 0x48, 0xc2, 0x4f, 0x22, 0x4e, 0x79, 0xb6, 0x9a, 0xee, 0x59, 0x7e, 0x74, 0x4b, 0xa8, 0x48, 0x62, - 0x53, 0x81, 0x33, 0x60, 0x81, 0x72, 0xb3, 0x7c, 0xd6, 0xa6, 0x34, 0x43, 0xd0, 0x05, 0xfa, 0x3d, - 0xd9, 0xf2, 0xc3, 0x84, 0x3b, 0x21, 0xbf, 0xcf, 0x78, 0xec, 0xbb, 0xca, 0xd9, 0x4a, 0x83, 0xb3, - 0xeb, 0xca, 0x59, 0x13, 0x01, 0x9a, 0xc0, 0xfe, 0x5f, 0x5d, 0xb2, 0xac, 0xd2, 0x84, 0x7e, 0x4d, - 0xae, 0x0e, 0x4e, 0x39, 0x4b, 0x1e, 0xc4, 0x91, 0xcb, 0x92, 0x84, 0x79, 0x0f, 0x58, 0x7c, 0xc4, - 0xdc, 0x28, 0xf4, 0xf0, 0xc2, 0x74, 0xec, 0xeb, 0x79, 0x66, 0x2d, 0x32, 0x81, 0x45, 0x0a, 0xe1, - 0x36, 0xf0, 0xc3, 0x46, 0xb7, 0x4b, 0xa5, 0xdb, 0x05, 0x26, 0xb0, 0x48, 0x41, 0xef, 0x91, 0x2d, - 0x1e, 0x71, 0x27, 0xb0, 0x2b, 0xd3, 0xe2, 0x9d, 0xeb, 0xd8, 0x57, 0x45, 0x10, 0x1a, 0xd4, 0xd0, - 0x04, 0x16, 0xae, 0x0e, 0x2b, 0x53, 0xe1, 0x1d, 0xd4, 0x5d, 0x55, 0xd5, 0xd0, 0x04, 0xd2, 0x5b, - 0x64, 0x85, 0x3d, 0x66, 0xee, 0x57, 0xfe, 0x98, 0xe1, 0xed, 0x6b, 0xdb, 0x6b, 0xa2, 0x00, 0xcc, - 0x30, 0x28, 0x46, 0xf4, 0x5d, 0x72, 0xf1, 0x24, 0x65, 0x29, 0x43, 0xd3, 0x2e, 0x9a, 0xae, 0xe7, - 0x99, 0x55, 0x82, 0x50, 0x0e, 0xe9, 0x2e, 0x21, 0x49, 0x3a, 0x90, 0xa5, 0x27, 0xc1, 0x7b, 0xd4, - 0xb1, 0x37, 0xf2, 0xcc, 0xd2, 0x50, 0xd0, 0xc6, 0xf4, 0x90, 0x6c, 0xe3, 0xea, 0x3e, 0x0f, 0xb9, - 0xbc, 0x8e, 0x3c, 0x8d, 0x43, 0xe6, 0xe1, 0xa5, 0xe9, 0xd8, 0xbd, 0x3c, 0xb3, 0x1a, 0xf5, 0xd0, - 0x88, 0xd2, 0x3e, 0xe9, 0x26, 0x93, 0xc0, 0xe7, 0x49, 0xef, 0x22, 0xf2, 0x89, 0xc8, 0x5f, 0x89, - 0x80, 0xfa, 0xa2, 0xcd, 0xc8, 0x89, 0xbd, 0xa4, 0x47, 0x34, 0x1b, 0x44, 0x40, 0x7d, 0x8b, 0x55, - 0x3d, 0x88, 0x12, 0x7e, 0xe0, 0x07, 0x9c, 0xc5, 0x18, 0xbd, 0xde, 0x6a, 0x6d, 0x55, 0x35, 0x3d, - 0x34, 0xa2, 0xf4, 0x27, 0x72, 0x13, 0xf1, 0x23, 0x1e, 0xa7, 0x2e, 0x4f, 0x63, 0xe6, 0xdd, 0x67, - 0xdc, 0xf1, 0x1c, 0xee, 0xd4, 0xae, 0xc4, 0x1a, 0xba, 0x7f, 0x27, 0xcf, 0xac, 0xf3, 0x11, 0xe0, - 0x7c, 0x66, 0xfd, 0x1f, 0x89, 0x81, 0x85, 0x97, 0xde, 0x26, 0xab, 0xc8, 0xd8, 0x17, 0x25, 0x33, - 0x51, 0xc9, 0xb2, 0x29, 0x92, 0x5a, 0x83, 0x41, 0x17, 0xe8, 0x67, 0xe4, 0xd2, 0xa4, 0xd8, 0x8f, - 0xe2, 0xc9, 0x6c, 0xd8, 0xce, 0x33, 0x6b, 0x4e, 0x07, 0x73, 0x48, 0xff, 0x53, 0xb2, 0xac, 0x9a, - 0x94, 0x28, 0xd2, 0x09, 0x8f, 0x62, 0x56, 0xab, 0xeb, 0x47, 0x02, 0x2b, 0x8b, 0x34, 0x9a, 0x80, - 0xfc, 0xf4, 0xff, 0x58, 0x22, 0x2b, 0xf7, 0xca, 0x5e, 0xb4, 0x86, 0x6b, 0x03, 0x26, 0xaa, 0x88, - 0xcc, 0x76, 0xc3, 0xbe, 0x24, 0x8a, 0x9b, 0x8e, 0x43, 0x45, 0xa2, 0x07, 0x84, 0x6a, 0x3b, 0xba, - 0xef, 0x70, 0xe4, 0xca, 0x4d, 0xbc, 0x96, 0x67, 0x56, 0x83, 0x16, 0x1a, 0xb0, 0x62, 0x76, 0x1b, - 0xe5, 0x44, 0x65, 0x70, 0x39, 0xbb, 0xc2, 0xa1, 0x22, 0xd1, 0x4f, 0xc8, 0x46, 0x99, 0x7f, 0x47, - 0x2c, 0xe4, 0x2a, 0x5d, 0x69, 0x9e, 0x59, 0x35, 0x0d, 0xd4, 0xe4, 0x32, 0x5e, 0xc6, 0xb9, 0xe3, - 0xf5, 0xdf, 0x05, 0x62, 0xa0, 0xbe, 0x98, 0x58, 0x1d, 0x0c, 0x3b, 0x56, 0xe7, 0x5d, 0x4e, 0x5c, - 0x68, 0xa0, 0x26, 0xd3, 0x2f, 0xc8, 0x15, 0x0d, 0xb9, 0x1b, 0xfd, 0x10, 0x06, 0x91, 0xe3, 0x15, - 0x51, 0xbb, 0x96, 0x67, 0x56, 0xb3, 0x01, 0x34, 0xc3, 0xe2, 0x0c, 0xdc, 0x0a, 0x86, 0xd5, 0xa4, - 0x53, 0x9e, 0xc1, 0xbc, 0x16, 0x1a, 0x30, 0xea, 0x92, 0x6b, 0xa2, 0x74, 0x9c, 0x02, 0x3b, 0x66, - 0x31, 0x0b, 0x5d, 0xe6, 0x95, 0xb7, 0xbf, 0xb7, 0xbe, 0xd3, 0xbe, 0xb5, 0x62, 0xdf, 0xcc, 0x33, - 0xeb, 
0x8d, 0x85, 0x46, 0xb3, 0x14, 0x81, 0xc5, 0x7e, 0xca, 0xe7, 0x47, 0xad, 0xb9, 0x0b, 0x6c, - 0xc1, 0xf3, 0x63, 0xb6, 0x3f, 0x60, 0xc7, 0xc9, 0x01, 0xe3, 0xee, 0xa8, 0x28, 0xac, 0xfa, 0xfe, - 0x2a, 0x5a, 0x68, 0xc0, 0xe8, 0xb7, 0xa4, 0xe7, 0x46, 0x78, 0xdd, 0xfd, 0x28, 0xdc, 0x8f, 0x42, - 0x1e, 0x47, 0xc1, 0xa1, 0xc3, 0x59, 0xe8, 0x9e, 0x62, 0xed, 0xed, 0xd8, 0x37, 0xf2, 0xcc, 0x5a, - 0x68, 0x03, 0x0b, 0x35, 0xd4, 0x23, 0x37, 0x26, 0xfe, 0x84, 0x89, 0x2e, 0xf5, 0x4d, 0xec, 0x4c, - 0x26, 0x2c, 0x96, 0x59, 0xca, 0x3c, 0x59, 0xdb, 0x64, 0xad, 0xde, 0xc9, 0x33, 0xeb, 0x4c, 0x3b, - 0x38, 0x53, 0xdb, 0xff, 0xd3, 0x20, 0x06, 0xc6, 0x49, 0x5c, 0xbf, 0x11, 0x73, 0x3c, 0x19, 0x34, - 0x51, 0x8f, 0xf4, 0x7b, 0x5f, 0xd5, 0x40, 0x4d, 0xae, 0x70, 0xe5, 0xea, 0x8c, 0x06, 0xae, 0x5c, - 0x4f, 0x4d, 0xa6, 0xfb, 0xe4, 0xb2, 0xc7, 0xdc, 0x68, 0x3c, 0x89, 0xb1, 0xf8, 0xc9, 0xa9, 0x65, - 0xe8, 0xae, 0xe4, 0x99, 0x35, 0xaf, 0x84, 0x79, 0xa8, 0xee, 0x44, 0x8f, 0xd0, 0x9c, 0x13, 0xb9, - 0x8c, 0x79, 0x88, 0xde, 0x21, 0x9b, 0xf5, 0x75, 0xc8, 0xb6, 0xb6, 0x95, 0x67, 0x56, 0x5d, 0x05, - 0x75, 0x40, 0xd0, 0x31, 0x97, 0xee, 0xa6, 0x93, 0xc0, 0x77, 0x1d, 0x41, 0xbf, 0x58, 0xd2, 0x6b, - 0x2a, 0xa8, 0x03, 0x82, 0x3e, 0xa9, 0xb5, 0x2f, 0x52, 0xd2, 0x6b, 0x2a, 0xa8, 0x03, 0x74, 0x42, - 0x76, 0x8a, 0xc0, 0x2e, 0x68, 0x30, 0xaa, 0x1d, 0xbe, 0x95, 0x67, 0xd6, 0x2b, 0x6d, 0xe1, 0x95, - 0x16, 0xf4, 0x94, 0xbc, 0xa9, 0xc7, 0x70, 0xd1, 0xa4, 0xb2, 0x49, 0xbe, 0x9d, 0x67, 0xd6, 0x79, - 0xcc, 0xe1, 0x3c, 0x46, 0xfd, 0xbf, 0x3b, 0xc4, 0xc0, 0x87, 0xa9, 0xa8, 0xf1, 0x4c, 0x3e, 0x2a, - 0x0e, 0xa2, 0x34, 0xac, 0x74, 0x18, 0x1d, 0x87, 0x8a, 0x24, 0x9a, 0x24, 0x9b, 0x3d, 0x45, 0x4e, - 0x52, 0xd1, 0xab, 0x64, 0xa5, 0x34, 0x64, 0x93, 0xac, 0xeb, 0x60, 0x0e, 0xa1, 0x1f, 0x91, 0x75, - 0x85, 0x61, 0xf1, 0x96, 0xcf, 0x43, 0xc3, 0xbe, 0x9c, 0x67, 0x56, 0x55, 0x01, 0x55, 0x51, 0x10, - 0xf1, 0x3d, 0x0b, 0xcc, 0x65, 0xfe, 0xa3, 0xe2, 0x31, 0x88, 0xc4, 0x8a, 0x02, 0xaa, 0xa2, 0x78, - 0xd6, 0x21, 0x80, 0x2d, 0x49, 0xa6, 0x17, 0x3e, 0xeb, 0x0a, 0x10, 0xca, 0xa1, 0x78, 0x2d, 0xc6, - 0x72, 0xad, 0x32, 0x97, 0x0c, 0xf9, 0x5a, 0x9c, 0x61, 0x50, 0x8c, 0x44, 0x00, 0x3d, 0xbd, 0xc4, - 0x2f, 0x97, 0x4d, 0x52, 0xc7, 0xa1, 0x22, 0x89, 0x7c, 0xc3, 0x72, 0x7c, 0xc8, 0xc2, 0x21, 0x1f, - 0x1d, 0xb1, 0xf8, 0x51, 0xf1, 0x06, 0xc4, 0x7c, 0x9b, 0x53, 0xc2, 0x3c, 0x64, 0x0f, 0x9e, 0x3e, - 0x37, 0x5b, 0xcf, 0x9e, 0x9b, 0xad, 0x97, 0xcf, 0xcd, 0xf6, 0xcf, 0x53, 0xb3, 0xfd, 0xfb, 0xd4, - 0x6c, 0x3f, 0x99, 0x9a, 0xed, 0xa7, 0x53, 0xb3, 0xfd, 0xcf, 0xd4, 0x6c, 0xff, 0x3b, 0x35, 0x5b, - 0x2f, 0xa7, 0x66, 0xfb, 0x97, 0x17, 0x66, 0xeb, 0xe9, 0x0b, 0xb3, 0xf5, 0xec, 0x85, 0xd9, 0xfa, - 0xee, 0xbd, 0xa1, 0xcf, 0x47, 0xe9, 0x60, 0xd7, 0x8d, 0xc6, 0x7b, 0xc3, 0xd8, 0x39, 0x76, 0x42, - 0x67, 0x2f, 0x88, 0x1e, 0xfa, 0x7b, 0x4d, 0x7f, 0xfb, 0x07, 0x5d, 0xfc, 0x53, 0xff, 0xc1, 0xff, - 0x01, 0x00, 0x00, 0xff, 0xff, 0x44, 0xbc, 0x0a, 0x32, 0x15, 0x10, 0x00, 0x00, + // 1358 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0xcd, 0x6f, 0xdc, 0x44, + 0x14, 0xdf, 0xcd, 0xd6, 0x9b, 0x74, 0xf2, 0xd5, 0x4e, 0x52, 0xba, 0xa5, 0x95, 0x1d, 0x16, 0x2a, + 0x8a, 0x90, 0xb2, 0x2a, 0x45, 0x42, 0x20, 0x2a, 0x21, 0xa7, 0x44, 0xaa, 0x94, 0x8a, 0xf2, 0x16, + 0x04, 0x82, 0x93, 0x63, 0xbf, 0xec, 0x5a, 0xf5, 0xda, 0x8e, 0x3d, 0x0e, 0x8d, 0x84, 0x04, 0x7f, + 0x02, 0x77, 0xee, 0x88, 0x0b, 0x27, 0x4e, 0x9c, 0xb9, 0xf4, 0xd8, 0x63, 0x4f, 0x16, 0xdd, 0x5c, + 0x90, 0x4f, 0xfd, 0x03, 0x38, 0xa0, 0xf9, 0x58, 0x7f, 0xad, 0x37, 0xcd, 0x65, 0x3d, 0xef, 0xf7, + 
0xde, 0xef, 0xcd, 0xcc, 0x9b, 0x79, 0xef, 0xcd, 0x92, 0x9d, 0xf0, 0xc9, 0x68, 0xe0, 0x05, 0xa3, + 0x63, 0x6f, 0x12, 0x38, 0xe8, 0x0d, 0x62, 0x66, 0xb1, 0x58, 0xfe, 0xee, 0x86, 0x51, 0xc0, 0x02, + 0xaa, 0x09, 0xe1, 0xcd, 0xed, 0x51, 0x30, 0x0a, 0x04, 0x32, 0xe0, 0x23, 0xa9, 0xec, 0xff, 0xb6, + 0x44, 0xba, 0x80, 0x71, 0xe2, 0x31, 0xfa, 0x31, 0x59, 0x8e, 0x93, 0xc9, 0xc4, 0x8a, 0x4e, 0x7b, + 0xed, 0x9d, 0xf6, 0x9d, 0xd5, 0x0f, 0x36, 0x76, 0xa5, 0x9b, 0xa1, 0x44, 0xcd, 0xcd, 0x67, 0xa9, + 0xd1, 0xca, 0x52, 0x63, 0x66, 0x06, 0xb3, 0x01, 0xa7, 0x1e, 0x27, 0x18, 0xb9, 0x18, 0xf5, 0x96, + 0x2a, 0xd4, 0x2f, 0x25, 0x5a, 0x50, 0x95, 0x19, 0xcc, 0x06, 0xf4, 0x3e, 0x59, 0x71, 0xfd, 0x11, + 0xc6, 0x0c, 0xa3, 0x5e, 0x47, 0x70, 0x37, 0x15, 0xf7, 0xa1, 0x82, 0xcd, 0x2b, 0x8a, 0x9c, 0x1b, + 0x42, 0x3e, 0xa2, 0x1f, 0x92, 0xae, 0x6d, 0xd9, 0x63, 0x8c, 0x7b, 0x97, 0x04, 0x79, 0x5d, 0x91, + 0xf7, 0x04, 0x68, 0xae, 0x2b, 0xaa, 0x26, 0x8c, 0x40, 0xd9, 0xd2, 0xbb, 0x44, 0x73, 0x7d, 0x07, + 0x9f, 0xf6, 0x34, 0x41, 0x5a, 0xcb, 0x67, 0x74, 0xf0, 0x69, 0xc1, 0x11, 0x26, 0x20, 0x3f, 0xfd, + 0x5f, 0x2f, 0x91, 0xee, 0x5e, 0xce, 0xb6, 0xc7, 0x89, 0xff, 0x44, 0x85, 0x69, 0xad, 0x3c, 0x65, + 0x69, 0x46, 0x6e, 0x02, 0xf2, 0x53, 0x4c, 0xb8, 0x74, 0x1e, 0xa5, 0x3c, 0x21, 0xdf, 0x59, 0x24, + 0x0e, 0x46, 0x85, 0xa5, 0xca, 0xd9, 0x50, 0x1c, 0x65, 0x03, 0xea, 0x4b, 0xf7, 0xc8, 0xaa, 0x30, + 0x93, 0x67, 0xaa, 0x82, 0x52, 0xa5, 0x6e, 0x29, 0x6a, 0xd9, 0x10, 0xca, 0x02, 0xdd, 0x27, 0x6b, + 0x27, 0x81, 0x97, 0x4c, 0x50, 0x79, 0xd1, 0x1a, 0xbc, 0x6c, 0x2b, 0x2f, 0x15, 0x4b, 0xa8, 0x48, + 0xdc, 0x4f, 0xcc, 0x4f, 0x79, 0xb6, 0x9a, 0xee, 0x79, 0x7e, 0xca, 0x96, 0x50, 0x91, 0xf8, 0xa6, + 0x3c, 0xeb, 0x10, 0x3d, 0xe5, 0x66, 0xf9, 0xbc, 0x4d, 0x95, 0x0c, 0xa1, 0x2c, 0xd0, 0xef, 0xc9, + 0x96, 0xeb, 0xc7, 0xcc, 0xf2, 0xd9, 0x23, 0x64, 0x91, 0x6b, 0x2b, 0x67, 0x2b, 0x0d, 0xce, 0x6e, + 0x2a, 0x67, 0x4d, 0x04, 0x68, 0x02, 0xfb, 0x7f, 0x75, 0xc9, 0xb2, 0x4a, 0x13, 0xfa, 0x35, 0xb9, + 0x7e, 0x78, 0xca, 0x30, 0x7e, 0x1c, 0x05, 0x36, 0xc6, 0x31, 0x3a, 0x8f, 0x31, 0x1a, 0xa2, 0x1d, + 0xf8, 0x8e, 0xb8, 0x30, 0x1d, 0xf3, 0x66, 0x96, 0x1a, 0x8b, 0x4c, 0x60, 0x91, 0x82, 0xbb, 0xf5, + 0x5c, 0xbf, 0xd1, 0xed, 0x52, 0xe1, 0x76, 0x81, 0x09, 0x2c, 0x52, 0xd0, 0x87, 0x64, 0x8b, 0x05, + 0xcc, 0xf2, 0xcc, 0xca, 0xb4, 0xe2, 0xce, 0x75, 0xcc, 0xeb, 0x3c, 0x08, 0x0d, 0x6a, 0x68, 0x02, + 0x73, 0x57, 0x07, 0x95, 0xa9, 0xc4, 0x1d, 0x2c, 0xbb, 0xaa, 0xaa, 0xa1, 0x09, 0xa4, 0x77, 0xc8, + 0x0a, 0x3e, 0x45, 0xfb, 0x2b, 0x77, 0x82, 0xe2, 0xf6, 0xb5, 0xcd, 0x35, 0x5e, 0x00, 0x66, 0x18, + 0xe4, 0x23, 0xfa, 0x3e, 0xb9, 0x7c, 0x9c, 0x60, 0x82, 0xc2, 0xb4, 0x2b, 0x4c, 0xd7, 0xb3, 0xd4, + 0x28, 0x40, 0x28, 0x86, 0x74, 0x97, 0x90, 0x38, 0x39, 0x94, 0xa5, 0x27, 0x16, 0xf7, 0xa8, 0x63, + 0x6e, 0x64, 0xa9, 0x51, 0x42, 0xa1, 0x34, 0xa6, 0x07, 0x64, 0x5b, 0xac, 0xee, 0x73, 0x9f, 0xc9, + 0xeb, 0xc8, 0x92, 0xc8, 0x47, 0x47, 0x5c, 0x9a, 0x8e, 0xd9, 0xcb, 0x52, 0xa3, 0x51, 0x0f, 0x8d, + 0x28, 0xed, 0x93, 0x6e, 0x1c, 0x7a, 0x2e, 0x8b, 0x7b, 0x97, 0x05, 0x9f, 0xf0, 0xfc, 0x95, 0x08, + 0xa8, 0xaf, 0xb0, 0x19, 0x5b, 0x91, 0x13, 0xf7, 0x48, 0xc9, 0x46, 0x20, 0xa0, 0xbe, 0xf9, 0xaa, + 0x1e, 0x07, 0x31, 0xdb, 0x77, 0x3d, 0x86, 0x91, 0x88, 0x5e, 0x6f, 0xb5, 0xb6, 0xaa, 0x9a, 0x1e, + 0x1a, 0x51, 0xfa, 0x13, 0xb9, 0x2d, 0xf0, 0x21, 0x8b, 0x12, 0x9b, 0x25, 0x11, 0x3a, 0x8f, 0x90, + 0x59, 0x8e, 0xc5, 0xac, 0xda, 0x95, 0x58, 0x13, 0xee, 0xdf, 0xcb, 0x52, 0xe3, 0x62, 0x04, 0xb8, + 0x98, 0x59, 0xff, 0x47, 0xa2, 0x89, 0xc2, 0x4b, 0xef, 0x92, 0x55, 0xc1, 0xd8, 0xe3, 0x25, 0x33, + 0x56, 0xc9, 0xb2, 0xc9, 
0x93, 0xba, 0x04, 0x43, 0x59, 0xa0, 0x9f, 0x91, 0x2b, 0x61, 0xbe, 0x1f, + 0xc5, 0x93, 0xd9, 0xb0, 0x9d, 0xa5, 0xc6, 0x9c, 0x0e, 0xe6, 0x90, 0xfe, 0xa7, 0x64, 0x59, 0x35, + 0x29, 0x5e, 0xa4, 0x63, 0x16, 0x44, 0x58, 0xab, 0xeb, 0x43, 0x8e, 0x15, 0x45, 0x5a, 0x98, 0x80, + 0xfc, 0xf4, 0xff, 0x58, 0x22, 0x2b, 0x0f, 0x8b, 0x5e, 0xb4, 0x26, 0xd6, 0x06, 0xc8, 0xab, 0x88, + 0xcc, 0x76, 0xcd, 0xbc, 0xc2, 0x8b, 0x5b, 0x19, 0x87, 0x8a, 0x44, 0xf7, 0x09, 0x2d, 0xed, 0xe8, + 0x91, 0xc5, 0x04, 0x57, 0x6e, 0xe2, 0x8d, 0x2c, 0x35, 0x1a, 0xb4, 0xd0, 0x80, 0xe5, 0xb3, 0x9b, + 0x42, 0x8e, 0x55, 0x06, 0x17, 0xb3, 0x2b, 0x1c, 0x2a, 0x12, 0xfd, 0x84, 0x6c, 0x14, 0xf9, 0x37, + 0x44, 0x9f, 0xa9, 0x74, 0xa5, 0x59, 0x6a, 0xd4, 0x34, 0x50, 0x93, 0x8b, 0x78, 0x69, 0x17, 0x8e, + 0xd7, 0x7f, 0x97, 0x88, 0x26, 0xf4, 0xf9, 0xc4, 0xea, 0x60, 0xf0, 0x48, 0x9d, 0x77, 0x31, 0x71, + 0xae, 0x81, 0x9a, 0x4c, 0xbf, 0x20, 0xd7, 0x4a, 0xc8, 0x83, 0xe0, 0x07, 0xdf, 0x0b, 0x2c, 0x27, + 0x8f, 0xda, 0x8d, 0x2c, 0x35, 0x9a, 0x0d, 0xa0, 0x19, 0xe6, 0x67, 0x60, 0x57, 0x30, 0x51, 0x4d, + 0x3a, 0xc5, 0x19, 0xcc, 0x6b, 0xa1, 0x01, 0xa3, 0x36, 0xb9, 0xc1, 0x4b, 0xc7, 0x29, 0xe0, 0x11, + 0x46, 0xe8, 0xdb, 0xe8, 0x14, 0xb7, 0xbf, 0xb7, 0xbe, 0xd3, 0xbe, 0xb3, 0x62, 0xde, 0xce, 0x52, + 0xe3, 0xad, 0x85, 0x46, 0xb3, 0x14, 0x81, 0xc5, 0x7e, 0x8a, 0xe7, 0x47, 0xad, 0xb9, 0x73, 0x6c, + 0xc1, 0xf3, 0x63, 0xb6, 0x3f, 0xc0, 0xa3, 0x78, 0x1f, 0x99, 0x3d, 0xce, 0x0b, 0x6b, 0x79, 0x7f, + 0x15, 0x2d, 0x34, 0x60, 0xf4, 0x5b, 0xd2, 0xb3, 0x03, 0x71, 0xdd, 0xdd, 0xc0, 0xdf, 0x0b, 0x7c, + 0x16, 0x05, 0xde, 0x81, 0xc5, 0xd0, 0xb7, 0x4f, 0x45, 0xed, 0xed, 0x98, 0xb7, 0xb2, 0xd4, 0x58, + 0x68, 0x03, 0x0b, 0x35, 0xd4, 0x21, 0xb7, 0x42, 0x37, 0x44, 0xde, 0xa5, 0xbe, 0x89, 0xac, 0x30, + 0xc4, 0x48, 0x66, 0x29, 0x3a, 0xb2, 0xb6, 0xc9, 0x5a, 0xbd, 0x93, 0xa5, 0xc6, 0xb9, 0x76, 0x70, + 0xae, 0xb6, 0xff, 0xa7, 0x46, 0x34, 0x11, 0x27, 0x7e, 0xfd, 0xc6, 0x68, 0x39, 0x32, 0x68, 0xbc, + 0x1e, 0x95, 0xef, 0x7d, 0x55, 0x03, 0x35, 0xb9, 0xc2, 0x95, 0xab, 0xd3, 0x1a, 0xb8, 0x72, 0x3d, + 0x35, 0x99, 0xee, 0x91, 0xab, 0x0e, 0xda, 0xc1, 0x24, 0x8c, 0x44, 0xf1, 0x93, 0x53, 0xcb, 0xd0, + 0x5d, 0xcb, 0x52, 0x63, 0x5e, 0x09, 0xf3, 0x50, 0xdd, 0x49, 0x39, 0x42, 0x73, 0x4e, 0xe4, 0x32, + 0xe6, 0x21, 0x7a, 0x9f, 0x6c, 0xd6, 0xd7, 0x21, 0xdb, 0xda, 0x56, 0x96, 0x1a, 0x75, 0x15, 0xd4, + 0x01, 0x4e, 0x17, 0xb9, 0xf4, 0x20, 0x09, 0x3d, 0xd7, 0xb6, 0x38, 0xfd, 0x72, 0x41, 0xaf, 0xa9, + 0xa0, 0x0e, 0x70, 0x7a, 0x58, 0x6b, 0x5f, 0xa4, 0xa0, 0xd7, 0x54, 0x50, 0x07, 0x68, 0x48, 0x76, + 0xf2, 0xc0, 0x2e, 0x68, 0x30, 0xaa, 0x1d, 0xbe, 0x93, 0xa5, 0xc6, 0x6b, 0x6d, 0xe1, 0xb5, 0x16, + 0xf4, 0x94, 0xbc, 0x5d, 0x8e, 0xe1, 0xa2, 0x49, 0x65, 0x93, 0x7c, 0x37, 0x4b, 0x8d, 0x8b, 0x98, + 0xc3, 0x45, 0x8c, 0xfa, 0x7f, 0x77, 0x88, 0x26, 0x1e, 0xa6, 0xbc, 0xc6, 0xa3, 0x7c, 0x54, 0xec, + 0x07, 0x89, 0x5f, 0xe9, 0x30, 0x65, 0x1c, 0x2a, 0x12, 0x6f, 0x92, 0x38, 0x7b, 0x8a, 0x1c, 0x27, + 0xbc, 0x57, 0xc9, 0x4a, 0xa9, 0xc9, 0x26, 0x59, 0xd7, 0xc1, 0x1c, 0x42, 0x3f, 0x22, 0xeb, 0x0a, + 0x13, 0xc5, 0x5b, 0x3e, 0x0f, 0x35, 0xf3, 0x6a, 0x96, 0x1a, 0x55, 0x05, 0x54, 0x45, 0x4e, 0x14, + 0xef, 0x59, 0x40, 0x1b, 0xdd, 0x93, 0xfc, 0x31, 0x28, 0x88, 0x15, 0x05, 0x54, 0x45, 0xfe, 0xac, + 0x13, 0x80, 0x68, 0x49, 0x32, 0xbd, 0xc4, 0xb3, 0x2e, 0x07, 0xa1, 0x18, 0xf2, 0xd7, 0x62, 0x24, + 0xd7, 0x2a, 0x73, 0x49, 0x93, 0xaf, 0xc5, 0x19, 0x06, 0xf9, 0x88, 0x07, 0xd0, 0x29, 0x97, 0xf8, + 0xe5, 0xa2, 0x49, 0x96, 0x71, 0xa8, 0x48, 0x3c, 0xdf, 0x44, 0x39, 0x3e, 0x40, 0x7f, 0xc4, 0xc6, + 0x43, 0x8c, 0x4e, 0xf2, 0x37, 0xa0, 0xc8, 0xb7, 
0x39, 0x25, 0xcc, 0x43, 0x26, 0x3e, 0x7f, 0xa9, + 0xb7, 0x5e, 0xbc, 0xd4, 0x5b, 0xaf, 0x5e, 0xea, 0xed, 0x9f, 0xa7, 0x7a, 0xfb, 0xf7, 0xa9, 0xde, + 0x7e, 0x36, 0xd5, 0xdb, 0xcf, 0xa7, 0x7a, 0xfb, 0x9f, 0xa9, 0xde, 0xfe, 0x77, 0xaa, 0xb7, 0x5e, + 0x4d, 0xf5, 0xf6, 0x2f, 0x67, 0x7a, 0xeb, 0xf9, 0x99, 0xde, 0x7a, 0x71, 0xa6, 0xb7, 0xbe, 0x1b, + 0x8c, 0x5c, 0x36, 0x4e, 0x0e, 0x77, 0xed, 0x60, 0x32, 0x18, 0x45, 0xd6, 0x91, 0xe5, 0x5b, 0x03, + 0x2f, 0x78, 0xe2, 0x0e, 0x4e, 0xee, 0x0d, 0x9a, 0xfe, 0xf9, 0x1f, 0x76, 0xc5, 0xff, 0xfa, 0x7b, + 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0x19, 0x80, 0x75, 0xde, 0x18, 0x10, 0x00, 0x00, } func (this *Result) Equal(that interface{}) bool { diff --git a/pkg/logqlmodel/stats/stats.proto b/pkg/logqlmodel/stats/stats.proto index df21e4a2ee9a6..b53747b7941fd 100644 --- a/pkg/logqlmodel/stats/stats.proto +++ b/pkg/logqlmodel/stats/stats.proto @@ -4,7 +4,7 @@ package stats; import "gogoproto/gogo.proto"; -option go_package = "github.com/grafana/loki/pkg/logqlmodel/stats"; +option go_package = "github.com/grafana/loki/v3/pkg/logqlmodel/stats"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/loki/common/common.go b/pkg/loki/common/common.go index 6f7fd1c768edf..b7bb08e2cd46e 100644 --- a/pkg/loki/common/common.go +++ b/pkg/loki/common/common.go @@ -6,17 +6,17 @@ import ( "github.com/grafana/dskit/flagext" "github.com/grafana/dskit/netutil" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/congestion" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/congestion" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/ring" ) // Config holds common config that can be shared between multiple other config sections. 
diff --git a/pkg/loki/config_compat.go b/pkg/loki/config_compat.go index 1e4f800c46476..fff357453be6c 100644 --- a/pkg/loki/config_compat.go +++ b/pkg/loki/config_compat.go @@ -4,9 +4,9 @@ import ( "errors" "fmt" - "github.com/grafana/loki/pkg/ingester/index" - frontend "github.com/grafana/loki/pkg/lokifrontend/frontend/v2" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/ingester/index" + frontend "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2" + "github.com/grafana/loki/v3/pkg/storage/config" ) func ValidateConfigCompatibility(c Config) error { diff --git a/pkg/loki/config_test.go b/pkg/loki/config_test.go index 73fc2cbb46aba..7a29f80bf02b9 100644 --- a/pkg/loki/config_test.go +++ b/pkg/loki/config_test.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/storage/config" ) func TestCrossComponentValidation(t *testing.T) { diff --git a/pkg/loki/config_wrapper.go b/pkg/loki/config_wrapper.go index c602f53cc6dd1..e10618e88c4ff 100644 --- a/pkg/loki/config_wrapper.go +++ b/pkg/loki/config_wrapper.go @@ -11,14 +11,14 @@ import ( "github.com/grafana/dskit/flagext" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/loki/common" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/cfg" - lokiring "github.com/grafana/loki/pkg/util/ring" - - "github.com/grafana/loki/pkg/ruler/rulestore/local" - loki_net "github.com/grafana/loki/pkg/util/net" + "github.com/grafana/loki/v3/pkg/loki/common" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/cfg" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + loki_net "github.com/grafana/loki/v3/pkg/util/net" ) const versionFlag = "version" diff --git a/pkg/loki/config_wrapper_test.go b/pkg/loki/config_wrapper_test.go index f6e22f74add51..1852846aa2998 100644 --- a/pkg/loki/config_wrapper_test.go +++ b/pkg/loki/config_wrapper_test.go @@ -14,21 +14,21 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/loki/common" - "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - loki_net "github.com/grafana/loki/pkg/util/net" - lokiring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/loki/common" + "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + loki_net "github.com/grafana/loki/v3/pkg/util/net" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" ) // Can't use a totally empty yaml file or it causes weird behavior in the unmarshalling. diff --git a/pkg/loki/delete_store_listener.go b/pkg/loki/delete_store_listener.go index 10fbc88a9899f..ec2d9978bab87 100644 --- a/pkg/loki/delete_store_listener.go +++ b/pkg/loki/delete_store_listener.go @@ -3,7 +3,7 @@ package loki import ( "github.com/grafana/dskit/services" - "github.com/grafana/loki/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/compactor/deletion" ) func deleteRequestsStoreListener(d deletion.DeleteRequestsClient) *listener { diff --git a/pkg/loki/format_query_handler.go b/pkg/loki/format_query_handler.go index 4e65999ebbb38..4b715215a283c 100644 --- a/pkg/loki/format_query_handler.go +++ b/pkg/loki/format_query_handler.go @@ -4,8 +4,8 @@ import ( "encoding/json" "net/http" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util/server" ) func formatQueryHandler() http.HandlerFunc { diff --git a/pkg/loki/loki.go b/pkg/loki/loki.go index 3d8bd9f9bec14..c77ef07892931 100644 --- a/pkg/loki/loki.go +++ b/pkg/loki/loki.go @@ -29,43 +29,43 @@ import ( "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - compactorclient "github.com/grafana/loki/pkg/compactor/client" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - ingester_client "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/loki/common" - "github.com/grafana/loki/pkg/lokifrontend" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/querier" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/worker" - "github.com/grafana/loki/pkg/ruler" - base_ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/scheduler" - internalserver "github.com/grafana/loki/pkg/server" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/tracing" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/fakeauth" - "github.com/grafana/loki/pkg/util/limiter" - util_log "github.com/grafana/loki/pkg/util/log" - lokiring "github.com/grafana/loki/pkg/util/ring" - 
serverutil "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + compactorclient "github.com/grafana/loki/v3/pkg/compactor/client" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + ingester_client "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/loki/common" + "github.com/grafana/loki/v3/pkg/lokifrontend" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/querier" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/worker" + "github.com/grafana/loki/v3/pkg/ruler" + base_ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/scheduler" + internalserver "github.com/grafana/loki/v3/pkg/server" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/tracing" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/fakeauth" + "github.com/grafana/loki/v3/pkg/util/limiter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + serverutil "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/validation" ) // Config is the root config for Loki. 
diff --git a/pkg/loki/loki_test.go b/pkg/loki/loki_test.go index 81d7c0384d7dc..a4e6ff73ca565 100644 --- a/pkg/loki/loki_test.go +++ b/pkg/loki/loki_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - internalserver "github.com/grafana/loki/pkg/server" + internalserver "github.com/grafana/loki/v3/pkg/server" ) func TestFlagDefaults(t *testing.T) { diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index cc616924c1f07..79c86836331aa 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -33,52 +33,52 @@ import ( "github.com/prometheus/client_golang/prometheus/collectors/version" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/logqlmodel/stats" - - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - compactorclient "github.com/grafana/loki/pkg/compactor/client" - "github.com/grafana/loki/pkg/compactor/client/grpc" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/compactor/generationnumber" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/lokifrontend/frontend" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/ruler" - base_ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/scheduler" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - boltdbcompactor "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/limiter" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/querylimits" - lokiring "github.com/grafana/loki/pkg/util/ring" - serverutil "github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + compactorclient "github.com/grafana/loki/v3/pkg/compactor/client" + "github.com/grafana/loki/v3/pkg/compactor/client/grpc" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + 
"github.com/grafana/loki/v3/pkg/compactor/generationnumber" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/ruler" + base_ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/scheduler" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + boltdbcompactor "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/limiter" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/querylimits" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" + serverutil "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/validation" ) const maxChunkAgeForTableManager = 12 * time.Hour diff --git a/pkg/loki/modules_test.go b/pkg/loki/modules_test.go index 4529eb7c23c88..989d8e588c0de 100644 --- a/pkg/loki/modules_test.go +++ b/pkg/loki/modules_test.go @@ -13,13 +13,13 @@ import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - bloomshipperconfig "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + bloomshipperconfig "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" ) func Test_calculateMaxLookBack(t *testing.T) { diff --git a/pkg/loki/runtime_config.go b/pkg/loki/runtime_config.go index 3432ee1b68b80..e8e3c7e315870 100644 
--- a/pkg/loki/runtime_config.go +++ b/pkg/loki/runtime_config.go @@ -9,9 +9,9 @@ import ( "github.com/grafana/dskit/runtimeconfig" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/runtime" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/runtime" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // runtimeConfigValues are values that can be reloaded from configuration file while Loki is running. diff --git a/pkg/loki/runtime_config_test.go b/pkg/loki/runtime_config_test.go index d0fd2ffa41038..cf604455929c6 100644 --- a/pkg/loki/runtime_config_test.go +++ b/pkg/loki/runtime_config_test.go @@ -16,8 +16,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/runtime" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/runtime" + "github.com/grafana/loki/v3/pkg/validation" ) func Test_LoadRetentionRules(t *testing.T) { diff --git a/pkg/loki/version_handler.go b/pkg/loki/version_handler.go index 316d4825f7005..ef49d1b0f7de7 100644 --- a/pkg/loki/version_handler.go +++ b/pkg/loki/version_handler.go @@ -6,7 +6,7 @@ import ( prom "github.com/prometheus/prometheus/web/api/v1" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" ) func versionHandler() http.HandlerFunc { diff --git a/pkg/loki/version_handler_test.go b/pkg/loki/version_handler_test.go index c7b9094b4ae28..fb39b63f37568 100644 --- a/pkg/loki/version_handler_test.go +++ b/pkg/loki/version_handler_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/build" ) func TestVersionHandler(t *testing.T) { diff --git a/pkg/lokifrontend/config.go b/pkg/lokifrontend/config.go index 30ab5cd29fecc..f53f17085c70a 100644 --- a/pkg/lokifrontend/config.go +++ b/pkg/lokifrontend/config.go @@ -5,9 +5,9 @@ import ( "github.com/grafana/dskit/crypto/tls" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - v1 "github.com/grafana/loki/pkg/lokifrontend/frontend/v1" - v2 "github.com/grafana/loki/pkg/lokifrontend/frontend/v2" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + v1 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1" + v2 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2" ) type Config struct { diff --git a/pkg/lokifrontend/frontend/config.go b/pkg/lokifrontend/frontend/config.go index 54eaa264d98f0..fb61a482563fd 100644 --- a/pkg/lokifrontend/frontend/config.go +++ b/pkg/lokifrontend/frontend/config.go @@ -9,11 +9,11 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - v1 "github.com/grafana/loki/pkg/lokifrontend/frontend/v1" - v2 "github.com/grafana/loki/pkg/lokifrontend/frontend/v2" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + v1 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1" + v2 "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" ) // This struct combines several configuration options together to preserve backwards compatibility. 
diff --git a/pkg/lokifrontend/frontend/downstream_roundtripper.go b/pkg/lokifrontend/frontend/downstream_roundtripper.go index 90f330900c32b..86010e127621f 100644 --- a/pkg/lokifrontend/frontend/downstream_roundtripper.go +++ b/pkg/lokifrontend/frontend/downstream_roundtripper.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/user" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // RoundTripper that forwards requests to downstream URL. diff --git a/pkg/lokifrontend/frontend/transport/handler.go b/pkg/lokifrontend/frontend/transport/handler.go index 1c271805bbdab..7c9e50daf8b59 100644 --- a/pkg/lokifrontend/frontend/transport/handler.go +++ b/pkg/lokifrontend/frontend/transport/handler.go @@ -23,11 +23,11 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - querier_stats "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + querier_stats "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/server" ) const ( diff --git a/pkg/lokifrontend/frontend/transport/roundtripper.go b/pkg/lokifrontend/frontend/transport/roundtripper.go index c6e38315930a2..d76512f5b00a6 100644 --- a/pkg/lokifrontend/frontend/transport/roundtripper.go +++ b/pkg/lokifrontend/frontend/transport/roundtripper.go @@ -5,8 +5,8 @@ import ( "github.com/grafana/dskit/httpgrpc" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // GrpcRoundTripper is similar to http.RoundTripper, but works with HTTP requests converted to protobuf messages. 
diff --git a/pkg/lokifrontend/frontend/v1/frontend.go b/pkg/lokifrontend/frontend/v1/frontend.go index cf17b62b03186..3caae56955ca4 100644 --- a/pkg/lokifrontend/frontend/v1/frontend.go +++ b/pkg/lokifrontend/frontend/v1/frontend.go @@ -18,12 +18,12 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/util" - lokigrpc "github.com/grafana/loki/pkg/util/httpgrpc" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/util" + lokigrpc "github.com/grafana/loki/v3/pkg/util/httpgrpc" ) var errTooManyRequest = httpgrpc.Errorf(http.StatusTooManyRequests, "too many outstanding requests") diff --git a/pkg/lokifrontend/frontend/v1/frontend_test.go b/pkg/lokifrontend/frontend/v1/frontend_test.go index a10a55b37984f..2d26e9f188a3b 100644 --- a/pkg/lokifrontend/frontend/v1/frontend_test.go +++ b/pkg/lokifrontend/frontend/v1/frontend_test.go @@ -28,15 +28,15 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - querier_worker "github.com/grafana/loki/pkg/querier/worker" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + querier_worker "github.com/grafana/loki/v3/pkg/querier/worker" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go b/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go index 10d525a0a829a..e31c88efa9e41 100644 --- a/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go +++ b/pkg/lokifrontend/frontend/v1/frontendv1pb/frontend.pb.go @@ -12,7 +12,7 @@ import ( _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" httpgrpc "github.com/grafana/dskit/httpgrpc" - stats "github.com/grafana/loki/pkg/querier/stats" + stats "github.com/grafana/loki/v3/pkg/querier/stats" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/pkg/lokifrontend/frontend/v1/queue_test.go b/pkg/lokifrontend/frontend/v1/queue_test.go index a6f380afd492d..bd429e11bccf3 100644 --- a/pkg/lokifrontend/frontend/v1/queue_test.go +++ b/pkg/lokifrontend/frontend/v1/queue_test.go @@ -17,8 +17,8 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + 
"github.com/grafana/loki/v3/pkg/util/constants" ) func setupFrontend(t *testing.T, config Config) *Frontend { diff --git a/pkg/lokifrontend/frontend/v2/frontend.go b/pkg/lokifrontend/frontend/v2/frontend.go index 99e3e05ad83c9..5311573020735 100644 --- a/pkg/lokifrontend/frontend/v2/frontend.go +++ b/pkg/lokifrontend/frontend/v2/frontend.go @@ -27,14 +27,14 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/lokifrontend/frontend/transport" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/stats" - lokigrpc "github.com/grafana/loki/pkg/util/httpgrpc" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/transport" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/stats" + lokigrpc "github.com/grafana/loki/v3/pkg/util/httpgrpc" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go b/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go index b58c573b29136..b5cdf56f2d9a4 100644 --- a/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go +++ b/pkg/lokifrontend/frontend/v2/frontend_scheduler_worker.go @@ -15,9 +15,9 @@ import ( "github.com/pkg/errors" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/util" - lokiutil "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/util" + lokiutil "github.com/grafana/loki/v3/pkg/util" ) type frontendSchedulerWorkers struct { diff --git a/pkg/lokifrontend/frontend/v2/frontend_test.go b/pkg/lokifrontend/frontend/v2/frontend_test.go index 9a87c5ff1c7cc..41fa9653f6949 100644 --- a/pkg/lokifrontend/frontend/v2/frontend_test.go +++ b/pkg/lokifrontend/frontend/v2/frontend_test.go @@ -19,14 +19,14 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/test" ) const testFrontendWorkerConcurrency = 5 diff --git a/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go b/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go index 3773159c0cc37..8fdae39bf525e 100644 --- a/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go +++ b/pkg/lokifrontend/frontend/v2/frontendv2pb/frontend.pb.go @@ -9,8 +9,8 @@ import ( _ 
"github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" httpgrpc "github.com/grafana/dskit/httpgrpc" - queryrange "github.com/grafana/loki/pkg/querier/queryrange" - stats "github.com/grafana/loki/pkg/querier/stats" + queryrange "github.com/grafana/loki/v3/pkg/querier/queryrange" + stats "github.com/grafana/loki/v3/pkg/querier/stats" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/pkg/push/go.mod b/pkg/push/go.mod index 067ca8f02c80c..f35acdfd50762 100644 --- a/pkg/push/go.mod +++ b/pkg/push/go.mod @@ -1,4 +1,4 @@ -module github.com/grafana/loki/pkg/push +module github.com/grafana/loki/v3/pkg/push go 1.19 diff --git a/pkg/push/push.pb.go b/pkg/push/push.pb.go index 3b07d850ff162..7979872929611 100644 --- a/pkg/push/push.pb.go +++ b/pkg/push/push.pb.go @@ -296,40 +296,41 @@ func init() { func init() { proto.RegisterFile("pkg/push/push.proto", fileDescriptor_35ec442956852c9e) } var fileDescriptor_35ec442956852c9e = []byte{ - // 527 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x40, - 0x10, 0xf5, 0x26, 0x6e, 0xda, 0x6e, 0x4a, 0xa9, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x47, 0x16, 0x87, - 0x1c, 0xc0, 0x96, 0xc2, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x20, 0x71, - 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0x6b, 0xa4, 0xde, 0xf8, 0x84, 0xf2, 0x17, 0x7c, - 0x01, 0xdf, 0xd0, 0x63, 0x8e, 0x15, 0x07, 0x43, 0x9c, 0x0b, 0xca, 0xa9, 0x9f, 0x80, 0xbc, 0xb6, - 0x49, 0x28, 0x48, 0x5c, 0x36, 0x6f, 0x66, 0x67, 0xde, 0x7b, 0x99, 0x1d, 0xc3, 0x07, 0xd9, 0x45, - 0xe0, 0x66, 0xb9, 0x08, 0xd5, 0xe1, 0x64, 0x9c, 0x49, 0x86, 0xb6, 0x62, 0x16, 0x28, 0x64, 0xee, - 0x07, 0x2c, 0x60, 0x0a, 0xba, 0x15, 0xaa, 0xef, 0x4d, 0x2b, 0x60, 0x2c, 0x88, 0xa9, 0xab, 0xa2, - 0x49, 0x7e, 0xee, 0xca, 0x28, 0xa1, 0x42, 0x92, 0x24, 0xab, 0x0b, 0xec, 0x77, 0xb0, 0x7f, 0x9a, - 0x8b, 0xd0, 0xa7, 0x1f, 0x72, 0x2a, 0x24, 0x3a, 0x86, 0x9b, 0x42, 0x72, 0x4a, 0x12, 0x61, 0x80, - 0x41, 0x77, 0xd8, 0x1f, 0x3d, 0x74, 0x5a, 0x05, 0xe7, 0xb5, 0xba, 0x18, 0x4f, 0x49, 0x26, 0x29, - 0xf7, 0x0e, 0xbe, 0x15, 0x56, 0xaf, 0x4e, 0x2d, 0x0b, 0xab, 0xed, 0xf2, 0x5b, 0x60, 0xef, 0xc2, - 0x9d, 0x9a, 0x58, 0x64, 0x2c, 0x15, 0xd4, 0xfe, 0x0c, 0xe0, 0xbd, 0x3f, 0x18, 0x90, 0x0d, 0x7b, - 0x31, 0x99, 0xd0, 0xb8, 0x92, 0x02, 0xc3, 0x6d, 0x0f, 0x2e, 0x0b, 0xab, 0xc9, 0xf8, 0xcd, 0x2f, - 0x1a, 0xc3, 0x4d, 0x9a, 0x4a, 0x1e, 0x51, 0x61, 0x74, 0x94, 0x9f, 0xc3, 0x95, 0x9f, 0x97, 0xa9, - 0xe4, 0x97, 0xad, 0x9d, 0xfb, 0xd7, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, 0xe8, 0x11, - 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x3b, 0x00, 0x43, 0xdd, 0xdb, 0x58, 0x16, 0x16, 0x78, 0xea, 0xab, - 0x94, 0xfd, 0x02, 0xee, 0x9d, 0x54, 0x3a, 0xa7, 0x24, 0xe2, 0xad, 0x2b, 0x04, 0xf5, 0x94, 0x24, - 0xb4, 0xf6, 0xe4, 0x2b, 0x8c, 0xf6, 0xe1, 0xc6, 0x47, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, 0x64, 0x1d, - 0xd8, 0x5f, 0x3b, 0x70, 0x67, 0xdd, 0x03, 0x3a, 0x86, 0xdb, 0xbf, 0xc7, 0xab, 0xfa, 0xfb, 0x23, - 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xbb, 0x8d, 0xe5, 0x8e, 0x14, - 0x57, 0xdf, 0x2d, 0xe0, 0xaf, 0x9a, 0xd1, 0x11, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, 0xad, 0x65, - 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, 0xfa, 0x8a, - 0x4a, 0x32, 0x25, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x35, 0x9f, 0xbb, 0x7f, 0xcd, 0x7b, 0xdc, - 0x08, 0x1e, 0xfd, 0xdd, 0xfd, 0x84, 0x25, 0x91, 0xa4, 0x49, 0x26, 0x2f, 0xfd, 0x7f, 0x70, 0xa3, - 0x13, 0xd8, 0xcb, 0x08, 0x17, 
0x74, 0x6a, 0xe8, 0xff, 0x55, 0x31, 0x1a, 0x95, 0xbd, 0xba, 0x63, - 0x8d, 0xb9, 0xe1, 0x18, 0x8d, 0x61, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, 0x08, 0x1d, - 0xac, 0xf8, 0xd6, 0xb6, 0xd1, 0x3c, 0xbc, 0x9b, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0xce, 0xe6, 0x58, - 0xbb, 0x99, 0x63, 0xed, 0x76, 0x8e, 0xc1, 0xa7, 0x12, 0x83, 0x2f, 0x25, 0x06, 0xd7, 0x25, 0x06, - 0xb3, 0x12, 0x83, 0x1f, 0x25, 0x06, 0x3f, 0x4b, 0xac, 0xdd, 0x96, 0x18, 0x5c, 0x2d, 0xb0, 0x36, - 0x5b, 0x60, 0xed, 0x66, 0x81, 0xb5, 0xf7, 0x83, 0x20, 0x92, 0x61, 0x3e, 0x71, 0xce, 0x58, 0xe2, - 0x06, 0x9c, 0x9c, 0x93, 0x94, 0xb8, 0x31, 0xbb, 0x88, 0xdc, 0xf6, 0xd3, 0x9a, 0xf4, 0x94, 0xda, - 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7e, 0xaa, 0x57, 0xd3, 0x6d, 0x03, 0x00, 0x00, + // 532 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0xf6, 0x26, 0x6e, 0xda, 0x6e, 0xfa, 0xf7, 0xaf, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x57, 0x86, + 0x43, 0x0e, 0x60, 0x4b, 0xe9, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x42, + 0x70, 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0xeb, 0x4a, 0xbd, 0xf1, 0x08, 0xe5, 0x2d, + 0x78, 0x02, 0x9e, 0xa1, 0xc7, 0x1c, 0x2b, 0x0e, 0x86, 0x38, 0x17, 0x94, 0x53, 0x1f, 0x01, 0x79, + 0x6d, 0x93, 0x50, 0x90, 0xb8, 0x6c, 0xbe, 0x99, 0x9d, 0xf9, 0xbe, 0x2f, 0xb3, 0x63, 0xf8, 0x20, + 0xbb, 0x08, 0xdc, 0x2c, 0x17, 0xa1, 0x3a, 0x9c, 0x8c, 0x33, 0xc9, 0xd0, 0x46, 0xcc, 0x02, 0x85, + 0xcc, 0xdd, 0x80, 0x05, 0x4c, 0x41, 0xb7, 0x42, 0xf5, 0xbd, 0x69, 0x05, 0x8c, 0x05, 0x31, 0x75, + 0x55, 0x34, 0xce, 0xcf, 0x5d, 0x19, 0x25, 0x54, 0x48, 0x92, 0x64, 0x75, 0x81, 0xfd, 0x16, 0xf6, + 0x4f, 0x73, 0x11, 0xfa, 0xf4, 0x43, 0x4e, 0x85, 0x44, 0xc7, 0x70, 0x5d, 0x48, 0x4e, 0x49, 0x22, + 0x0c, 0x70, 0xd8, 0x1d, 0xf4, 0x87, 0x0f, 0x9d, 0x56, 0xc1, 0x79, 0xad, 0x2e, 0x46, 0x13, 0x92, + 0x49, 0xca, 0xbd, 0xbd, 0xaf, 0x85, 0xd5, 0xab, 0x53, 0x8b, 0xc2, 0x6a, 0xbb, 0xfc, 0x16, 0xd8, + 0xdb, 0x70, 0xab, 0x26, 0x16, 0x19, 0x4b, 0x05, 0xb5, 0x3f, 0x01, 0xf8, 0xdf, 0x6f, 0x0c, 0xc8, + 0x86, 0xbd, 0x98, 0x8c, 0x69, 0x5c, 0x49, 0x81, 0xc1, 0xa6, 0x07, 0x17, 0x85, 0xd5, 0x64, 0xfc, + 0xe6, 0x17, 0x8d, 0xe0, 0x3a, 0x4d, 0x25, 0x8f, 0xa8, 0x30, 0x3a, 0xca, 0xcf, 0xfe, 0xd2, 0xcf, + 0xcb, 0x54, 0xf2, 0xab, 0xd6, 0xce, 0xff, 0x37, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, + 0xe8, 0x11, 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x7b, 0x08, 0x06, 0xba, 0xb7, 0xb6, 0x28, 0x2c, 0xf0, + 0xcc, 0x57, 0x29, 0xfb, 0x05, 0xdc, 0x39, 0xa9, 0x74, 0x4e, 0x49, 0xc4, 0x5b, 0x57, 0x08, 0xea, + 0x29, 0x49, 0x68, 0xed, 0xc9, 0x57, 0x18, 0xed, 0xc2, 0xb5, 0x4b, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, + 0x64, 0x1d, 0xd8, 0x5f, 0x3a, 0x70, 0x6b, 0xd5, 0x03, 0x3a, 0x86, 0x9b, 0xbf, 0xc6, 0xab, 0xfa, + 0xfb, 0x43, 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xdb, 0x8d, 0xe5, + 0x8e, 0x14, 0xd7, 0xdf, 0x2c, 0xe0, 0x2f, 0x9b, 0xd1, 0x01, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, + 0x8d, 0x45, 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, + 0xf2, 0x8a, 0x4a, 0x32, 0x21, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x39, 0x9f, 0xfb, 0x7f, 0xcd, + 0x7b, 0xd2, 0x08, 0x1e, 0xfc, 0xd9, 0xfd, 0x94, 0x25, 0x91, 0xa4, 0x49, 0x26, 0xaf, 0xfc, 0xbf, + 0x70, 0xa3, 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x62, 0xe8, 0xff, 0x54, 0x31, 0x1a, 0x95, 0x9d, + 0xba, 0x63, 0x85, 0xb9, 0xe1, 0x18, 0x8e, 0x60, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, + 0x08, 0xed, 0x2d, 0xf9, 0x56, 0xb6, 0xd1, 0xdc, 0xbf, 0x9f, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0x4d, + 0x67, 0x58, 
0xbb, 0x9d, 0x61, 0xed, 0x6e, 0x86, 0xc1, 0xc7, 0x12, 0x83, 0xcf, 0x25, 0x06, 0x37, + 0x25, 0x06, 0xd3, 0x12, 0x83, 0xef, 0x25, 0x06, 0x3f, 0x4a, 0xac, 0xdd, 0x95, 0x18, 0x5c, 0xcf, + 0xb1, 0x36, 0x9d, 0x63, 0xed, 0x76, 0x8e, 0xb5, 0xf7, 0x8f, 0x83, 0x48, 0x86, 0xf9, 0xd8, 0x39, + 0x63, 0x89, 0x1b, 0x70, 0x72, 0x4e, 0x52, 0xe2, 0xc6, 0xec, 0x22, 0x72, 0x2f, 0x8f, 0xdc, 0xf6, + 0xeb, 0x1a, 0xf7, 0x94, 0xe0, 0xd1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x51, 0xe6, 0x29, 0x58, + 0x70, 0x03, 0x00, 0x00, } func (this *PushRequest) Equal(that interface{}) bool { diff --git a/pkg/push/push.proto b/pkg/push/push.proto index 3bf8ad06a8a83..e538c66903eae 100644 --- a/pkg/push/push.proto +++ b/pkg/push/push.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; -option go_package = "github.com/grafana/loki/pkg/push"; +option go_package = "github.com/grafana/loki/v3/pkg/push"; service Pusher { rpc Push(PushRequest) returns (PushResponse) {} diff --git a/pkg/querier/astmapper/parallel.go b/pkg/querier/astmapper/parallel.go index 4ae5a5b5c7fbd..e935f14204521 100644 --- a/pkg/querier/astmapper/parallel.go +++ b/pkg/querier/astmapper/parallel.go @@ -6,7 +6,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/prometheus/promql/parser" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var summableAggregates = map[parser.ItemType]struct{}{ diff --git a/pkg/querier/astmapper/shard_summer.go b/pkg/querier/astmapper/shard_summer.go index 8226f35804eef..12f7cf616f160 100644 --- a/pkg/querier/astmapper/shard_summer.go +++ b/pkg/querier/astmapper/shard_summer.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const ( diff --git a/pkg/querier/handler.go b/pkg/querier/handler.go index f5415344ab0c1..0f3feacc0087b 100644 --- a/pkg/querier/handler.go +++ b/pkg/querier/handler.go @@ -7,10 +7,10 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) type Handler struct { diff --git a/pkg/querier/http.go b/pkg/querier/http.go index 348de10d0e16d..e85fa2045ae9e 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -17,22 +17,22 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/loghttp" - loghttp_legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util/httpreq" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - marshal_legacy "github.com/grafana/loki/pkg/util/marshal/legacy" - serverutil 
"github.com/grafana/loki/pkg/util/server" - "github.com/grafana/loki/pkg/util/spanlogger" - util_validation "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + loghttp_legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + marshal_legacy "github.com/grafana/loki/v3/pkg/util/marshal/legacy" + serverutil "github.com/grafana/loki/v3/pkg/util/server" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + util_validation "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff --git a/pkg/querier/http_test.go b/pkg/querier/http_test.go index 180e82c6b07d4..a97e55f882bab 100644 --- a/pkg/querier/http_test.go +++ b/pkg/querier/http_test.go @@ -11,9 +11,9 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/mock" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/validation" "github.com/go-kit/log" "github.com/grafana/dskit/user" diff --git a/pkg/querier/ingester_querier.go b/pkg/querier/ingester_querier.go index fb57a415ba7f6..386bcfb4be788 100644 --- a/pkg/querier/ingester_querier.go +++ b/pkg/querier/ingester_querier.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/gogo/status" "github.com/grafana/dskit/httpgrpc" @@ -18,15 +18,15 @@ import ( "github.com/prometheus/prometheus/model/labels" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type responseFromIngesters struct { diff --git a/pkg/querier/ingester_querier_test.go b/pkg/querier/ingester_querier_test.go index d5f4d872c5084..d2cb00d82ec59 100644 --- a/pkg/querier/ingester_querier_test.go +++ b/pkg/querier/ingester_querier_test.go @@ -19,9 +19,9 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/util/constants" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestIngesterQuerier_earlyExitOnQuorum(t *testing.T) { diff --git a/pkg/querier/limits/definitions.go b/pkg/querier/limits/definitions.go index cda30b116976d..dec518a7fc7da 100644 --- a/pkg/querier/limits/definitions.go +++ b/pkg/querier/limits/definitions.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql" ) type TimeRangeLimits interface { diff --git a/pkg/querier/multi_tenant_querier.go b/pkg/querier/multi_tenant_querier.go index 0643caeb7b315..76c387c2f64bb 100644 --- a/pkg/querier/multi_tenant_querier.go +++ b/pkg/querier/multi_tenant_querier.go @@ -4,8 +4,8 @@ import ( "context" "fmt" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/go-kit/log" "github.com/grafana/dskit/user" @@ -13,12 +13,12 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) const ( diff --git a/pkg/querier/multi_tenant_querier_test.go b/pkg/querier/multi_tenant_querier_test.go index 0d17bcc9adffa..38f190562ea1a 100644 --- a/pkg/querier/multi_tenant_querier_test.go +++ b/pkg/querier/multi_tenant_querier_test.go @@ -15,11 +15,11 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" ) func TestMultiTenantQuerier_SelectLogs(t *testing.T) { diff --git a/pkg/querier/plan/plan.go b/pkg/querier/plan/plan.go index d6548537a394c..ea872ac11cc58 100644 --- a/pkg/querier/plan/plan.go +++ b/pkg/querier/plan/plan.go @@ -3,8 +3,8 @@ package plan import ( "bytes" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/util" ) type QueryPlan struct { diff --git a/pkg/querier/plan/plan_test.go b/pkg/querier/plan/plan_test.go index 60f7d3fad1806..5998448cfa56a 100644 --- a/pkg/querier/plan/plan_test.go +++ b/pkg/querier/plan/plan_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func TestMarshalTo(t *testing.T) { diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index 9527980e9d5f0..11a153e5a61a2 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -9,9 +9,9 @@ import ( "github.com/go-kit/log" 
"github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" "github.com/go-kit/log/level" "github.com/grafana/dskit/httpgrpc" @@ -23,19 +23,19 @@ import ( "golang.org/x/sync/errgroup" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - querier_limits "github.com/grafana/loki/pkg/querier/limits" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - listutil "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/spanlogger" - util_validation "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + querier_limits "github.com/grafana/loki/v3/pkg/querier/limits" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + listutil "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + util_validation "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff --git a/pkg/querier/querier_mock_test.go b/pkg/querier/querier_mock_test.go index 7be2c0cefed0d..83b1b6e6a8a4e 100644 --- a/pkg/querier/querier_mock_test.go +++ b/pkg/querier/querier_mock_test.go @@ -7,9 +7,9 @@ import ( "math" "time" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" "github.com/grafana/dskit/grpcclient" "github.com/grafana/dskit/ring" @@ -21,19 +21,19 @@ import ( "google.golang.org/grpc/health/grpc_health_v1" grpc_metadata "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + 
"github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/validation" ) // querierClientMock is a mockable version of QuerierClient, used in querier diff --git a/pkg/querier/querier_test.go b/pkg/querier/querier_test.go index e9c36f7ae91e8..3848fc1746fc0 100644 --- a/pkg/querier/querier_test.go +++ b/pkg/querier/querier_test.go @@ -19,15 +19,15 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/querier/queryrange/benchmarkutils_test.go b/pkg/querier/queryrange/benchmarkutils_test.go index 80552f552d20a..afe5f2866c047 100644 --- a/pkg/querier/queryrange/benchmarkutils_test.go +++ b/pkg/querier/queryrange/benchmarkutils_test.go @@ -3,7 +3,7 @@ package queryrange import ( "sort" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type entry struct { diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index 6bcfb03b33364..c657730b2f483 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -14,8 +14,8 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/grafana/dskit/httpgrpc" "github.com/grafana/dskit/user" @@ -25,20 +25,20 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/timestamp" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - indexStats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/marshal" - marshal_legacy "github.com/grafana/loki/pkg/util/marshal/legacy" - "github.com/grafana/loki/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + 
"github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + indexStats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/marshal" + marshal_legacy "github.com/grafana/loki/v3/pkg/util/marshal/legacy" + "github.com/grafana/loki/v3/pkg/util/querylimits" ) var DefaultCodec = &Codec{} diff --git a/pkg/querier/queryrange/codec_test.go b/pkg/querier/queryrange/codec_test.go index cdc95865e12c7..35a101cb590aa 100644 --- a/pkg/querier/queryrange/codec_test.go +++ b/pkg/querier/queryrange/codec_test.go @@ -24,16 +24,16 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func init() { diff --git a/pkg/querier/queryrange/downstreamer.go b/pkg/querier/queryrange/downstreamer.go index 4db8034291f64..3d1485d5a77e0 100644 --- a/pkg/querier/queryrange/downstreamer.go +++ b/pkg/querier/queryrange/downstreamer.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/querier/queryrange/downstreamer_test.go b/pkg/querier/queryrange/downstreamer_test.go index 95b79d72d30af..0dddddab9b004 100644 --- a/pkg/querier/queryrange/downstreamer_test.go +++ b/pkg/querier/queryrange/downstreamer_test.go @@ -17,13 +17,13 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func testSampleStreams() []queryrangebase.SampleStream { diff --git a/pkg/querier/queryrange/extensions.go b/pkg/querier/queryrange/extensions.go index 40e5321e9db66..6e377295283f5 100644 --- a/pkg/querier/queryrange/extensions.go +++ b/pkg/querier/queryrange/extensions.go @@ -5,8 +5,8 @@ import ( "github.com/grafana/jsonparser" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // To satisfy queryrange.Response interface(https://github.com/cortexproject/cortex/blob/21bad57b346c730d684d6d0205efef133422ab28/pkg/querier/queryrange/query_range.go#L88) diff --git a/pkg/querier/queryrange/extensions_test.go b/pkg/querier/queryrange/extensions_test.go index a7354d57eb054..727931bed4586 100644 --- a/pkg/querier/queryrange/extensions_test.go +++ b/pkg/querier/queryrange/extensions_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func Test_setHeader(t *testing.T) { diff --git a/pkg/querier/queryrange/index_stats_cache.go b/pkg/querier/queryrange/index_stats_cache.go index a91721bf36873..b536fe7963efd 100644 --- a/pkg/querier/queryrange/index_stats_cache.go +++ b/pkg/querier/queryrange/index_stats_cache.go @@ -11,12 +11,12 @@ import ( "github.com/grafana/dskit/tenant" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) type IndexStatsSplitter struct { diff --git a/pkg/querier/queryrange/index_stats_cache_test.go b/pkg/querier/queryrange/index_stats_cache_test.go index 1127b88576e11..4d0f4124788a4 100644 --- a/pkg/querier/queryrange/index_stats_cache_test.go +++ b/pkg/querier/queryrange/index_stats_cache_test.go @@ -11,13 +11,13 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" + 
"github.com/grafana/loki/v3/pkg/util/constants" ) func TestIndexStatsCache(t *testing.T) { diff --git a/pkg/querier/queryrange/ingester_query_window.go b/pkg/querier/queryrange/ingester_query_window.go index 7a161f40c0072..d2bae2233c38e 100644 --- a/pkg/querier/queryrange/ingester_query_window.go +++ b/pkg/querier/queryrange/ingester_query_window.go @@ -3,8 +3,8 @@ package queryrange import ( "time" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) // SplitIntervalForTimeRange returns the correct split interval to use. It accounts for the given upperBound value being diff --git a/pkg/querier/queryrange/instant_metric_cache.go b/pkg/querier/queryrange/instant_metric_cache.go index 6f505ebf8016f..37f97a3d032b4 100644 --- a/pkg/querier/queryrange/instant_metric_cache.go +++ b/pkg/querier/queryrange/instant_metric_cache.go @@ -8,9 +8,9 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) type InstantMetricSplitter struct { diff --git a/pkg/querier/queryrange/instrument.go b/pkg/querier/queryrange/instrument.go index 497cfb2dd8a1a..a2c3be1a733c6 100644 --- a/pkg/querier/queryrange/instrument.go +++ b/pkg/querier/queryrange/instrument.go @@ -12,7 +12,7 @@ import ( "github.com/grafana/dskit/server" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) const ( diff --git a/pkg/querier/queryrange/labels_cache.go b/pkg/querier/queryrange/labels_cache.go index 3a940e34fa034..5979a0c2f91e1 100644 --- a/pkg/querier/queryrange/labels_cache.go +++ b/pkg/querier/queryrange/labels_cache.go @@ -8,10 +8,10 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/validation" ) type cacheKeyLabels struct { diff --git a/pkg/querier/queryrange/labels_cache_test.go b/pkg/querier/queryrange/labels_cache_test.go index 90b85cb1faf82..22e967a113762 100644 --- a/pkg/querier/queryrange/labels_cache_test.go +++ b/pkg/querier/queryrange/labels_cache_test.go @@ -11,12 +11,12 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + 
"github.com/grafana/loki/v3/pkg/util" ) func TestCacheKeyLabels_GenerateCacheKey(t *testing.T) { diff --git a/pkg/querier/queryrange/limits.go b/pkg/querier/queryrange/limits.go index ab7818460738f..e1b11fde3449d 100644 --- a/pkg/querier/queryrange/limits.go +++ b/pkg/querier/queryrange/limits.go @@ -22,18 +22,18 @@ import ( "github.com/prometheus/prometheus/model/timestamp" "golang.org/x/sync/semaphore" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - queryrange_limits "github.com/grafana/loki/pkg/querier/queryrange/limits" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + queryrange_limits "github.com/grafana/loki/v3/pkg/querier/queryrange/limits" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff --git a/pkg/querier/queryrange/limits/definitions.go b/pkg/querier/queryrange/limits/definitions.go index f2e55728a594e..be366fdc10a44 100644 --- a/pkg/querier/queryrange/limits/definitions.go +++ b/pkg/querier/queryrange/limits/definitions.go @@ -4,8 +4,8 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // Limits extends the cortex limits interface with support for per tenant splitby parameters diff --git a/pkg/querier/queryrange/limits_test.go b/pkg/querier/queryrange/limits_test.go index 07bf4d1f30a74..7591d5d170f76 100644 --- a/pkg/querier/queryrange/limits_test.go +++ b/pkg/querier/queryrange/limits_test.go @@ -17,16 +17,16 @@ import ( "go.uber.org/atomic" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + base "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/math" ) 
func TestLimits(t *testing.T) { diff --git a/pkg/querier/queryrange/log_result_cache.go b/pkg/querier/queryrange/log_result_cache.go index fd26b67412a6b..4a74b71d8d760 100644 --- a/pkg/querier/queryrange/log_result_cache.go +++ b/pkg/querier/queryrange/log_result_cache.go @@ -17,13 +17,13 @@ import ( "github.com/prometheus/common/model" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/validation" ) // LogResultCacheMetrics is the metrics wrapper used in log result cache. diff --git a/pkg/querier/queryrange/log_result_cache_test.go b/pkg/querier/queryrange/log_result_cache_test.go index 5da4aee7c4be3..608820d08f8a6 100644 --- a/pkg/querier/queryrange/log_result_cache_test.go +++ b/pkg/querier/queryrange/log_result_cache_test.go @@ -13,11 +13,11 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) const ( diff --git a/pkg/querier/queryrange/marshal.go b/pkg/querier/queryrange/marshal.go index 3640012f88a29..a47b51607c26a 100644 --- a/pkg/querier/queryrange/marshal.go +++ b/pkg/querier/queryrange/marshal.go @@ -17,17 +17,17 @@ import ( "github.com/prometheus/prometheus/promql" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/sketch" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/querylimits" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/sketch" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/querylimits" + "github.com/grafana/loki/v3/pkg/util/server" ) const ( diff --git a/pkg/querier/queryrange/marshal_test.go b/pkg/querier/queryrange/marshal_test.go index 6fa9bbe23897c..43d02f1071b6f 100644 --- 
a/pkg/querier/queryrange/marshal_test.go +++ b/pkg/querier/queryrange/marshal_test.go @@ -6,12 +6,12 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func TestResultToResponse(t *testing.T) { diff --git a/pkg/querier/queryrange/metrics.go b/pkg/querier/queryrange/metrics.go index 9482becf98817..bd9ce6fa79bac 100644 --- a/pkg/querier/queryrange/metrics.go +++ b/pkg/querier/queryrange/metrics.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) type Metrics struct { diff --git a/pkg/querier/queryrange/ordering.go b/pkg/querier/queryrange/ordering.go index 761ec9cc3fafe..7eb61a604ef03 100644 --- a/pkg/querier/queryrange/ordering.go +++ b/pkg/querier/queryrange/ordering.go @@ -3,7 +3,7 @@ package queryrange import ( "sort" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) /* diff --git a/pkg/querier/queryrange/prometheus.go b/pkg/querier/queryrange/prometheus.go index 2a8ff78c164e5..22ee8b3c1d4dd 100644 --- a/pkg/querier/queryrange/prometheus.go +++ b/pkg/querier/queryrange/prometheus.go @@ -11,10 +11,10 @@ import ( otlog "github.com/opentracing/opentracing-go/log" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) var ( diff --git a/pkg/querier/queryrange/prometheus_test.go b/pkg/querier/queryrange/prometheus_test.go index a5bea28684251..6e3a4ac3e4d4e 100644 --- a/pkg/querier/queryrange/prometheus_test.go +++ b/pkg/querier/queryrange/prometheus_test.go @@ -7,9 +7,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) var emptyStats = `"stats": { diff --git a/pkg/querier/queryrange/queryrange.pb.go b/pkg/querier/queryrange/queryrange.pb.go index 2312afbd71e7c..a78b9efbaee74 100644 --- a/pkg/querier/queryrange/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrange.pb.go @@ -12,15 +12,15 @@ import ( github_com_gogo_protobuf_sortkeys 
"github.com/gogo/protobuf/sortkeys" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" - logproto "github.com/grafana/loki/pkg/logproto" - stats "github.com/grafana/loki/pkg/logqlmodel/stats" - _ "github.com/grafana/loki/pkg/push" - github_com_grafana_loki_pkg_push "github.com/grafana/loki/pkg/push" - github_com_grafana_loki_pkg_querier_plan "github.com/grafana/loki/pkg/querier/plan" - queryrangebase "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - _ "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" + logproto "github.com/grafana/loki/v3/pkg/logproto" + stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + _ "github.com/grafana/loki/v3/pkg/push" + github_com_grafana_loki_v3_pkg_push "github.com/grafana/loki/v3/pkg/push" + github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" + queryrangebase "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + _ "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" io "io" math "math" math_bits "math/bits" @@ -42,16 +42,16 @@ var _ = time.Kitchen const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type LokiRequest struct { - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` - Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` - Step int64 `protobuf:"varint,3,opt,name=step,proto3" json:"step,omitempty"` - Interval int64 `protobuf:"varint,9,opt,name=interval,proto3" json:"interval,omitempty"` - StartTs time.Time `protobuf:"bytes,4,opt,name=startTs,proto3,stdtime" json:"startTs"` - EndTs time.Time `protobuf:"bytes,5,opt,name=endTs,proto3,stdtime" json:"endTs"` - Direction logproto.Direction `protobuf:"varint,6,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Path string `protobuf:"bytes,7,opt,name=path,proto3" json:"path,omitempty"` - Shards []string `protobuf:"bytes,8,rep,name=shards,proto3" json:"shards"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,10,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + Step int64 `protobuf:"varint,3,opt,name=step,proto3" json:"step,omitempty"` + Interval int64 `protobuf:"varint,9,opt,name=interval,proto3" json:"interval,omitempty"` + StartTs time.Time `protobuf:"bytes,4,opt,name=startTs,proto3,stdtime" json:"startTs"` + EndTs time.Time `protobuf:"bytes,5,opt,name=endTs,proto3,stdtime" json:"endTs"` + Direction logproto.Direction `protobuf:"varint,6,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Path string `protobuf:"bytes,7,opt,name=path,proto3" json:"path,omitempty"` + Shards []string `protobuf:"bytes,8,rep,name=shards,proto3" json:"shards"` + Plan 
*github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,10,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *LokiRequest) Reset() { *m = LokiRequest{} } @@ -150,13 +150,13 @@ func (m *LokiRequest) GetShards() []string { } type LokiInstantRequest struct { - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` - Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` - TimeTs time.Time `protobuf:"bytes,3,opt,name=timeTs,proto3,stdtime" json:"timeTs"` - Direction logproto.Direction `protobuf:"varint,4,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` - Shards []string `protobuf:"bytes,6,rep,name=shards,proto3" json:"shards"` - Plan *github_com_grafana_loki_pkg_querier_plan.QueryPlan `protobuf:"bytes,7,opt,name=plan,proto3,customtype=github.com/grafana/loki/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + TimeTs time.Time `protobuf:"bytes,3,opt,name=timeTs,proto3,stdtime" json:"timeTs"` + Direction logproto.Direction `protobuf:"varint,4,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` + Shards []string `protobuf:"bytes,6,rep,name=shards,proto3" json:"shards"` + Plan *github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan `protobuf:"bytes,7,opt,name=plan,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan" json:"plan,omitempty"` } func (m *LokiInstantRequest) Reset() { *m = LokiInstantRequest{} } @@ -277,15 +277,15 @@ func (m *Plan) GetRaw() []byte { } type LokiResponse struct { - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - Data LokiData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` - ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` - Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` - Direction logproto.Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` - Limit uint32 `protobuf:"varint,6,opt,name=limit,proto3" json:"limit,omitempty"` - Version uint32 `protobuf:"varint,7,opt,name=version,proto3" json:"version,omitempty"` - Statistics stats.Result `protobuf:"bytes,8,opt,name=statistics,proto3" json:"statistics"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,9,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data LokiData `protobuf:"bytes,2,opt,name=Data,proto3" json:"data,omitempty"` + ErrorType string `protobuf:"bytes,3,opt,name=ErrorType,proto3" json:"errorType,omitempty"` + Error string `protobuf:"bytes,4,opt,name=Error,proto3" json:"error,omitempty"` + Direction logproto.Direction `protobuf:"varint,5,opt,name=direction,proto3,enum=logproto.Direction" json:"direction,omitempty"` + Limit uint32 `protobuf:"varint,6,opt,name=limit,proto3" json:"limit,omitempty"` + Version uint32 `protobuf:"varint,7,opt,name=version,proto3" 
json:"version,omitempty"` + Statistics stats.Result `protobuf:"bytes,8,opt,name=statistics,proto3" json:"statistics"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,9,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *LokiResponse) Reset() { *m = LokiResponse{} } @@ -452,11 +452,11 @@ func (m *LokiSeriesRequest) GetShards() []string { } type LokiSeriesResponse struct { - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - Data []logproto.SeriesIdentifier `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` - Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` - Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data []logproto.SeriesIdentifier `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` + Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` } func (m *LokiSeriesResponse) Reset() { *m = LokiSeriesResponse{} } @@ -520,11 +520,11 @@ func (m *LokiSeriesResponse) GetStatistics() stats.Result { } type LokiLabelNamesResponse struct { - Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` - Data []string `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` - Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` - Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` + Status string `protobuf:"bytes,1,opt,name=Status,proto3" json:"status"` + Data []string `protobuf:"bytes,2,rep,name=Data,proto3" json:"data,omitempty"` + Version uint32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,4,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Statistics stats.Result `protobuf:"bytes,5,opt,name=statistics,proto3" json:"statistics"` } func (m *LokiLabelNamesResponse) Reset() { *m = LokiLabelNamesResponse{} } @@ -588,8 +588,8 @@ func (m *LokiLabelNamesResponse) GetStatistics() stats.Result { } type LokiData struct { - ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` - Result 
[]github_com_grafana_loki_pkg_push.Stream `protobuf:"bytes,2,rep,name=Result,proto3,customtype=github.com/grafana/loki/pkg/push.Stream" json:"result"` + ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` + Result []github_com_grafana_loki_v3_pkg_push.Stream `protobuf:"bytes,2,rep,name=Result,proto3,customtype=github.com/grafana/loki/v3/pkg/push.Stream" json:"result"` } func (m *LokiData) Reset() { *m = LokiData{} } @@ -684,8 +684,8 @@ func (m *LokiPromResponse) GetStatistics() stats.Result { } type IndexStatsResponse struct { - Response *github_com_grafana_loki_pkg_logproto.IndexStatsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.IndexStatsResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.IndexStatsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.IndexStatsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *IndexStatsResponse) Reset() { *m = IndexStatsResponse{} } @@ -721,8 +721,8 @@ func (m *IndexStatsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_IndexStatsResponse proto.InternalMessageInfo type VolumeResponse struct { - Response *github_com_grafana_loki_pkg_logproto.VolumeResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.VolumeResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.VolumeResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.VolumeResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *VolumeResponse) Reset() { *m = VolumeResponse{} } @@ -758,8 +758,8 @@ func (m *VolumeResponse) XXX_DiscardUnknown() { var xxx_messageInfo_VolumeResponse proto.InternalMessageInfo type TopKSketchesResponse struct { - Response *github_com_grafana_loki_pkg_logproto.TopKMatrix `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.TopKMatrix" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.TopKMatrix 
`protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.TopKMatrix" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *TopKSketchesResponse) Reset() { *m = TopKSketchesResponse{} } @@ -795,8 +795,8 @@ func (m *TopKSketchesResponse) XXX_DiscardUnknown() { var xxx_messageInfo_TopKSketchesResponse proto.InternalMessageInfo type QuantileSketchResponse struct { - Response *github_com_grafana_loki_pkg_logproto.QuantileSketchMatrix `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.QuantileSketchMatrix" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.QuantileSketchMatrix `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.QuantileSketchMatrix" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *QuantileSketchResponse) Reset() { *m = QuantileSketchResponse{} } @@ -832,8 +832,8 @@ func (m *QuantileSketchResponse) XXX_DiscardUnknown() { var xxx_messageInfo_QuantileSketchResponse proto.InternalMessageInfo type ShardsResponse struct { - Response *github_com_grafana_loki_pkg_logproto.ShardsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.ShardsResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.ShardsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.ShardsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *ShardsResponse) Reset() { *m = ShardsResponse{} } @@ -869,8 +869,8 @@ func (m *ShardsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ShardsResponse proto.InternalMessageInfo type DetectedFieldsResponse struct { - Response *github_com_grafana_loki_pkg_logproto.DetectedFieldsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.DetectedFieldsResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader 
`protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.DetectedFieldsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.DetectedFieldsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *DetectedFieldsResponse) Reset() { *m = DetectedFieldsResponse{} } @@ -906,8 +906,8 @@ func (m *DetectedFieldsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_DetectedFieldsResponse proto.InternalMessageInfo type DetectedLabelsResponse struct { - Response *github_com_grafana_loki_pkg_logproto.DetectedLabelsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/pkg/logproto.DetectedLabelsResponse" json:"response,omitempty"` - Headers []github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` + Response *github_com_grafana_loki_v3_pkg_logproto.DetectedLabelsResponse `protobuf:"bytes,1,opt,name=response,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.DetectedLabelsResponse" json:"response,omitempty"` + Headers []github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader `protobuf:"bytes,2,rep,name=Headers,proto3,customtype=github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" json:"-"` } func (m *DetectedLabelsResponse) Reset() { *m = DetectedLabelsResponse{} } @@ -1363,114 +1363,115 @@ func init() { } var fileDescriptor_51b9d53b40d11902 = []byte{ - // 1712 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x4d, 0x6f, 0xdb, 0x46, - 0x1a, 0x16, 0xf5, 0x69, 0x8d, 0x3f, 0xd6, 0x3b, 0x36, 0x1c, 0xae, 0x93, 0x88, 0x82, 0x80, 0x4d, - 0xbc, 0x8b, 0x5d, 0x6a, 0x63, 0x67, 0xf3, 0xbd, 0xdb, 0x86, 0x75, 0x02, 0x19, 0x4d, 0x8a, 0x84, - 0x36, 0x7a, 0x28, 0x7a, 0x19, 0x4b, 0x63, 0x99, 0x35, 0x25, 0xd2, 0x9c, 0x91, 0x13, 0x1f, 0x0a, - 0xf4, 0x07, 0xb4, 0x40, 0x80, 0xfe, 0x87, 0xa2, 0x40, 0x83, 0x9c, 0x7a, 0xea, 0xad, 0x3d, 0xb4, - 0x39, 0xe6, 0x18, 0x08, 0xa8, 0xda, 0x38, 0x97, 0xc2, 0xa7, 0xfc, 0x84, 0x62, 0x3e, 0x48, 0x0d, - 0x45, 0x39, 0x91, 0x52, 0x14, 0x88, 0x8b, 0x5e, 0xa4, 0xf9, 0x78, 0x9f, 0xe1, 0xf0, 0x79, 0x9f, - 0xf7, 0xe5, 0x3b, 0x03, 0xce, 0xfa, 0x3b, 0xcd, 0xea, 0x6e, 0x07, 0x07, 0x0e, 0x0e, 0xf8, 0xff, - 0x7e, 0x80, 0xda, 0x4d, 0xac, 0x34, 0x4d, 0x3f, 0xf0, 0xa8, 0x07, 0x41, 0x7f, 0x64, 0x71, 0xb9, - 0xe9, 0xd0, 0xed, 0xce, 0xa6, 0x59, 0xf7, 0x5a, 0xd5, 0xa6, 0xd7, 0xf4, 0xaa, 0x4d, 0xcf, 0x6b, - 0xba, 0x18, 0xf9, 0x0e, 0x91, 0xcd, 0x6a, 0xe0, 0xd7, 0xab, 0x84, 0x22, 0xda, 0x21, 0x02, 0xbf, - 0x38, 0xcf, 0x0c, 0x79, 0x93, 0x43, 0xe4, 0xa8, 0x21, 0xcd, 0x79, 0x6f, 0xb3, 0xb3, 0x55, 0xa5, - 0x4e, 0x0b, 0x13, 0x8a, 0x5a, 0x7e, 0x68, 0xc0, 0xf6, 0xe7, 0x7a, 0x4d, 0x81, 0x74, 0xda, 0x0d, - 0x7c, 0xbf, 0x89, 0x28, 0xbe, 0x87, 0xf6, 0xa5, 0xc1, 0xc9, 0x98, 0x41, 0xd8, 0x90, 0x93, 0x7f, - 0x8b, 0x4d, 0x92, 0x1d, 0x4c, 0xeb, 0xdb, 0x72, 
0xaa, 0x2c, 0xa7, 0x76, 0xdd, 0x96, 0xd7, 0xc0, - 0x2e, 0xdf, 0x2c, 0x11, 0xbf, 0xd2, 0x62, 0x8e, 0x59, 0xf8, 0x1d, 0xb2, 0xcd, 0x7f, 0xe4, 0xe0, - 0x3b, 0xaf, 0xe4, 0x6b, 0x13, 0x11, 0x5c, 0x6d, 0xe0, 0x2d, 0xa7, 0xed, 0x50, 0xc7, 0x6b, 0x13, - 0xb5, 0x2d, 0x17, 0xb9, 0x30, 0xda, 0x22, 0x83, 0x3e, 0xa8, 0x3c, 0xca, 0x80, 0xc9, 0x5b, 0xde, - 0x8e, 0x63, 0xe3, 0xdd, 0x0e, 0x26, 0x14, 0xce, 0x83, 0x1c, 0xb7, 0xd1, 0xb5, 0xb2, 0xb6, 0x54, - 0xb4, 0x45, 0x87, 0x8d, 0xba, 0x4e, 0xcb, 0xa1, 0x7a, 0xba, 0xac, 0x2d, 0x4d, 0xdb, 0xa2, 0x03, - 0x21, 0xc8, 0x12, 0x8a, 0x7d, 0x3d, 0x53, 0xd6, 0x96, 0x32, 0x36, 0x6f, 0xc3, 0x45, 0x30, 0xe1, - 0xb4, 0x29, 0x0e, 0xf6, 0x90, 0xab, 0x17, 0xf9, 0x78, 0xd4, 0x87, 0xff, 0x07, 0x05, 0x42, 0x51, - 0x40, 0x37, 0x88, 0x9e, 0x2d, 0x6b, 0x4b, 0x93, 0xcb, 0x8b, 0xa6, 0xf0, 0x95, 0x19, 0xfa, 0xca, - 0xdc, 0x08, 0x7d, 0x65, 0x4d, 0x3c, 0xee, 0x19, 0xa9, 0x07, 0x3f, 0x19, 0x9a, 0x1d, 0x82, 0xe0, - 0x15, 0x90, 0xc3, 0xed, 0xc6, 0x06, 0xd1, 0x73, 0x63, 0xa0, 0x05, 0x04, 0x9e, 0x03, 0xc5, 0x86, - 0x13, 0xe0, 0x3a, 0xe3, 0x4c, 0xcf, 0x97, 0xb5, 0xa5, 0x99, 0xe5, 0x39, 0x33, 0x72, 0xed, 0x6a, - 0x38, 0x65, 0xf7, 0xad, 0xd8, 0xeb, 0xf9, 0x88, 0x6e, 0xeb, 0x05, 0xce, 0x04, 0x6f, 0xc3, 0x0a, - 0xc8, 0x93, 0x6d, 0x14, 0x34, 0x88, 0x3e, 0x51, 0xce, 0x2c, 0x15, 0x2d, 0x70, 0xd8, 0x33, 0xe4, - 0x88, 0x2d, 0xff, 0xe1, 0x87, 0x20, 0xeb, 0xbb, 0xa8, 0xad, 0x03, 0xbe, 0xcb, 0x59, 0x53, 0xe1, - 0xfc, 0x8e, 0x8b, 0xda, 0xd6, 0x85, 0x6e, 0xcf, 0x88, 0xc9, 0x3d, 0x40, 0x5b, 0xa8, 0x8d, 0xaa, - 0xae, 0xb7, 0xe3, 0x54, 0x55, 0x37, 0xb2, 0x55, 0xcc, 0xbb, 0x0c, 0xcd, 0x70, 0x36, 0x5f, 0xb5, - 0xf2, 0x43, 0x1a, 0x40, 0xe6, 0xb0, 0xb5, 0x36, 0xa1, 0xa8, 0x4d, 0x5f, 0xc7, 0x6f, 0xd7, 0x40, - 0x9e, 0xc5, 0xc4, 0x06, 0xe1, 0x9e, 0x1b, 0x95, 0x48, 0x89, 0x89, 0x33, 0x99, 0x1d, 0x8b, 0xc9, - 0xdc, 0x50, 0x26, 0xf3, 0xaf, 0x64, 0xb2, 0xf0, 0xbb, 0x30, 0xa9, 0x83, 0x2c, 0xeb, 0xc1, 0x59, - 0x90, 0x09, 0xd0, 0x3d, 0x4e, 0xdc, 0x94, 0xcd, 0x9a, 0x95, 0xaf, 0xb2, 0x60, 0x4a, 0x04, 0x05, - 0xf1, 0xbd, 0x36, 0xc1, 0x6c, 0xb3, 0xeb, 0x3c, 0xf3, 0x08, 0x7a, 0xe5, 0x66, 0xf9, 0x88, 0x2d, - 0x67, 0xe0, 0xdb, 0x20, 0xbb, 0x8a, 0x28, 0xe2, 0x54, 0x4f, 0x2e, 0xcf, 0xab, 0x9b, 0x65, 0x6b, - 0xb1, 0x39, 0x6b, 0x81, 0xb1, 0x79, 0xd8, 0x33, 0x66, 0x1a, 0x88, 0xa2, 0x7f, 0x79, 0x2d, 0x87, - 0xe2, 0x96, 0x4f, 0xf7, 0x6d, 0x8e, 0x84, 0xff, 0x05, 0xc5, 0x1b, 0x41, 0xe0, 0x05, 0x1b, 0xfb, - 0x3e, 0xe6, 0xae, 0x29, 0x5a, 0x27, 0x0e, 0x7b, 0xc6, 0x1c, 0x0e, 0x07, 0x15, 0x44, 0xdf, 0x12, - 0xfe, 0x03, 0xe4, 0x78, 0x87, 0x3b, 0xa3, 0x68, 0xcd, 0x1d, 0xf6, 0x8c, 0xbf, 0x70, 0x88, 0x62, - 0x2e, 0x2c, 0xe2, 0xbe, 0xcb, 0x8d, 0xe4, 0xbb, 0x48, 0x42, 0x79, 0x55, 0x42, 0x3a, 0x28, 0xec, - 0xe1, 0x80, 0xb0, 0x65, 0x0a, 0x7c, 0x3c, 0xec, 0xc2, 0xeb, 0x00, 0x30, 0x62, 0x1c, 0x42, 0x9d, - 0x3a, 0x8b, 0x12, 0x46, 0xc6, 0xb4, 0x29, 0x92, 0xa0, 0x8d, 0x49, 0xc7, 0xa5, 0x16, 0x94, 0x2c, - 0x28, 0x86, 0xb6, 0xd2, 0x86, 0x0f, 0x35, 0x50, 0xa8, 0x61, 0xd4, 0xc0, 0x01, 0xd1, 0x8b, 0xe5, - 0xcc, 0xd2, 0xe4, 0xf2, 0xdf, 0x4d, 0x35, 0xe3, 0xdd, 0x09, 0xbc, 0x16, 0xa6, 0xdb, 0xb8, 0x43, - 0x42, 0x07, 0x09, 0x6b, 0x6b, 0xa7, 0xdb, 0x33, 0x36, 0x47, 0xd1, 0xc3, 0x48, 0x59, 0xf6, 0xc8, - 0xe7, 0x1c, 0xf6, 0x0c, 0xed, 0xdf, 0x76, 0xb8, 0xc5, 0xca, 0x8f, 0x1a, 0xf8, 0x2b, 0xf3, 0xf0, - 0x3a, 0x5b, 0x9b, 0x28, 0x01, 0xd9, 0x42, 0xb4, 0xbe, 0xad, 0x6b, 0x4c, 0xde, 0xb6, 0xe8, 0xa8, - 0x29, 0x30, 0xfd, 0x9b, 0x52, 0x60, 0x66, 0xfc, 0x14, 0x18, 0x46, 0x61, 0x76, 0x68, 0x14, 0xe6, - 0x8e, 0x8a, 0xc2, 0xca, 0xa7, 0x19, 0x91, 0x71, 0xc2, 0xf7, 0x1b, 0x23, 
0x26, 0x6e, 0x46, 0x31, - 0x91, 0xe1, 0xbb, 0x8d, 0xa4, 0x26, 0xd6, 0x5a, 0x6b, 0xe0, 0x36, 0x75, 0xb6, 0x1c, 0x1c, 0xbc, - 0x22, 0x32, 0x14, 0xb9, 0x65, 0xe2, 0x72, 0x53, 0xb5, 0x92, 0x7d, 0xe3, 0xb5, 0x32, 0x10, 0x1d, - 0xb9, 0xd7, 0x88, 0x8e, 0xca, 0x8b, 0x34, 0x58, 0x60, 0xee, 0xb8, 0x85, 0x36, 0xb1, 0xfb, 0x1e, - 0x6a, 0x8d, 0xe9, 0x92, 0x33, 0x8a, 0x4b, 0x8a, 0x16, 0xfc, 0x93, 0xf2, 0x11, 0x28, 0xff, 0x42, - 0x03, 0x13, 0x61, 0x0e, 0x87, 0x26, 0x00, 0x02, 0xc6, 0xd3, 0xb4, 0x20, 0x7a, 0x86, 0x81, 0x83, - 0x68, 0xd4, 0x56, 0x2c, 0xe0, 0x47, 0x20, 0x2f, 0x7a, 0x32, 0x0a, 0x4e, 0x28, 0x51, 0x40, 0x03, - 0x8c, 0x5a, 0xd7, 0x1b, 0xc8, 0xa7, 0x38, 0xb0, 0x2e, 0xb3, 0x5d, 0x74, 0x7b, 0xc6, 0xd9, 0x97, - 0x51, 0xc4, 0xeb, 0x46, 0x81, 0x63, 0xce, 0x15, 0xcf, 0xb4, 0xe5, 0x13, 0x2a, 0x9f, 0x69, 0x60, - 0x96, 0x6d, 0x94, 0x51, 0x13, 0xa9, 0x62, 0x15, 0x4c, 0x04, 0xb2, 0xcd, 0xb7, 0x3b, 0xb9, 0x5c, - 0x31, 0xe3, 0xb4, 0x0e, 0xa1, 0xd2, 0xca, 0x3e, 0xee, 0x19, 0x9a, 0x1d, 0x21, 0xe1, 0x4a, 0x8c, - 0xc6, 0xf4, 0x30, 0x1a, 0x19, 0x24, 0x15, 0x23, 0xee, 0x9b, 0x34, 0x80, 0x6b, 0xac, 0xc0, 0x66, - 0xe2, 0xeb, 0xeb, 0xb4, 0x93, 0xd8, 0xd1, 0xa9, 0x3e, 0x29, 0x49, 0x7b, 0xeb, 0x6a, 0xb7, 0x67, - 0x5c, 0x7c, 0x19, 0x2b, 0x2f, 0x01, 0x2b, 0xaf, 0xa0, 0x0a, 0x37, 0xfd, 0xe6, 0x7f, 0x57, 0x1e, - 0xa5, 0xc1, 0xcc, 0xfb, 0x9e, 0xdb, 0x69, 0xe1, 0x88, 0xb8, 0x56, 0x82, 0x38, 0xbd, 0x4f, 0x5c, - 0xdc, 0xd6, 0xba, 0xd8, 0xed, 0x19, 0x2b, 0x23, 0x91, 0x16, 0x07, 0x1e, 0x5f, 0xc2, 0x1e, 0xa6, - 0xc1, 0xfc, 0x86, 0xe7, 0xbf, 0xbb, 0xce, 0x0f, 0x65, 0x4a, 0x5e, 0xc4, 0x09, 0xda, 0xe6, 0xfb, - 0xb4, 0x31, 0xc4, 0x6d, 0x44, 0x03, 0xe7, 0xbe, 0xb5, 0xd2, 0xed, 0x19, 0xd5, 0x91, 0x28, 0xeb, - 0x83, 0x8e, 0x2f, 0x5d, 0xdf, 0xa5, 0xc1, 0xc2, 0xdd, 0x0e, 0x6a, 0x53, 0xc7, 0xc5, 0x82, 0xb2, - 0x88, 0xb0, 0xfd, 0x04, 0x61, 0xa5, 0x3e, 0x61, 0x71, 0x8c, 0xa4, 0xee, 0x7f, 0xdd, 0x9e, 0x71, - 0x79, 0x24, 0xea, 0x86, 0xc1, 0x8f, 0x2f, 0x89, 0x5f, 0xa7, 0xc1, 0xcc, 0xba, 0xa8, 0x97, 0xc2, - 0x37, 0x20, 0x43, 0xc8, 0x53, 0x6f, 0x19, 0xfc, 0x4d, 0x33, 0x8e, 0x18, 0x23, 0x54, 0xe3, 0xc0, - 0xe3, 0x4b, 0xdb, 0xf7, 0x69, 0xb0, 0xb0, 0x8a, 0x29, 0xae, 0x53, 0xdc, 0xb8, 0xe9, 0x60, 0x57, - 0xa1, 0xef, 0xe3, 0x04, 0x7d, 0x65, 0xe5, 0x88, 0x32, 0x14, 0x63, 0xbd, 0xd5, 0xed, 0x19, 0x57, - 0x47, 0x22, 0x70, 0xf8, 0x02, 0x7f, 0x0c, 0x22, 0x79, 0x45, 0x38, 0x26, 0x91, 0x71, 0xcc, 0x6b, - 0x10, 0x19, 0x5f, 0xe0, 0xf8, 0x12, 0xf9, 0x79, 0x1e, 0x4c, 0xf3, 0x1b, 0x82, 0x88, 0xbf, 0x7f, - 0x02, 0x59, 0x3b, 0x4b, 0xf6, 0x60, 0x78, 0xd8, 0x0a, 0xfc, 0xba, 0xb9, 0x2e, 0xab, 0x6a, 0x61, - 0x01, 0x2f, 0x81, 0x3c, 0xe1, 0x47, 0x1a, 0x59, 0x19, 0x95, 0x06, 0x8f, 0xff, 0xf1, 0xc3, 0x53, - 0x2d, 0x65, 0x4b, 0x7b, 0x78, 0x0d, 0xe4, 0x5d, 0x4e, 0xa1, 0x3c, 0xd2, 0x55, 0x06, 0x91, 0xc9, - 0x3a, 0x9f, 0xa1, 0x05, 0x06, 0x5e, 0x00, 0x39, 0x5e, 0x82, 0xc9, 0x0b, 0xb5, 0xd8, 0x63, 0x93, - 0xb5, 0x50, 0x2d, 0x65, 0x0b, 0x73, 0xb8, 0x0c, 0xb2, 0x7e, 0xe0, 0xb5, 0x64, 0x39, 0x7c, 0x6a, - 0xf0, 0x99, 0x6a, 0xfd, 0x58, 0x4b, 0xd9, 0xdc, 0x16, 0x9e, 0x67, 0x67, 0x57, 0x56, 0x78, 0x12, - 0x7e, 0x17, 0xc0, 0x6a, 0x8f, 0x01, 0x98, 0x02, 0x09, 0x4d, 0xe1, 0x79, 0x90, 0xdf, 0xe3, 0xf5, - 0x85, 0xbc, 0xc5, 0x59, 0x54, 0x41, 0xf1, 0xca, 0x83, 0xbd, 0x97, 0xb0, 0x85, 0x37, 0xc1, 0x14, - 0xf5, 0xfc, 0x9d, 0xf0, 0x4b, 0x2e, 0xef, 0x11, 0xca, 0x2a, 0x76, 0xd8, 0x97, 0xbe, 0x96, 0xb2, - 0x63, 0x38, 0x78, 0x07, 0xcc, 0xee, 0xc6, 0xbe, 0x37, 0x98, 0xf0, 0x6b, 0xc9, 0x01, 0x9e, 0x87, - 0x7f, 0x06, 0x6b, 0x29, 0x3b, 0x81, 0x86, 0xab, 0x60, 0x86, 0xc4, 0x92, 0xb0, 0xbc, 0xe7, 0x8b, - 
0xbd, 0x57, 0x3c, 0x4d, 0xd7, 0x52, 0xf6, 0x00, 0x06, 0xde, 0x02, 0x33, 0x8d, 0x58, 0x26, 0xd2, - 0x27, 0x93, 0xbb, 0x1a, 0x9e, 0xab, 0xd8, 0x6a, 0x71, 0xac, 0xba, 0x9a, 0x08, 0x47, 0x7d, 0xea, - 0xe8, 0xd5, 0xe2, 0x01, 0xab, 0xae, 0x26, 0x66, 0x2c, 0xd0, 0xcf, 0x1b, 0x95, 0x6f, 0x73, 0x60, - 0x4a, 0x46, 0x85, 0xb8, 0xd6, 0xb8, 0x18, 0x09, 0x5d, 0x04, 0xc5, 0xe9, 0xa3, 0x84, 0xce, 0xcd, - 0x15, 0x9d, 0xff, 0x27, 0xd2, 0xb9, 0x88, 0x90, 0x85, 0x7e, 0x2e, 0xe2, 0xcf, 0x55, 0x10, 0x52, - 0xdb, 0x2b, 0xa1, 0xb6, 0x45, 0x60, 0x9c, 0x1c, 0x7e, 0x44, 0x08, 0x51, 0x52, 0xd8, 0x57, 0x40, - 0xc1, 0x11, 0x37, 0xa3, 0xc3, 0x42, 0x22, 0x79, 0x71, 0xca, 0xa4, 0x2a, 0x01, 0x70, 0xa5, 0x2f, - 0x70, 0x11, 0x17, 0x27, 0x92, 0x02, 0x8f, 0x40, 0xa1, 0xbe, 0xcf, 0x45, 0xfa, 0xce, 0x4b, 0x4c, - 0xa2, 0xae, 0x8e, 0x5e, 0x4c, 0x8a, 0xfb, 0x06, 0x98, 0x0e, 0xe5, 0xc0, 0xa7, 0xa4, 0xba, 0x4f, - 0x1f, 0x55, 0x25, 0x84, 0xf8, 0x38, 0x0a, 0xae, 0x25, 0x34, 0x24, 0x94, 0x6d, 0x1c, 0xfd, 0xb9, - 0x0c, 0x57, 0x1a, 0x14, 0xd0, 0x5a, 0x42, 0x40, 0xe0, 0xa8, 0xa5, 0x42, 0xf9, 0x24, 0x96, 0x12, - 0x13, 0xb0, 0x06, 0x26, 0x5a, 0x98, 0xa2, 0x06, 0xa2, 0x48, 0x2f, 0xf0, 0xb4, 0x7f, 0x26, 0x1e, - 0x69, 0x7d, 0x31, 0x99, 0xb7, 0xa5, 0xe1, 0x8d, 0x36, 0x0d, 0xf6, 0xe5, 0xf1, 0x31, 0x42, 0x2f, - 0x5e, 0x05, 0xd3, 0x31, 0x03, 0x38, 0x0b, 0x32, 0x3b, 0x38, 0xbc, 0xe1, 0x66, 0x4d, 0x38, 0x0f, - 0x72, 0x7b, 0xc8, 0xed, 0x60, 0xae, 0xa9, 0xa2, 0x2d, 0x3a, 0x57, 0xd2, 0x97, 0x34, 0xab, 0x08, - 0x0a, 0x81, 0x78, 0x8a, 0xd5, 0x78, 0xf2, 0xac, 0x94, 0x7a, 0xfa, 0xac, 0x94, 0x7a, 0xf1, 0xac, - 0xa4, 0x7d, 0x72, 0x50, 0xd2, 0xbe, 0x3c, 0x28, 0x69, 0x8f, 0x0f, 0x4a, 0xda, 0x93, 0x83, 0x92, - 0xf6, 0xf3, 0x41, 0x49, 0xfb, 0xe5, 0xa0, 0x94, 0x7a, 0x71, 0x50, 0xd2, 0x1e, 0x3c, 0x2f, 0xa5, - 0x9e, 0x3c, 0x2f, 0xa5, 0x9e, 0x3e, 0x2f, 0xa5, 0x3e, 0x30, 0xc7, 0xfb, 0x02, 0x6d, 0xe6, 0x39, - 0x4d, 0x2b, 0xbf, 0x06, 0x00, 0x00, 0xff, 0xff, 0x7c, 0x1e, 0xb8, 0x33, 0xfe, 0x1a, 0x00, 0x00, + // 1717 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x4b, 0x6f, 0xdb, 0xc6, + 0x16, 0x16, 0xf5, 0xb4, 0xc6, 0x8f, 0xeb, 0x3b, 0x36, 0x1c, 0x5e, 0x27, 0x11, 0x05, 0x01, 0x37, + 0xd7, 0x37, 0x68, 0xa5, 0x46, 0x4e, 0xdc, 0xd4, 0x35, 0x82, 0x84, 0x75, 0x02, 0x19, 0x4d, 0x8a, + 0x84, 0x36, 0xba, 0xe8, 0xa6, 0x18, 0x4b, 0x63, 0x89, 0x30, 0x45, 0xd2, 0xe4, 0xc8, 0x89, 0x81, + 0x2e, 0xf2, 0x07, 0x8a, 0x06, 0xe8, 0x9f, 0x68, 0x37, 0x2d, 0x5a, 0x74, 0xd5, 0x55, 0xbb, 0x33, + 0x0a, 0x14, 0xc8, 0x32, 0x10, 0x50, 0xb5, 0x71, 0x36, 0x85, 0x57, 0x01, 0xfa, 0x07, 0x8a, 0x79, + 0x90, 0x1a, 0x8a, 0x72, 0x2d, 0xa5, 0xe8, 0xc2, 0x45, 0x37, 0xd2, 0x3c, 0xce, 0x77, 0x38, 0xfc, + 0xce, 0x77, 0x86, 0x67, 0x06, 0xfc, 0xcf, 0xdd, 0x6d, 0x56, 0xf6, 0x3a, 0xd8, 0x33, 0xb1, 0xc7, + 0xfe, 0x0f, 0x3c, 0x64, 0x37, 0xb1, 0xd4, 0x2c, 0xbb, 0x9e, 0x43, 0x1c, 0x08, 0xfa, 0x23, 0x8b, + 0xd5, 0xa6, 0x49, 0x5a, 0x9d, 0xed, 0x72, 0xdd, 0x69, 0x57, 0x9a, 0x4e, 0xd3, 0xa9, 0x34, 0x1d, + 0xa7, 0x69, 0x61, 0xe4, 0x9a, 0xbe, 0x68, 0x56, 0x3c, 0xb7, 0x5e, 0xf1, 0x09, 0x22, 0x1d, 0x9f, + 0xe3, 0x17, 0xe7, 0xa9, 0x21, 0x6b, 0x32, 0x88, 0x18, 0xd5, 0x84, 0x39, 0xeb, 0x6d, 0x77, 0x76, + 0x2a, 0xc4, 0x6c, 0x63, 0x9f, 0xa0, 0xb6, 0x1b, 0x18, 0xd0, 0xf5, 0x59, 0x4e, 0x93, 0x23, 0x4d, + 0xbb, 0x81, 0x1f, 0x35, 0x11, 0xc1, 0x0f, 0xd1, 0x81, 0x30, 0x38, 0x1f, 0x31, 0x08, 0x1a, 0x62, + 0xf2, 0x3f, 0x91, 0x49, 0x7f, 0x17, 0x93, 0x7a, 0x4b, 0x4c, 0x15, 0xc5, 0xd4, 0x9e, 0xd5, 0x76, + 0x1a, 0xd8, 0x62, 0x8b, 0xf5, 0xf9, 0xaf, 0xb0, 0x98, 0xa3, 0x16, 0x6e, 
0xc7, 0x6f, 0xb1, 0x1f, + 0x31, 0xf8, 0xce, 0xa9, 0x7c, 0x6d, 0x23, 0x1f, 0x57, 0x1a, 0x78, 0xc7, 0xb4, 0x4d, 0x62, 0x3a, + 0xb6, 0x2f, 0xb7, 0x85, 0x93, 0x95, 0xd1, 0x9c, 0x0c, 0xc6, 0xa0, 0xf4, 0x55, 0x0a, 0x4c, 0xde, + 0x75, 0x76, 0x4d, 0x03, 0xef, 0x75, 0xb0, 0x4f, 0xe0, 0x3c, 0xc8, 0x30, 0x1b, 0x55, 0x29, 0x2a, + 0x4b, 0x79, 0x83, 0x77, 0xe8, 0xa8, 0x65, 0xb6, 0x4d, 0xa2, 0x26, 0x8b, 0xca, 0xd2, 0xb4, 0xc1, + 0x3b, 0x10, 0x82, 0xb4, 0x4f, 0xb0, 0xab, 0xa6, 0x8a, 0xca, 0x52, 0xca, 0x60, 0x6d, 0xb8, 0x08, + 0x26, 0x4c, 0x9b, 0x60, 0x6f, 0x1f, 0x59, 0x6a, 0x9e, 0x8d, 0x87, 0x7d, 0x78, 0x03, 0xe4, 0x7c, + 0x82, 0x3c, 0xb2, 0xe5, 0xab, 0xe9, 0xa2, 0xb2, 0x34, 0x59, 0x5d, 0x2c, 0xf3, 0x58, 0x95, 0x83, + 0x58, 0x95, 0xb7, 0x82, 0x58, 0xe9, 0x13, 0x87, 0x3d, 0x2d, 0xf1, 0xe4, 0x67, 0x4d, 0x31, 0x02, + 0x10, 0x5c, 0x05, 0x19, 0x6c, 0x37, 0xb6, 0x7c, 0x35, 0x33, 0x06, 0x9a, 0x43, 0xe0, 0x15, 0x90, + 0x6f, 0x98, 0x1e, 0xae, 0x53, 0xce, 0xd4, 0x6c, 0x51, 0x59, 0x9a, 0xa9, 0xce, 0x95, 0xc3, 0xd0, + 0xae, 0x07, 0x53, 0x46, 0xdf, 0x8a, 0xbe, 0x9e, 0x8b, 0x48, 0x4b, 0xcd, 0x31, 0x26, 0x58, 0x1b, + 0x96, 0x40, 0xd6, 0x6f, 0x21, 0xaf, 0xe1, 0xab, 0x13, 0xc5, 0xd4, 0x52, 0x5e, 0x07, 0xc7, 0x3d, + 0x4d, 0x8c, 0x18, 0xe2, 0x1f, 0x7e, 0x08, 0xd2, 0xae, 0x85, 0x6c, 0x15, 0xb0, 0x55, 0xce, 0x96, + 0x25, 0xce, 0xef, 0x5b, 0xc8, 0xd6, 0xdf, 0xea, 0xf6, 0xb4, 0x6b, 0xb2, 0xdc, 0x3d, 0xb4, 0x83, + 0x6c, 0x54, 0xb1, 0x9c, 0x5d, 0xb3, 0xb2, 0xbf, 0x5c, 0x91, 0x23, 0x49, 0x1d, 0x95, 0x1f, 0x50, + 0x07, 0x14, 0x6a, 0x30, 0xc7, 0xa5, 0x1f, 0x92, 0x00, 0xd2, 0x98, 0x6d, 0xd8, 0x3e, 0x41, 0x36, + 0x79, 0x95, 0xd0, 0xad, 0x81, 0x2c, 0x4d, 0x8b, 0x2d, 0x9f, 0x05, 0x6f, 0x54, 0x2e, 0x05, 0x26, + 0x4a, 0x66, 0x7a, 0x2c, 0x32, 0x33, 0x43, 0xc9, 0xcc, 0x9e, 0x4a, 0x66, 0xee, 0xaf, 0x22, 0x53, + 0x05, 0x69, 0xda, 0x83, 0xb3, 0x20, 0xe5, 0xa1, 0x87, 0x8c, 0xbb, 0x29, 0x83, 0x36, 0x4b, 0x5f, + 0xa4, 0xc1, 0x14, 0x4f, 0x0d, 0xdf, 0x75, 0x6c, 0x1f, 0xd3, 0xf5, 0x6e, 0xb2, 0xfd, 0x87, 0x33, + 0x2c, 0xd6, 0xcb, 0x46, 0x0c, 0x31, 0x03, 0x6f, 0x82, 0xf4, 0x3a, 0x22, 0x88, 0xb1, 0x3d, 0x59, + 0x9d, 0x97, 0xd7, 0x4b, 0x7d, 0xd1, 0x39, 0x7d, 0x81, 0x12, 0x7a, 0xdc, 0xd3, 0x66, 0x1a, 0x88, + 0xa0, 0xd7, 0x9c, 0xb6, 0x49, 0x70, 0xdb, 0x25, 0x07, 0x06, 0x43, 0xc2, 0x6b, 0x20, 0x7f, 0xdb, + 0xf3, 0x1c, 0x6f, 0xeb, 0xc0, 0xc5, 0x2c, 0x3a, 0x79, 0xfd, 0xdc, 0x71, 0x4f, 0x9b, 0xc3, 0xc1, + 0xa0, 0x84, 0xe8, 0x5b, 0xc2, 0xff, 0x83, 0x0c, 0xeb, 0xb0, 0x78, 0xe4, 0xf5, 0xb9, 0xe3, 0x9e, + 0xf6, 0x2f, 0x06, 0x91, 0xcc, 0xb9, 0x45, 0x34, 0x7c, 0x99, 0x91, 0xc2, 0x17, 0xaa, 0x28, 0x2b, + 0xab, 0x48, 0x05, 0xb9, 0x7d, 0xec, 0xf9, 0xd4, 0x4d, 0x8e, 0x8d, 0x07, 0x5d, 0x78, 0x0b, 0x00, + 0x4a, 0x8c, 0xe9, 0x13, 0xb3, 0x4e, 0x73, 0x85, 0x92, 0x31, 0x5d, 0xe6, 0x5b, 0xa1, 0x81, 0xfd, + 0x8e, 0x45, 0x74, 0x28, 0x58, 0x90, 0x0c, 0x0d, 0xa9, 0x0d, 0xbf, 0x54, 0x40, 0xae, 0x86, 0x51, + 0x03, 0x7b, 0xbe, 0x9a, 0x2f, 0xa6, 0x96, 0x26, 0xab, 0xff, 0x2d, 0xcb, 0xfb, 0xde, 0x7d, 0xcf, + 0x69, 0x63, 0xd2, 0xc2, 0x1d, 0x3f, 0x08, 0x10, 0xb7, 0xd6, 0xed, 0x6e, 0x4f, 0xc3, 0x23, 0x4a, + 0x62, 0xa4, 0xed, 0xf6, 0xc4, 0x47, 0x1d, 0xf7, 0x34, 0xe5, 0x75, 0x23, 0x58, 0x65, 0xe9, 0x27, + 0x05, 0xfc, 0x9b, 0x06, 0x79, 0x93, 0xfa, 0xf6, 0xa5, 0xb4, 0x6c, 0x23, 0x52, 0x6f, 0xa9, 0x0a, + 0x15, 0xb9, 0xc1, 0x3b, 0xf2, 0x5e, 0x98, 0xfc, 0x53, 0x7b, 0x61, 0x6a, 0xfc, 0xbd, 0x30, 0xc8, + 0xc5, 0xf4, 0xd0, 0x5c, 0xcc, 0x9c, 0x94, 0x8b, 0xa5, 0x4f, 0x52, 0x7c, 0xdf, 0x09, 0xde, 0x6f, + 0x8c, 0xb4, 0xb8, 0x13, 0xa6, 0x45, 0x8a, 0xad, 0x36, 0x54, 0x1b, 0xf7, 0xb5, 0xd1, 0xc0, 0x36, + 
0x31, 0x77, 0x4c, 0xec, 0x9d, 0x92, 0x1c, 0x92, 0xe2, 0x52, 0x51, 0xc5, 0xc9, 0x72, 0x49, 0x9f, + 0x05, 0xb9, 0x0c, 0xe4, 0x48, 0xe6, 0x15, 0x72, 0xa4, 0xf4, 0x5b, 0x12, 0x2c, 0xd0, 0x88, 0xdc, + 0x45, 0xdb, 0xd8, 0x7a, 0x0f, 0xb5, 0xc7, 0x8c, 0xca, 0x25, 0x29, 0x2a, 0x79, 0x1d, 0xfe, 0xc3, + 0xfa, 0x68, 0xac, 0x7f, 0xae, 0x80, 0x89, 0x60, 0x33, 0x87, 0x65, 0x00, 0x38, 0x8c, 0xed, 0xd7, + 0x9c, 0xeb, 0x19, 0x0a, 0xf6, 0xc2, 0x51, 0x43, 0xb2, 0x80, 0x6d, 0x90, 0xe5, 0x3d, 0x91, 0x0b, + 0xe7, 0xa4, 0x5c, 0x20, 0x1e, 0x46, 0xed, 0x5b, 0x0d, 0xe4, 0x12, 0xec, 0xe9, 0x6b, 0x74, 0x15, + 0xdd, 0x9e, 0x76, 0xf9, 0x14, 0x96, 0x58, 0x25, 0xc9, 0xa1, 0x34, 0xc4, 0xfc, 0xb1, 0x86, 0x78, + 0x48, 0xe9, 0x63, 0x05, 0xcc, 0xd2, 0xb5, 0x52, 0x76, 0x42, 0x6d, 0xac, 0x83, 0x09, 0x4f, 0xb4, + 0xd9, 0x8a, 0x27, 0xab, 0xa5, 0x72, 0x94, 0xd9, 0x21, 0x6c, 0xea, 0xe9, 0xc3, 0x9e, 0xa6, 0x18, + 0x21, 0x12, 0x2e, 0x47, 0x98, 0x4c, 0x0e, 0x63, 0x92, 0x42, 0x12, 0x11, 0xee, 0xbe, 0x4b, 0x02, + 0xb8, 0x41, 0x4b, 0x6e, 0x2a, 0xc1, 0xbe, 0x5a, 0x1f, 0xc5, 0x56, 0x74, 0xa1, 0xcf, 0x4b, 0xdc, + 0x5e, 0xbf, 0xd1, 0xed, 0x69, 0xab, 0xa7, 0x10, 0xf3, 0x07, 0x78, 0xe9, 0x2d, 0x64, 0x05, 0x27, + 0xcf, 0xc4, 0x67, 0xe6, 0x9b, 0x24, 0x98, 0x79, 0xdf, 0xb1, 0x3a, 0x6d, 0x1c, 0xd2, 0xe7, 0xc6, + 0xe8, 0x53, 0xfb, 0xf4, 0x45, 0x6d, 0xf5, 0xd5, 0x6e, 0x4f, 0x5b, 0x19, 0x95, 0xba, 0x28, 0xf6, + 0x4c, 0xd3, 0xf6, 0x75, 0x12, 0xcc, 0x6f, 0x39, 0xee, 0xbb, 0x9b, 0xec, 0xc8, 0x26, 0xed, 0x94, + 0xad, 0x18, 0x79, 0xf3, 0x7d, 0xf2, 0x28, 0xe2, 0x1e, 0x22, 0x9e, 0xf9, 0x48, 0x5f, 0xe9, 0xf6, + 0xb4, 0xea, 0xa8, 0xc4, 0xf5, 0x71, 0x67, 0x9a, 0xb4, 0xc3, 0x24, 0x58, 0x78, 0xd0, 0x41, 0x36, + 0x31, 0x2d, 0xcc, 0x89, 0x0b, 0x69, 0xfb, 0x28, 0x46, 0x5b, 0xa1, 0x4f, 0x5b, 0x14, 0x23, 0x08, + 0xbc, 0xd9, 0xed, 0x69, 0x6b, 0xa3, 0x12, 0x38, 0xcc, 0xc3, 0x99, 0xa6, 0xf2, 0xdb, 0x24, 0x98, + 0xd9, 0xe4, 0x05, 0x55, 0xf0, 0x12, 0xfb, 0x43, 0x28, 0x94, 0xef, 0x23, 0xdc, 0xed, 0x72, 0x14, + 0x31, 0x5e, 0xf2, 0x46, 0xb1, 0x67, 0x9a, 0xbc, 0x1f, 0x93, 0x60, 0x61, 0x1d, 0x13, 0x5c, 0x27, + 0xb8, 0x71, 0xc7, 0xc4, 0x96, 0x44, 0xe2, 0x63, 0x25, 0xc6, 0x62, 0x51, 0x3a, 0xcd, 0x0c, 0x05, + 0xe9, 0x7a, 0xb7, 0xa7, 0xdd, 0x18, 0x95, 0xc7, 0xe1, 0x3e, 0xfe, 0x36, 0x7c, 0xb2, 0xe2, 0x71, + 0x5c, 0x3e, 0xa3, 0xa0, 0x57, 0xe3, 0x33, 0xea, 0xe3, 0x4c, 0xf3, 0xf9, 0x69, 0x16, 0x4c, 0xb3, + 0x9b, 0x85, 0x90, 0xc6, 0xcb, 0x40, 0x54, 0xdb, 0x82, 0x43, 0x18, 0x9c, 0xd0, 0x3c, 0xb7, 0x5e, + 0xde, 0x14, 0x75, 0x38, 0xb7, 0x80, 0xd7, 0x41, 0xd6, 0x67, 0xe7, 0x20, 0x51, 0x45, 0x15, 0x06, + 0xaf, 0x0d, 0xa2, 0x27, 0xae, 0x5a, 0xc2, 0x10, 0xf6, 0x70, 0x0d, 0x64, 0x2d, 0xc6, 0xa2, 0x38, + 0x07, 0x96, 0x06, 0x91, 0xf1, 0x93, 0x01, 0x45, 0x73, 0x0c, 0x5c, 0x01, 0x19, 0x56, 0xae, 0x89, + 0xeb, 0xb8, 0xc8, 0x63, 0xe3, 0x45, 0x53, 0x2d, 0x61, 0x70, 0x73, 0x58, 0x05, 0x69, 0xd7, 0x73, + 0xda, 0xa2, 0x7a, 0xbe, 0x30, 0xf8, 0x4c, 0xb9, 0xd6, 0xac, 0x25, 0x0c, 0x66, 0x0b, 0xaf, 0xd2, + 0x03, 0x2f, 0x2d, 0x52, 0x7d, 0x76, 0x87, 0x40, 0x2b, 0x94, 0x01, 0x98, 0x04, 0x09, 0x4c, 0xe1, + 0x55, 0x90, 0xdd, 0x67, 0x25, 0x88, 0xb8, 0x00, 0x5a, 0x94, 0x41, 0xd1, 0xe2, 0x84, 0xbe, 0x17, + 0xb7, 0x85, 0x77, 0xc0, 0x14, 0x71, 0xdc, 0xdd, 0xe0, 0x4b, 0x2f, 0xee, 0x1f, 0x8a, 0x32, 0x76, + 0x58, 0x25, 0x50, 0x4b, 0x18, 0x11, 0x1c, 0xbc, 0x0f, 0x66, 0xf7, 0x22, 0x9f, 0x21, 0xec, 0xb3, + 0x4b, 0xcd, 0x01, 0x9e, 0x87, 0x7f, 0x20, 0x6b, 0x09, 0x23, 0x86, 0x86, 0xeb, 0x60, 0xc6, 0x8f, + 0xec, 0xca, 0xe2, 0x96, 0x30, 0xf2, 0x5e, 0xd1, 0x7d, 0xbb, 0x96, 0x30, 0x06, 0x30, 0xf0, 0x2e, + 0x98, 0x69, 0x44, 0xf6, 
0x24, 0x75, 0x32, 0xbe, 0xaa, 0xe1, 0xbb, 0x16, 0xf5, 0x16, 0xc5, 0xca, + 0xde, 0x78, 0x46, 0xaa, 0x53, 0x27, 0x7b, 0x8b, 0xe6, 0xac, 0xec, 0x8d, 0xcf, 0xe8, 0xa0, 0xbf, + 0x7b, 0x94, 0xbe, 0xcf, 0x80, 0x29, 0x91, 0x15, 0xfc, 0x2e, 0xe4, 0xcd, 0x50, 0xe8, 0x3c, 0x29, + 0x2e, 0x9e, 0x24, 0x74, 0x66, 0x2e, 0xe9, 0xfc, 0x8d, 0x50, 0xe7, 0x3c, 0x43, 0x16, 0xfa, 0x3b, + 0x12, 0x7b, 0xae, 0x84, 0x10, 0xda, 0x5e, 0x0e, 0xb4, 0xcd, 0x13, 0xe3, 0xfc, 0xf0, 0xe3, 0x44, + 0x80, 0x12, 0xc2, 0x5e, 0x05, 0x39, 0x93, 0x5f, 0xaa, 0x0e, 0x4b, 0x89, 0xf8, 0x9d, 0x2b, 0x95, + 0xaa, 0x00, 0xc0, 0xe5, 0xbe, 0xc0, 0x79, 0x5e, 0x9c, 0x8b, 0x0b, 0x3c, 0x04, 0x05, 0xfa, 0xbe, + 0x12, 0xea, 0x3b, 0x2b, 0x30, 0xb1, 0xd2, 0x3b, 0x7c, 0x31, 0x21, 0xee, 0xdb, 0x60, 0x3a, 0x90, + 0x03, 0x9b, 0x12, 0xea, 0xbe, 0x78, 0x52, 0xe5, 0x10, 0xe0, 0xa3, 0x28, 0xb8, 0x11, 0xd3, 0x10, + 0x57, 0xb6, 0x76, 0xf2, 0xb7, 0x33, 0xf0, 0x34, 0x28, 0xa0, 0x8d, 0x98, 0x80, 0xc0, 0x49, 0xae, + 0x02, 0xf9, 0xc4, 0x5c, 0xf1, 0x09, 0x58, 0x03, 0x13, 0x6d, 0x4c, 0x50, 0x03, 0x11, 0xa4, 0xe6, + 0xd8, 0xce, 0x7f, 0x29, 0x9a, 0x69, 0x7d, 0x31, 0x95, 0xef, 0x09, 0xc3, 0xdb, 0x36, 0xf1, 0x0e, + 0xc4, 0x51, 0x33, 0x44, 0x2f, 0xbe, 0x0d, 0xa6, 0x23, 0x06, 0x70, 0x16, 0xa4, 0x76, 0x71, 0x70, + 0x39, 0x4e, 0x9b, 0x70, 0x1e, 0x64, 0xf6, 0x91, 0xd5, 0xc1, 0x4c, 0x53, 0x79, 0x83, 0x77, 0x56, + 0x93, 0xd7, 0x15, 0x3d, 0x0f, 0x72, 0x1e, 0x7f, 0x8a, 0xde, 0x7c, 0xfa, 0xbc, 0x90, 0x78, 0xf6, + 0xbc, 0x90, 0x78, 0xf9, 0xbc, 0xa0, 0x3c, 0x3e, 0x2a, 0x28, 0x9f, 0x1d, 0x15, 0x94, 0xc3, 0xa3, + 0x82, 0xf2, 0xf4, 0xa8, 0xa0, 0xfc, 0x72, 0x54, 0x50, 0x7e, 0x3d, 0x2a, 0x24, 0x5e, 0x1e, 0x15, + 0x94, 0x27, 0x2f, 0x0a, 0x89, 0xa7, 0x2f, 0x0a, 0x89, 0x67, 0x2f, 0x0a, 0x89, 0x0f, 0xae, 0x8c, + 0xfd, 0x11, 0xda, 0xce, 0x32, 0xa6, 0x96, 0x7f, 0x0f, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x5a, 0x6d, + 0x97, 0x3f, 0x1b, 0x00, 0x00, } func (this *LokiRequest) Equal(that interface{}) bool { @@ -5933,7 +5934,7 @@ func (m *LokiRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -6189,7 +6190,7 @@ func (m *LokiInstantRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Plan == nil { - m.Plan = &github_com_grafana_loki_pkg_querier_plan.QueryPlan{} + m.Plan = &github_com_grafana_loki_v3_pkg_querier_plan.QueryPlan{} } if err := m.Plan.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -6583,7 +6584,7 @@ func (m *LokiResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -6970,7 +6971,7 @@ func (m *LokiSeriesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := 
m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7173,7 +7174,7 @@ func (m *LokiLabelNamesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7325,7 +7326,7 @@ func (m *LokiData) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Result = append(m.Result, github_com_grafana_loki_pkg_push.Stream{}) + m.Result = append(m.Result, github_com_grafana_loki_v3_pkg_push.Stream{}) if err := m.Result[len(m.Result)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7535,7 +7536,7 @@ func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.IndexStatsResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.IndexStatsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -7570,7 +7571,7 @@ func (m *IndexStatsResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7658,7 +7659,7 @@ func (m *VolumeResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.VolumeResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.VolumeResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -7693,7 +7694,7 @@ func (m *VolumeResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7781,7 +7782,7 @@ func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.TopKMatrix{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.TopKMatrix{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -7816,7 +7817,7 @@ func (m *TopKSketchesResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -7904,7 +7905,7 @@ func (m 
*QuantileSketchResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.QuantileSketchMatrix{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.QuantileSketchMatrix{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -7939,7 +7940,7 @@ func (m *QuantileSketchResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -8027,7 +8028,7 @@ func (m *ShardsResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.ShardsResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.ShardsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -8062,7 +8063,7 @@ func (m *ShardsResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -8150,7 +8151,7 @@ func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.DetectedFieldsResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.DetectedFieldsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -8185,7 +8186,7 @@ func (m *DetectedFieldsResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -8273,7 +8274,7 @@ func (m *DetectedLabelsResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Response == nil { - m.Response = &github_com_grafana_loki_pkg_logproto.DetectedLabelsResponse{} + m.Response = &github_com_grafana_loki_v3_pkg_logproto.DetectedLabelsResponse{} } if err := m.Response.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -8308,7 +8309,7 @@ func (m *DetectedLabelsResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Headers = append(m.Headers, github_com_grafana_loki_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) + m.Headers = append(m.Headers, github_com_grafana_loki_v3_pkg_querier_queryrange_queryrangebase_definitions.PrometheusResponseHeader{}) if err := m.Headers[len(m.Headers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/querier/queryrange/queryrange.proto b/pkg/querier/queryrange/queryrange.proto 
index 2513d1debb5e8..c0c746a3fc945 100644 --- a/pkg/querier/queryrange/queryrange.proto +++ b/pkg/querier/queryrange/queryrange.proto @@ -13,7 +13,7 @@ import "pkg/push/push.proto"; import "pkg/querier/queryrange/queryrangebase/definitions/definitions.proto"; import "pkg/querier/queryrange/queryrangebase/queryrange.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/queryrange"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/queryrange"; option (gogoproto.marshaler_all) = true; option (gogoproto.sizer_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -34,7 +34,7 @@ message LokiRequest { logproto.Direction direction = 6; string path = 7; repeated string shards = 8 [(gogoproto.jsontag) = "shards"]; - Plan plan = 10 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 10 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message LokiInstantRequest { @@ -47,7 +47,7 @@ message LokiInstantRequest { logproto.Direction direction = 4; string path = 5; repeated string shards = 6 [(gogoproto.jsontag) = "shards"]; - Plan plan = 7 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/plan.QueryPlan"]; + Plan plan = 7 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/plan.QueryPlan"]; } message Plan { @@ -71,7 +71,7 @@ message LokiResponse { ]; repeated definitions.PrometheusResponseHeader Headers = 9 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } @@ -98,7 +98,7 @@ message LokiSeriesResponse { uint32 version = 3; repeated definitions.PrometheusResponseHeader Headers = 4 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; stats.Result statistics = 5 [ (gogoproto.nullable) = false, @@ -112,7 +112,7 @@ message LokiLabelNamesResponse { uint32 version = 3; repeated definitions.PrometheusResponseHeader Headers = 4 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; stats.Result statistics = 5 [ (gogoproto.nullable) = false, @@ -125,7 +125,7 @@ message LokiData { repeated logproto.StreamAdapter Result = 2 [ (gogoproto.nullable) = false, (gogoproto.jsontag) = "result", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/push.Stream" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/push.Stream" ]; } @@ -136,58 +136,58 @@ message LokiPromResponse { } message IndexStatsResponse { - logproto.IndexStatsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.IndexStatsResponse"]; + logproto.IndexStatsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.IndexStatsResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + 
(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message VolumeResponse { - logproto.VolumeResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.VolumeResponse"]; + logproto.VolumeResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.VolumeResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message TopKSketchesResponse { - logproto.TopKMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.TopKMatrix"]; + logproto.TopKMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.TopKMatrix"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message QuantileSketchResponse { - logproto.QuantileSketchMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.QuantileSketchMatrix"]; + logproto.QuantileSketchMatrix response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.QuantileSketchMatrix"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message ShardsResponse { - indexgatewaypb.ShardsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.ShardsResponse"]; + indexgatewaypb.ShardsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.ShardsResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message DetectedFieldsResponse { - logproto.DetectedFieldsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.DetectedFieldsResponse"]; + logproto.DetectedFieldsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.DetectedFieldsResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } message DetectedLabelsResponse { - logproto.DetectedLabelsResponse response = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.DetectedLabelsResponse"]; + logproto.DetectedLabelsResponse response = 1 [(gogoproto.customtype) 
= "github.com/grafana/loki/v3/pkg/logproto.DetectedLabelsResponse"]; repeated definitions.PrometheusResponseHeader Headers = 2 [ (gogoproto.jsontag) = "-", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions.PrometheusResponseHeader" ]; } diff --git a/pkg/querier/queryrange/queryrangebase/alias.go b/pkg/querier/queryrange/queryrangebase/alias.go index 4b4e219202f0b..7aab7a200eb05 100644 --- a/pkg/querier/queryrange/queryrangebase/alias.go +++ b/pkg/querier/queryrange/queryrangebase/alias.go @@ -1,8 +1,8 @@ package queryrangebase import ( - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) // Helpful aliases for refactoring circular imports diff --git a/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go b/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go index d4eb4fb83b25f..1f2b484f640a4 100644 --- a/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go +++ b/pkg/querier/queryrange/queryrangebase/definitions/definitions.pb.go @@ -137,7 +137,7 @@ func init() { } var fileDescriptor_d1a37772b6ae2c5c = []byte{ - // 262 bytes of a gzipped FileDescriptorProto + // 266 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0x2e, 0xc8, 0x4e, 0xd7, 0x2f, 0x2c, 0x4d, 0x2d, 0xca, 0x4c, 0x2d, 0x02, 0xd3, 0x95, 0x45, 0x89, 0x79, 0xe9, 0xa9, 0x48, 0xcc, 0xa4, 0xc4, 0xe2, 0x54, 0xfd, 0x94, 0xd4, 0xb4, 0xcc, 0xbc, 0xcc, 0x92, 0xcc, 0xfc, 0xbc, @@ -147,14 +147,14 @@ var fileDescriptor_d1a37772b6ae2c5c = []byte{ 0x4d, 0x4c, 0x49, 0x2d, 0x12, 0x92, 0xe4, 0x62, 0xf1, 0x4b, 0xcc, 0x4d, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x74, 0x62, 0x7d, 0x75, 0x4f, 0x9e, 0x51, 0x37, 0x08, 0x2c, 0x24, 0x24, 0xcb, 0xc5, 0x16, 0x96, 0x98, 0x53, 0x9a, 0x5a, 0x2c, 0xc1, 0xa4, 0xc0, 0x8c, 0x90, 0x84, 0x0a, 0x2a, 0x85, - 0x70, 0x49, 0x20, 0x1b, 0x5a, 0x5c, 0x90, 0x9f, 0x57, 0x9c, 0x4a, 0xa9, 0xa9, 0x4e, 0xf5, 0x17, - 0x1e, 0xca, 0x31, 0xdc, 0x78, 0x28, 0xc7, 0xf0, 0xe1, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39, 0xc6, - 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, - 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, 0x63, - 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x3c, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, - 0xf3, 0x73, 0xf5, 0xd3, 0x8b, 0x12, 0xd3, 0x12, 0xf3, 0x12, 0xf5, 0x73, 0xf2, 0xb3, 0x33, 0xf5, - 0x49, 0x0e, 0xe0, 0x24, 0x36, 0x70, 0x90, 0x19, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x51, 0x1b, - 0x61, 0xc9, 0x9c, 0x01, 0x00, 0x00, + 0x70, 0x49, 0x20, 0x1b, 0x5a, 0x5c, 0x90, 0x9f, 0x57, 0x9c, 0x4a, 0xa9, 0xa9, 0x4e, 0x4d, 0x8c, + 0x17, 0x1e, 0xca, 0x31, 0xdc, 0x78, 0x28, 0xc7, 0xf0, 0xe1, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39, + 0xc6, 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, + 0x39, 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, + 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x7c, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, + 0x92, 0xf3, 0x73, 0xf5, 0xd3, 0x8b, 0x12, 0xd3, 0x12, 0xf3, 0x12, 0xf5, 0x73, 0xf2, 0xb3, 0x33, + 0xf5, 
0xcb, 0x8c, 0xf5, 0x49, 0x0e, 0xe4, 0x24, 0x36, 0x70, 0xb0, 0x19, 0x03, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xf5, 0x9e, 0x05, 0x86, 0xa0, 0x01, 0x00, 0x00, } func (this *PrometheusRequestHeader) Equal(that interface{}) bool { diff --git a/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto b/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto index 9f5e7967c8524..d5064e3aaad32 100644 --- a/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto +++ b/pkg/querier/queryrange/queryrangebase/definitions/definitions.proto @@ -4,7 +4,7 @@ package definitions; import "gogoproto/gogo.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/querier/queryrange/queryrangebase/definitions/interface.go b/pkg/querier/queryrange/queryrangebase/definitions/interface.go index f8c9a0f5531fb..6f303f387dfa6 100644 --- a/pkg/querier/queryrange/queryrangebase/definitions/interface.go +++ b/pkg/querier/queryrange/queryrangebase/definitions/interface.go @@ -8,7 +8,7 @@ import ( "github.com/gogo/protobuf/proto" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) // Codec is used to encode/decode query range requests and responses so they can be passed down to middlewares. diff --git a/pkg/querier/queryrange/queryrangebase/marshaling_test.go b/pkg/querier/queryrange/queryrangebase/marshaling_test.go index 4fcba1804c3a6..c2d9f432a4510 100644 --- a/pkg/querier/queryrange/queryrangebase/marshaling_test.go +++ b/pkg/querier/queryrange/queryrangebase/marshaling_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func BenchmarkPrometheusCodec_DecodeResponse(b *testing.B) { diff --git a/pkg/querier/queryrange/queryrangebase/middleware.go b/pkg/querier/queryrange/queryrangebase/middleware.go index 10e80ddf8a2ec..44d0ad65582e9 100644 --- a/pkg/querier/queryrange/queryrangebase/middleware.go +++ b/pkg/querier/queryrange/queryrangebase/middleware.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/dskit/middleware" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) const ( diff --git a/pkg/querier/queryrange/queryrangebase/promql_test.go b/pkg/querier/queryrange/queryrangebase/promql_test.go index 8e84304e64117..6ab7f460a99cb 100644 --- a/pkg/querier/queryrange/queryrangebase/promql_test.go +++ b/pkg/querier/queryrange/queryrangebase/promql_test.go @@ -18,7 +18,7 @@ import ( "github.com/prometheus/prometheus/util/annotations" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/astmapper" ) var ( diff --git a/pkg/querier/queryrange/queryrangebase/query_range.go b/pkg/querier/queryrange/queryrangebase/query_range.go index 5e8f0dd855edb..6b33097c0033b 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range.go +++ b/pkg/querier/queryrange/queryrangebase/query_range.go @@ -19,9 +19,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/timestamp" - "github.com/grafana/loki/pkg/logproto" - 
"github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) // StatusSuccess Prometheus success result. diff --git a/pkg/querier/queryrange/queryrangebase/query_range_test.go b/pkg/querier/queryrange/queryrangebase/query_range_test.go index ada34f5aba895..4a723403fe1c4 100644 --- a/pkg/querier/queryrange/queryrangebase/query_range_test.go +++ b/pkg/querier/queryrange/queryrangebase/query_range_test.go @@ -15,7 +15,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestResponse(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/queryrange.pb.go b/pkg/querier/queryrange/queryrangebase/queryrange.pb.go index f908b3621dcf6..caf71d1f83206 100644 --- a/pkg/querier/queryrange/queryrangebase/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrangebase/queryrange.pb.go @@ -10,10 +10,10 @@ import ( _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" _ "github.com/golang/protobuf/ptypes/duration" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" - logproto "github.com/grafana/loki/pkg/logproto" - definitions "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - resultscache "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" + logproto "github.com/grafana/loki/v3/pkg/logproto" + definitions "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + resultscache "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" io "io" math "math" math_bits "math/bits" @@ -260,8 +260,8 @@ func (m *PrometheusData) GetResult() []SampleStream { } type SampleStream struct { - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"metric"` - Samples []logproto.LegacySample `protobuf:"bytes,2,rep,name=samples,proto3" json:"values"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,1,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"metric"` + Samples []logproto.LegacySample `protobuf:"bytes,2,rep,name=samples,proto3" json:"values"` } func (m *SampleStream) Reset() { *m = SampleStream{} } @@ -315,54 +315,54 @@ func init() { } var fileDescriptor_4cc6a0c1d6b614c4 = []byte{ - // 739 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcf, 0x4f, 0xdb, 0x48, - 0x18, 0x8d, 0xc9, 0x2f, 0x32, 0xac, 0xb2, 0xda, 0x01, 0xb1, 0x5e, 0x16, 0x8d, 0xa3, 0x68, 0x57, - 0xca, 0x4a, 0x5b, 0xbb, 0xa5, 0x2a, 0x87, 0x4a, 0x48, 0xad, 0x81, 0xaa, 0x42, 0x48, 0x45, 0x86, - 0x53, 0x6f, 0x93, 0x64, 0x70, 0x2c, 0x6c, 0x8f, 0x99, 0x19, 0x23, 0xe5, 0xd6, 0x53, 0xcf, 0xdc, - 0xda, 0x3f, 0xa1, 0xa7, 0xfe, 0x1d, 0x1c, 0x39, 0xa2, 0x1e, 0xdc, 0x12, 0x2e, 0x55, 0x4e, 0xfc, - 0x09, 0xd5, 0xcc, 0x38, 0xe0, 0x04, 0xd1, 0x1f, 0xa7, 0xcc, 0xe7, 0xef, 0xbd, 0x37, 0xef, 0x7b, - 0x9f, 0x63, 0xb0, 0x9e, 0x1c, 0xf9, 0xce, 0x71, 0x4a, 0x58, 0x40, 0x98, 0xfa, 0x1d, 0x32, 0x1c, - 0xfb, 0xa4, 0x70, 0xec, 0x62, 0x5e, 0x2c, 0xed, 0x84, 0x51, 0x41, 
0x61, 0x73, 0x1a, 0xb0, 0xb2, - 0xe4, 0x53, 0x9f, 0xaa, 0x96, 0x23, 0x4f, 0x1a, 0xb5, 0x82, 0x7c, 0x4a, 0xfd, 0x90, 0x38, 0xaa, - 0xea, 0xa6, 0x87, 0x4e, 0x3f, 0x65, 0x58, 0x04, 0x34, 0xce, 0xfb, 0xd6, 0x6c, 0x5f, 0x04, 0x11, - 0xe1, 0x02, 0x47, 0x49, 0x0e, 0xf8, 0x5b, 0xda, 0x0b, 0xa9, 0xaf, 0x95, 0x27, 0x87, 0xbc, 0xb9, - 0xf9, 0x73, 0xde, 0xfb, 0xe4, 0x30, 0x88, 0x03, 0x79, 0x2b, 0x2f, 0x9e, 0x73, 0x91, 0x87, 0x52, - 0x84, 0x0b, 0xca, 0xb0, 0x4f, 0x9c, 0xde, 0x20, 0x8d, 0x8f, 0x9c, 0x1e, 0xee, 0x0d, 0x88, 0xc3, - 0x08, 0x4f, 0x43, 0xc1, 0x75, 0x21, 0x86, 0x09, 0xc9, 0x19, 0xed, 0x77, 0x65, 0xf0, 0xc7, 0x1e, - 0xa3, 0x11, 0x11, 0x03, 0x92, 0x72, 0x8f, 0x1c, 0xa7, 0x84, 0x0b, 0x08, 0x41, 0x25, 0xc1, 0x62, - 0x60, 0x1a, 0x2d, 0xa3, 0xd3, 0xf0, 0xd4, 0x19, 0x3e, 0x05, 0x55, 0x2e, 0x30, 0x13, 0xe6, 0x5c, - 0xcb, 0xe8, 0x2c, 0xac, 0xad, 0xd8, 0x7a, 0x5c, 0x7b, 0x32, 0xae, 0x7d, 0x30, 0x19, 0xd7, 0x9d, - 0x3f, 0xcb, 0xac, 0xd2, 0xe9, 0x67, 0xcb, 0xf0, 0x34, 0x05, 0xae, 0x83, 0x32, 0x89, 0xfb, 0x66, - 0xf9, 0x17, 0x98, 0x92, 0x20, 0x7d, 0x70, 0x41, 0x12, 0xb3, 0xd2, 0x32, 0x3a, 0x65, 0x4f, 0x9d, - 0xe1, 0x06, 0xa8, 0xcb, 0x60, 0x69, 0x2a, 0xcc, 0xaa, 0xd2, 0xfb, 0xeb, 0x8e, 0xde, 0x56, 0xbe, - 0x18, 0x2d, 0xf7, 0x5e, 0xca, 0x4d, 0x38, 0x70, 0x09, 0x54, 0x55, 0xa4, 0x66, 0x4d, 0xcd, 0xa6, - 0x0b, 0xb8, 0x03, 0x9a, 0x32, 0x9b, 0x20, 0xf6, 0x5f, 0x25, 0x2a, 0x50, 0xb3, 0xae, 0xb4, 0x57, - 0xed, 0x62, 0x72, 0xf6, 0xe6, 0x14, 0xc6, 0xad, 0x48, 0x79, 0x6f, 0x86, 0x09, 0xb7, 0x41, 0xfd, - 0x25, 0xc1, 0x7d, 0xc2, 0xb8, 0x39, 0xdf, 0x2a, 0x77, 0x16, 0xd6, 0xfe, 0xb1, 0x8b, 0x9b, 0xba, - 0x93, 0xb6, 0x06, 0xbb, 0xd5, 0x71, 0x66, 0x19, 0x0f, 0xbc, 0x09, 0xb7, 0xfd, 0x71, 0x0e, 0xc0, - 0x22, 0x96, 0x27, 0x34, 0xe6, 0x04, 0xb6, 0x41, 0x6d, 0x5f, 0x60, 0x91, 0x72, 0xbd, 0x1c, 0x17, - 0x8c, 0x33, 0xab, 0xc6, 0xd5, 0x13, 0x2f, 0xef, 0xc0, 0x1d, 0x50, 0xd9, 0xc2, 0x02, 0xe7, 0x9b, - 0x42, 0xf6, 0xf4, 0x3b, 0x54, 0x70, 0x20, 0x51, 0xee, 0xb2, 0x9c, 0x62, 0x9c, 0x59, 0xcd, 0x3e, - 0x16, 0xf8, 0x7f, 0x1a, 0x05, 0x82, 0x44, 0x89, 0x18, 0x7a, 0x4a, 0x03, 0x3e, 0x01, 0x8d, 0x6d, - 0xc6, 0x28, 0x3b, 0x18, 0x26, 0x44, 0x2d, 0xb0, 0xe1, 0xfe, 0x39, 0xce, 0xac, 0x45, 0x32, 0x79, - 0x58, 0x60, 0xdc, 0x22, 0xe1, 0x7f, 0xa0, 0xaa, 0x0a, 0xb5, 0xba, 0x86, 0xbb, 0x38, 0xce, 0xac, - 0xdf, 0x15, 0xa5, 0x00, 0xd7, 0x08, 0xf8, 0xe2, 0x36, 0xaf, 0xaa, 0xca, 0xeb, 0xdf, 0x7b, 0xf3, - 0xd2, 0x19, 0xdc, 0x13, 0xd8, 0x5b, 0x03, 0x34, 0xa7, 0x47, 0x83, 0x36, 0x00, 0x9e, 0xda, 0x9f, - 0x72, 0xaf, 0x03, 0x6b, 0x8e, 0x33, 0x0b, 0xb0, 0x9b, 0xa7, 0x5e, 0x01, 0x01, 0xb7, 0x40, 0x4d, - 0x57, 0xe6, 0x9c, 0x72, 0xb2, 0x3a, 0x1b, 0xdd, 0x3e, 0x8e, 0x92, 0x90, 0xec, 0x0b, 0x46, 0x70, - 0xe4, 0x36, 0xf3, 0xe0, 0x6a, 0x5a, 0xcd, 0xcb, 0xb9, 0xed, 0x33, 0x03, 0xfc, 0x56, 0x04, 0xc2, - 0x13, 0x50, 0x0b, 0x71, 0x97, 0x84, 0x72, 0x67, 0x65, 0xf5, 0xc6, 0xde, 0xfc, 0xf9, 0x77, 0x89, - 0x8f, 0x7b, 0xc3, 0x5d, 0xd9, 0xdd, 0xc3, 0x01, 0x73, 0x37, 0xa5, 0xe6, 0xa7, 0xcc, 0x7a, 0xe4, - 0x07, 0x62, 0x90, 0x76, 0xed, 0x1e, 0x8d, 0x1c, 0x9f, 0xe1, 0x43, 0x1c, 0x63, 0x27, 0xa4, 0x47, - 0x81, 0x53, 0xfc, 0x86, 0xd8, 0x8a, 0xf7, 0xbc, 0x8f, 0x13, 0x41, 0x98, 0x34, 0x12, 0x11, 0xc1, - 0x82, 0x9e, 0x97, 0xdf, 0x06, 0x9f, 0x81, 0x3a, 0x57, 0x3e, 0x78, 0x3e, 0xcf, 0xf2, 0xec, 0xc5, - 0xda, 0xe6, 0xed, 0x24, 0x27, 0x38, 0x4c, 0x09, 0xf7, 0x26, 0x34, 0x97, 0x9f, 0x5f, 0xa2, 0xd2, - 0xc5, 0x25, 0x2a, 0x5d, 0x5f, 0x22, 0xe3, 0xcd, 0x08, 0x19, 0x1f, 0x46, 0xc8, 0x38, 0x1b, 0x21, - 0xe3, 0x7c, 0x84, 0x8c, 0x2f, 0x23, 0x64, 0x7c, 0x1d, 0xa1, 0xd2, 0xf5, 0x08, 0x19, 0xa7, 
0x57, - 0xa8, 0x74, 0x7e, 0x85, 0x4a, 0x17, 0x57, 0xa8, 0xf4, 0x7a, 0xe3, 0x7b, 0xe6, 0x7f, 0xf8, 0x8d, - 0xeb, 0xd6, 0x94, 0xc3, 0xc7, 0xdf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xfe, 0xcd, 0xe4, 0x4f, 0xcf, - 0x05, 0x00, 0x00, + // 740 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcf, 0x4f, 0xd4, 0x40, + 0x18, 0xdd, 0xb2, 0xbf, 0xd8, 0xc1, 0xac, 0x71, 0x20, 0x58, 0x91, 0xb4, 0x9b, 0x8d, 0x26, 0x6b, + 0xa2, 0xad, 0x01, 0xe5, 0x60, 0x62, 0x82, 0xe5, 0x47, 0x0c, 0x21, 0x91, 0x14, 0x4e, 0xde, 0x66, + 0x77, 0x87, 0x6e, 0x43, 0xdb, 0x29, 0x33, 0x53, 0x92, 0xbd, 0x79, 0xf2, 0xcc, 0x4d, 0xff, 0x04, + 0x4f, 0xfe, 0x1d, 0x24, 0x5e, 0x38, 0x12, 0x0f, 0x55, 0x96, 0x8b, 0xd9, 0x13, 0x7f, 0x82, 0x99, + 0x99, 0x2e, 0x74, 0x97, 0x10, 0xf4, 0xb4, 0xf3, 0xf5, 0x7b, 0xef, 0xcd, 0xfb, 0xde, 0xd7, 0x2d, + 0x58, 0x89, 0x0f, 0x3c, 0xfb, 0x30, 0xc1, 0xd4, 0xc7, 0x54, 0xfe, 0xf6, 0x29, 0x8a, 0x3c, 0x9c, + 0x3b, 0xb6, 0x11, 0xcb, 0x97, 0x56, 0x4c, 0x09, 0x27, 0xb0, 0x3e, 0x0e, 0x58, 0x98, 0xf3, 0x88, + 0x47, 0x64, 0xcb, 0x16, 0x27, 0x85, 0x5a, 0x30, 0x3c, 0x42, 0xbc, 0x00, 0xdb, 0xb2, 0x6a, 0x27, + 0xfb, 0x76, 0x37, 0xa1, 0x88, 0xfb, 0x24, 0xca, 0xfa, 0xe6, 0x64, 0x9f, 0xfb, 0x21, 0x66, 0x1c, + 0x85, 0x71, 0x06, 0x78, 0x2c, 0xec, 0x05, 0xc4, 0x53, 0xca, 0xa3, 0x43, 0xd6, 0x5c, 0xfb, 0x37, + 0xef, 0x5d, 0xbc, 0xef, 0x47, 0xbe, 0xb8, 0x95, 0xe5, 0xcf, 0x99, 0xc8, 0x4b, 0x21, 0xc2, 0x38, + 0xa1, 0xc8, 0xc3, 0x76, 0xa7, 0x97, 0x44, 0x07, 0x76, 0x07, 0x75, 0x7a, 0xd8, 0xa6, 0x98, 0x25, + 0x01, 0x67, 0xaa, 0xe0, 0xfd, 0x18, 0x67, 0x8c, 0xe6, 0x97, 0x22, 0x78, 0xb0, 0x43, 0x49, 0x88, + 0x79, 0x0f, 0x27, 0xcc, 0xc5, 0x87, 0x09, 0x66, 0x1c, 0x42, 0x50, 0x8a, 0x11, 0xef, 0xe9, 0x5a, + 0x43, 0x6b, 0xd5, 0x5c, 0x79, 0x86, 0x6f, 0x40, 0x99, 0x71, 0x44, 0xb9, 0x3e, 0xd5, 0xd0, 0x5a, + 0x33, 0x4b, 0x0b, 0x96, 0x1a, 0xd7, 0x1a, 0x8d, 0x6b, 0xed, 0x8d, 0xc6, 0x75, 0xa6, 0x4f, 0x52, + 0xb3, 0x70, 0xfc, 0xcb, 0xd4, 0x5c, 0x45, 0x81, 0x2b, 0xa0, 0x88, 0xa3, 0xae, 0x5e, 0xfc, 0x0f, + 0xa6, 0x20, 0x08, 0x1f, 0x8c, 0xe3, 0x58, 0x2f, 0x35, 0xb4, 0x56, 0xd1, 0x95, 0x67, 0xf8, 0x16, + 0x54, 0x45, 0xb0, 0x24, 0xe1, 0x7a, 0x59, 0xea, 0x3d, 0xba, 0xa1, 0xb7, 0x9e, 0x2d, 0x46, 0xc9, + 0x7d, 0x15, 0x72, 0x23, 0x0e, 0x9c, 0x03, 0x65, 0x19, 0xa9, 0x5e, 0x91, 0xb3, 0xa9, 0x02, 0x6e, + 0x81, 0xba, 0xc8, 0xc6, 0x8f, 0xbc, 0x0f, 0xb1, 0x0c, 0x54, 0xaf, 0x4a, 0xed, 0x45, 0x2b, 0x9f, + 0x9c, 0xb5, 0x36, 0x86, 0x71, 0x4a, 0x42, 0xde, 0x9d, 0x60, 0xc2, 0x0d, 0x50, 0x7d, 0x8f, 0x51, + 0x17, 0x53, 0xa6, 0x4f, 0x37, 0x8a, 0xad, 0x99, 0xa5, 0x27, 0x56, 0x7e, 0x53, 0x37, 0xd2, 0x56, + 0x60, 0xa7, 0x3c, 0x4c, 0x4d, 0xed, 0x85, 0x3b, 0xe2, 0x36, 0xbf, 0x4f, 0x01, 0x98, 0xc7, 0xb2, + 0x98, 0x44, 0x0c, 0xc3, 0x26, 0xa8, 0xec, 0x72, 0xc4, 0x13, 0xa6, 0x96, 0xe3, 0x80, 0x61, 0x6a, + 0x56, 0x98, 0x7c, 0xe2, 0x66, 0x1d, 0xb8, 0x05, 0x4a, 0xeb, 0x88, 0xa3, 0x6c, 0x53, 0x86, 0x35, + 0xfe, 0x0e, 0xe5, 0x1c, 0x08, 0x94, 0x33, 0x2f, 0xa6, 0x18, 0xa6, 0x66, 0xbd, 0x8b, 0x38, 0x7a, + 0x4e, 0x42, 0x9f, 0xe3, 0x30, 0xe6, 0x7d, 0x57, 0x6a, 0xc0, 0xd7, 0xa0, 0xb6, 0x41, 0x29, 0xa1, + 0x7b, 0xfd, 0x18, 0xcb, 0x05, 0xd6, 0x9c, 0x87, 0xc3, 0xd4, 0x9c, 0xc5, 0xa3, 0x87, 0x39, 0xc6, + 0x35, 0x12, 0x3e, 0x03, 0x65, 0x59, 0xc8, 0xd5, 0xd5, 0x9c, 0xd9, 0x61, 0x6a, 0xde, 0x97, 0x94, + 0x1c, 0x5c, 0x21, 0xe0, 0xe6, 0x75, 0x5e, 0x65, 0x99, 0xd7, 0xd3, 0x5b, 0xf3, 0x52, 0x19, 0xdc, + 0x12, 0xd8, 0x67, 0x0d, 0xd4, 0xc7, 0x47, 0x83, 0x16, 0x00, 0xae, 0xdc, 0x9f, 0x74, 0xaf, 0x02, + 0xab, 0x0f, 0x53, 0x13, 0xd0, 0xab, 0xa7, 0x6e, 
0x0e, 0x01, 0xd7, 0x41, 0x45, 0x55, 0xfa, 0x94, + 0x74, 0xb2, 0x38, 0x19, 0xdd, 0x2e, 0x0a, 0xe3, 0x00, 0xef, 0x72, 0x8a, 0x51, 0xe8, 0xd4, 0xb3, + 0xe0, 0x2a, 0x4a, 0xcd, 0xcd, 0xb8, 0xcd, 0x1f, 0x1a, 0xb8, 0x97, 0x07, 0xc2, 0x3e, 0xa8, 0x04, + 0xa8, 0x8d, 0x03, 0xb1, 0xb3, 0xa2, 0x7c, 0x63, 0xaf, 0xfe, 0xfc, 0xdb, 0xd8, 0x43, 0x9d, 0xfe, + 0xb6, 0xe8, 0xee, 0x20, 0x9f, 0x3a, 0x9b, 0x42, 0xf3, 0x67, 0x6a, 0xbe, 0xf2, 0x7c, 0xde, 0x4b, + 0xda, 0x56, 0x87, 0x84, 0xb6, 0x47, 0xd1, 0x3e, 0x8a, 0x90, 0x1d, 0x90, 0x03, 0xdf, 0x3e, 0x5a, + 0xb6, 0xf3, 0x9f, 0x11, 0x4b, 0x52, 0xdf, 0x75, 0x51, 0xcc, 0x31, 0x15, 0x5e, 0x42, 0xcc, 0xa9, + 0xdf, 0x71, 0xb3, 0x0b, 0xe1, 0x2a, 0xa8, 0x32, 0x69, 0x85, 0x65, 0x23, 0xcd, 0x4f, 0xde, 0xad, + 0x9c, 0x5e, 0x0f, 0x73, 0x84, 0x82, 0x04, 0x33, 0x77, 0x44, 0x73, 0x8e, 0x4e, 0xcf, 0x8d, 0xc2, + 0xd9, 0xb9, 0x51, 0xb8, 0x3c, 0x37, 0xb4, 0x4f, 0x03, 0x43, 0xfb, 0x36, 0x30, 0xb4, 0x93, 0x81, + 0xa1, 0x9d, 0x0e, 0x0c, 0xed, 0xf7, 0xc0, 0xd0, 0xfe, 0x0c, 0x8c, 0xc2, 0xe5, 0xc0, 0xd0, 0x8e, + 0x2f, 0x8c, 0xc2, 0xe9, 0x85, 0x51, 0x38, 0xbb, 0x30, 0x0a, 0x1f, 0x57, 0xef, 0xf0, 0x7f, 0xe7, + 0x97, 0xae, 0x5d, 0x91, 0x26, 0x97, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xae, 0xb0, 0xf3, 0xf9, + 0xd5, 0x05, 0x00, 0x00, } func (this *PrometheusRequest) Equal(that interface{}) bool { @@ -1751,7 +1751,7 @@ func (m *SampleStream) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/querier/queryrange/queryrangebase/queryrange.proto b/pkg/querier/queryrange/queryrangebase/queryrange.proto index 98ddaa2b7d2db..4d8e42016cc22 100644 --- a/pkg/querier/queryrange/queryrangebase/queryrange.proto +++ b/pkg/querier/queryrange/queryrangebase/queryrange.proto @@ -9,7 +9,7 @@ import "pkg/logproto/logproto.proto"; import "pkg/querier/queryrange/queryrangebase/definitions/definitions.proto"; import "pkg/storage/chunk/cache/resultscache/types.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -56,7 +56,7 @@ message SampleStream { repeated logproto.LegacyLabelPair labels = 1 [ (gogoproto.nullable) = false, (gogoproto.jsontag) = "metric", - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated logproto.LegacySample samples = 2 [ (gogoproto.nullable) = false, diff --git a/pkg/querier/queryrange/queryrangebase/results_cache.go b/pkg/querier/queryrange/queryrangebase/results_cache.go index 3511fe0b7dd30..e519ae74f075d 100644 --- a/pkg/querier/queryrange/queryrangebase/results_cache.go +++ b/pkg/querier/queryrange/queryrangebase/results_cache.go @@ -15,10 +15,10 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/querier/queryrange/queryrangebase/results_cache_test.go b/pkg/querier/queryrange/queryrangebase/results_cache_test.go index 2ee599fab30ae..fa1d9c81ef4f1 100644 --- a/pkg/querier/queryrange/queryrangebase/results_cache_test.go +++ b/pkg/querier/queryrange/queryrangebase/results_cache_test.go @@ -14,11 +14,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/querier/queryrange/queryrangebase/retry.go b/pkg/querier/queryrange/queryrangebase/retry.go index d051363771bb9..f02b5d73cd6c4 100644 --- a/pkg/querier/queryrange/queryrangebase/retry.go +++ b/pkg/querier/queryrange/queryrangebase/retry.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type RetryMiddlewareMetrics struct { diff --git a/pkg/querier/queryrange/queryrangebase/retry_test.go b/pkg/querier/queryrange/queryrangebase/retry_test.go index 2c4a15bb9f480..7476fa21a06b2 100644 --- a/pkg/querier/queryrange/queryrangebase/retry_test.go +++ b/pkg/querier/queryrange/queryrangebase/retry_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestRetry(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/series_test.go b/pkg/querier/queryrange/queryrangebase/series_test.go index 0bfdfaefcced4..db574403fffb4 100644 --- a/pkg/querier/queryrange/queryrangebase/series_test.go +++ b/pkg/querier/queryrange/queryrangebase/series_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func Test_ResponseToSamples(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/test_utils.go b/pkg/querier/queryrange/queryrangebase/test_utils.go index fdc02d87a9091..64be6cc0b48e3 100644 --- a/pkg/querier/queryrange/queryrangebase/test_utils.go +++ b/pkg/querier/queryrange/queryrangebase/test_utils.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/util/annotations" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/series" ) // genLabels will create a slice of labels where each label has an equal chance to occupy a value from [0,labelBuckets]. 
It returns a slice of length labelBuckets^len(labelSet) diff --git a/pkg/querier/queryrange/queryrangebase/test_utils_test.go b/pkg/querier/queryrange/queryrangebase/test_utils_test.go index 0eacb00199375..5cf748dd7dbc3 100644 --- a/pkg/querier/queryrange/queryrangebase/test_utils_test.go +++ b/pkg/querier/queryrange/queryrangebase/test_utils_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/astmapper" ) func TestGenLabelsCorrectness(t *testing.T) { diff --git a/pkg/querier/queryrange/queryrangebase/util.go b/pkg/querier/queryrange/queryrangebase/util.go index 5073b715bc269..a47753013c607 100644 --- a/pkg/querier/queryrange/queryrangebase/util.go +++ b/pkg/querier/queryrange/queryrangebase/util.go @@ -3,7 +3,7 @@ package queryrangebase import ( "context" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" ) // RequestResponse contains a request response and the respective request that was used. diff --git a/pkg/querier/queryrange/queryrangebase/value.go b/pkg/querier/queryrange/queryrangebase/value.go index b0d96f100eac1..e798233dcea38 100644 --- a/pkg/querier/queryrange/queryrangebase/value.go +++ b/pkg/querier/queryrange/queryrangebase/value.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/series" ) // FromResult transforms a promql query result into a samplestream diff --git a/pkg/querier/queryrange/queryrangebase/value_test.go b/pkg/querier/queryrange/queryrangebase/value_test.go index 249fe5eaeb812..965effa30d7ae 100644 --- a/pkg/querier/queryrange/queryrangebase/value_test.go +++ b/pkg/querier/queryrange/queryrangebase/value_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestFromValue(t *testing.T) { diff --git a/pkg/querier/queryrange/querysharding.go b/pkg/querier/queryrange/querysharding.go index 8a11c546a7b64..c67cd753a693a 100644 --- a/pkg/querier/queryrange/querysharding.go +++ b/pkg/querier/queryrange/querysharding.go @@ -15,18 +15,18 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + 
"github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) var errInvalidShardingRange = errors.New("Query does not fit in a single sharding configuration") diff --git a/pkg/querier/queryrange/querysharding_test.go b/pkg/querier/queryrange/querysharding_test.go index b17dfc4d3678a..cb080c279a4d1 100644 --- a/pkg/querier/queryrange/querysharding_test.go +++ b/pkg/querier/queryrange/querysharding_test.go @@ -16,17 +16,17 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index fed95d0110aae..83657d8a5964c 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -16,17 +16,17 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - logqllog "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - logutil "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + logqllog "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + base "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + logutil "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index 9996790a51147..27d3ff781b0b5 100644 --- a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -20,23 +20,23 @@ import ( "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - base "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" - valid "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + base "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" + valid "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/querier/queryrange/serialize.go b/pkg/querier/queryrange/serialize.go index b85f707a692b7..d7a5eb125d30c 100644 --- a/pkg/querier/queryrange/serialize.go +++ b/pkg/querier/queryrange/serialize.go @@ -5,10 +5,10 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/httpreq" - serverutil "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/httpreq" + serverutil "github.com/grafana/loki/v3/pkg/util/server" ) type serializeRoundTripper struct { diff --git a/pkg/querier/queryrange/serialize_test.go b/pkg/querier/queryrange/serialize_test.go index f926da9f19b55..0bd6c36aa4bd6 100644 --- a/pkg/querier/queryrange/serialize_test.go +++ b/pkg/querier/queryrange/serialize_test.go @@ -11,10 +11,10 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func TestResponseFormat(t *testing.T) { diff --git a/pkg/querier/queryrange/series_cache.go b/pkg/querier/queryrange/series_cache.go index 5120d61fb0b4f..c7bf2e165117b 100644 --- a/pkg/querier/queryrange/series_cache.go +++ b/pkg/querier/queryrange/series_cache.go @@ -14,10 +14,10 @@ import ( 
"github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util/validation" ) type cacheKeySeries struct { diff --git a/pkg/querier/queryrange/series_cache_test.go b/pkg/querier/queryrange/series_cache_test.go index 6ba869a69411a..97a5eabb9ae2b 100644 --- a/pkg/querier/queryrange/series_cache_test.go +++ b/pkg/querier/queryrange/series_cache_test.go @@ -11,13 +11,13 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util" ) var ( diff --git a/pkg/querier/queryrange/shard_resolver.go b/pkg/querier/queryrange/shard_resolver.go index 2808a4ae2eb21..f6bed294eaae0 100644 --- a/pkg/querier/queryrange/shard_resolver.go +++ b/pkg/querier/queryrange/shard_resolver.go @@ -17,16 +17,16 @@ import ( "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - logqlstats "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + logqlstats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) func shardResolverForConf( diff --git a/pkg/querier/queryrange/split_by_interval.go b/pkg/querier/queryrange/split_by_interval.go index 2f693b438dbd0..92c956bbfed82 100644 --- a/pkg/querier/queryrange/split_by_interval.go +++ b/pkg/querier/queryrange/split_by_interval.go @@ -12,16 +12,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/math" "github.com/grafana/dskit/tenant" - 
"github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/validation" ) type lokiResult struct { diff --git a/pkg/querier/queryrange/split_by_interval_test.go b/pkg/querier/queryrange/split_by_interval_test.go index 6c6d66e3bb662..c74ec05c252c7 100644 --- a/pkg/querier/queryrange/split_by_interval_test.go +++ b/pkg/querier/queryrange/split_by_interval_test.go @@ -15,15 +15,15 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" ) var nilMetrics = NewSplitByMetrics(nil) diff --git a/pkg/querier/queryrange/split_by_range.go b/pkg/querier/queryrange/split_by_range.go index 16076cd948596..380466d04408b 100644 --- a/pkg/querier/queryrange/split_by_range.go +++ b/pkg/querier/queryrange/split_by_range.go @@ -11,13 +11,13 @@ import ( "github.com/grafana/dskit/tenant" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/util/validation" ) type splitByRange struct { diff --git a/pkg/querier/queryrange/split_by_range_test.go b/pkg/querier/queryrange/split_by_range_test.go index af66c10a2f08a..0f61c3c276b1f 100644 --- a/pkg/querier/queryrange/split_by_range_test.go +++ b/pkg/querier/queryrange/split_by_range_test.go @@ -11,11 +11,11 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/loghttp" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func Test_RangeVectorSplitAlign(t *testing.T) { diff --git a/pkg/querier/queryrange/splitters.go b/pkg/querier/queryrange/splitters.go index e9f2de5b41034..30bc3da18392d 100644 --- a/pkg/querier/queryrange/splitters.go +++ b/pkg/querier/queryrange/splitters.go @@ -5,10 +5,10 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) type splitter interface { diff --git a/pkg/querier/queryrange/stats.go b/pkg/querier/queryrange/stats.go index 7cc2943fecaf4..4e5b646e7429b 100644 --- a/pkg/querier/queryrange/stats.go +++ b/pkg/querier/queryrange/stats.go @@ -14,14 +14,14 @@ import ( "github.com/grafana/dskit/middleware" promql_parser "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/logproto" - - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) type ctxKeyType string diff --git a/pkg/querier/queryrange/stats_test.go b/pkg/querier/queryrange/stats_test.go index 28f8d12de7f6d..8c48a9ece8538 100644 --- a/pkg/querier/queryrange/stats_test.go +++ b/pkg/querier/queryrange/stats_test.go @@ -12,9 +12,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func TestStatsCollectorMiddleware(t *testing.T) { diff --git a/pkg/querier/queryrange/views.go b/pkg/querier/queryrange/views.go index be9eee016b4b5..b34020934c1c5 100644 --- a/pkg/querier/queryrange/views.go +++ b/pkg/querier/queryrange/views.go @@ -11,8 +11,8 @@ import ( "github.com/richardartoul/molecule" "github.com/richardartoul/molecule/src/codec" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) // Pull fiel numbers from protobuf message descriptions. 
diff --git a/pkg/querier/queryrange/views_test.go b/pkg/querier/queryrange/views_test.go index c4c28fe462c0d..7d1938dacb775 100644 --- a/pkg/querier/queryrange/views_test.go +++ b/pkg/querier/queryrange/views_test.go @@ -12,11 +12,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func TestGetLokiSeriesResponse(t *testing.T) { diff --git a/pkg/querier/queryrange/volume.go b/pkg/querier/queryrange/volume.go index b12fbd48d2459..d4c40964d1423 100644 --- a/pkg/querier/queryrange/volume.go +++ b/pkg/querier/queryrange/volume.go @@ -9,14 +9,14 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" ) func NewVolumeMiddleware() queryrangebase.Middleware { diff --git a/pkg/querier/queryrange/volume_cache.go b/pkg/querier/queryrange/volume_cache.go index 5ae2af4111150..6f4f73de568ed 100644 --- a/pkg/querier/queryrange/volume_cache.go +++ b/pkg/querier/queryrange/volume_cache.go @@ -12,12 +12,12 @@ import ( "github.com/grafana/dskit/tenant" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/validation" ) type VolumeSplitter struct { diff --git a/pkg/querier/queryrange/volume_cache_test.go b/pkg/querier/queryrange/volume_cache_test.go index 038d8fa925f57..cdf492ee3b1bf 100644 --- a/pkg/querier/queryrange/volume_cache_test.go +++ b/pkg/querier/queryrange/volume_cache_test.go @@ -10,15 +10,15 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache" - - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - 
"github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache" + + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestVolumeCache(t *testing.T) { diff --git a/pkg/querier/queryrange/volume_test.go b/pkg/querier/queryrange/volume_test.go index 8d8b8d48a3f23..62956dd17eab0 100644 --- a/pkg/querier/queryrange/volume_test.go +++ b/pkg/querier/queryrange/volume_test.go @@ -9,11 +9,11 @@ import ( "github.com/grafana/dskit/user" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" ) const forRangeQuery = false diff --git a/pkg/querier/stats/stats.pb.go b/pkg/querier/stats/stats.pb.go index bb56bb26c49c0..bae01dd0eeed5 100644 --- a/pkg/querier/stats/stats.pb.go +++ b/pkg/querier/stats/stats.pb.go @@ -98,26 +98,27 @@ func init() { func init() { proto.RegisterFile("pkg/querier/stats/stats.proto", fileDescriptor_8ca2404f80bab2e8) } var fileDescriptor_8ca2404f80bab2e8 = []byte{ - // 302 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0xd0, 0xb1, 0x4e, 0x3a, 0x31, - 0x1c, 0xc0, 0xf1, 0xfe, 0xfe, 0x7f, 0x31, 0x78, 0x4e, 0x9e, 0x0c, 0x48, 0xe2, 0x0f, 0xe2, 0x84, - 0xcb, 0xd5, 0xe8, 0x0b, 0x18, 0xf0, 0x09, 0xc0, 0xc9, 0xe5, 0xd2, 0x3b, 0x4a, 0x69, 0x38, 0xae, - 0x78, 0xd7, 0xc6, 0xb8, 0xf9, 0x08, 0x8e, 0x3e, 0x82, 0x89, 0x2f, 0xc2, 0xc8, 0xc8, 0xa4, 0x52, - 0x16, 0x47, 0x1e, 0xc1, 0xb4, 0x07, 0x93, 0x4b, 0xd3, 0x5f, 0x3f, 0xf9, 0x26, 0x6d, 0x83, 0xf3, - 0xf9, 0x54, 0xd0, 0x47, 0xc3, 0x0b, 0xc9, 0x0b, 0x5a, 0x6a, 0xa6, 0xcb, 0x6a, 0x8d, 0xe6, 0x85, - 0xd2, 0x2a, 0xac, 0xf9, 0xa1, 0xd5, 0x10, 0x4a, 0x28, 0x7f, 0x42, 0xdd, 0xae, 0xc2, 0x16, 0x0a, - 0xa5, 0x44, 0xc6, 0xa9, 0x9f, 0x12, 0x33, 0xa6, 0x23, 0x53, 0x30, 0x2d, 0x55, 0x5e, 0xf9, 0xc5, - 0x07, 0x04, 0xb5, 0xa1, 0xeb, 0xc3, 0xdb, 0xe0, 0xe8, 0x89, 0x65, 0x59, 0xac, 0xe5, 0x8c, 0x37, - 0xa1, 0x03, 0xdd, 0xe3, 0xeb, 0xb3, 0xa8, 0xaa, 0xa3, 0x7d, 0x1d, 0xdd, 0xed, 0xea, 0x5e, 0x7d, - 0xf1, 0xd9, 0x26, 0x6f, 0x5f, 0x6d, 0x18, 0xd4, 0x5d, 0x75, 0x2f, 0x67, 0x3c, 0xbc, 0x0a, 0x1a, - 0x63, 0xae, 0xd3, 0x09, 0x1f, 0xc5, 0xa5, 0xbb, 0x6c, 0x19, 0xa7, 0xca, 0xe4, 0xba, 0xf9, 0xaf, - 0x03, 0xdd, 0x83, 0x41, 0xb8, 0xb3, 0xa1, 0xa7, 0xbe, 0x93, 0x30, 0x0a, 0x4e, 0xf7, 0x45, 0x3a, - 0x31, 0xf9, 0x34, 0x4e, 0x9e, 0x35, 0x2f, 0x9b, 0xff, 0x7d, 0x70, 0xb2, 0xa3, 0xbe, 0x93, 0x9e, - 0x83, 0x5e, 0xbc, 0x5c, 0x23, 0x59, 0xad, 0x91, 0x6c, 0xd7, 0x08, 0x2f, 0x16, 0xe1, 0xdd, 0x22, - 0x2c, 0x2c, 0xc2, 0xd2, 0x22, 0x7c, 0x5b, 0x84, 0x1f, 0x8b, 0x64, 0x6b, 0x11, 0x5e, 
0x37, 0x48, - 0x96, 0x1b, 0x24, 0xab, 0x0d, 0x92, 0x87, 0x4b, 0x21, 0xf5, 0xc4, 0x24, 0x51, 0xaa, 0x66, 0x54, - 0x14, 0x6c, 0xcc, 0x72, 0x46, 0x33, 0x35, 0x95, 0xf4, 0xcf, 0xbf, 0x26, 0x87, 0xfe, 0xa5, 0x37, - 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xb4, 0xe6, 0x22, 0x2b, 0x73, 0x01, 0x00, 0x00, + // 305 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0xd0, 0x31, 0x4e, 0xf3, 0x30, + 0x14, 0xc0, 0x71, 0xbf, 0xef, 0xa3, 0xa8, 0x84, 0x89, 0xd0, 0xa1, 0x54, 0xe2, 0xb5, 0x62, 0xea, + 0x80, 0x62, 0x44, 0x2f, 0x80, 0x5a, 0x4e, 0xd0, 0x32, 0xb1, 0x44, 0x49, 0xea, 0xba, 0x56, 0xd3, + 0xb8, 0x24, 0x36, 0x88, 0x8d, 0x23, 0x30, 0x72, 0x04, 0x24, 0x2e, 0xd2, 0xb1, 0x63, 0x27, 0xa0, + 0xee, 0xc2, 0xd8, 0x23, 0x20, 0x3b, 0xe9, 0xc4, 0x62, 0xf9, 0xf9, 0xa7, 0xbf, 0x64, 0xdb, 0x3b, + 0x5f, 0xcc, 0x38, 0x7d, 0xd0, 0x2c, 0x17, 0x2c, 0xa7, 0x85, 0x8a, 0x54, 0x51, 0xae, 0xc1, 0x22, + 0x97, 0x4a, 0xfa, 0x35, 0x37, 0xb4, 0x1a, 0x5c, 0x72, 0xe9, 0x4e, 0xa8, 0xdd, 0x95, 0xd8, 0x42, + 0x2e, 0x25, 0x4f, 0x19, 0x75, 0x53, 0xac, 0x27, 0x74, 0xac, 0xf3, 0x48, 0x09, 0x99, 0x95, 0x7e, + 0xf1, 0x01, 0x5e, 0x6d, 0x64, 0x7b, 0xff, 0xc6, 0x3b, 0x7a, 0x8a, 0xd2, 0x34, 0x54, 0x62, 0xce, + 0x9a, 0xd0, 0x81, 0xee, 0xf1, 0xf5, 0x59, 0x50, 0xd6, 0xc1, 0xbe, 0x0e, 0x6e, 0xab, 0xba, 0x5f, + 0x5f, 0x7e, 0xb6, 0xc9, 0xdb, 0x57, 0x1b, 0x86, 0x75, 0x5b, 0xdd, 0x89, 0x39, 0xf3, 0xaf, 0xbc, + 0xc6, 0x84, 0xa9, 0x64, 0xca, 0xc6, 0x61, 0x61, 0x2f, 0x5b, 0x84, 0x89, 0xd4, 0x99, 0x6a, 0xfe, + 0xeb, 0x40, 0xf7, 0x60, 0xe8, 0x57, 0x36, 0x72, 0x34, 0xb0, 0xe2, 0x07, 0xde, 0xe9, 0xbe, 0x48, + 0xa6, 0x3a, 0x9b, 0x85, 0xf1, 0xb3, 0x62, 0x45, 0xf3, 0xbf, 0x0b, 0x4e, 0x2a, 0x1a, 0x58, 0xe9, + 0x5b, 0xe8, 0xc7, 0xab, 0x0d, 0x92, 0xf5, 0x06, 0xc9, 0x6e, 0x83, 0xf0, 0x62, 0x10, 0xde, 0x0d, + 0xc2, 0xd2, 0x20, 0xac, 0x0c, 0xc2, 0xb7, 0x41, 0xf8, 0x31, 0x48, 0x76, 0x06, 0xe1, 0x75, 0x8b, + 0x64, 0xb5, 0x45, 0xb2, 0xde, 0x22, 0xb9, 0xbf, 0xe4, 0x42, 0x4d, 0x75, 0x1c, 0x24, 0x72, 0x4e, + 0x79, 0x1e, 0x4d, 0xa2, 0x2c, 0xa2, 0xa9, 0x9c, 0x09, 0xfa, 0xd8, 0xa3, 0x7f, 0xbe, 0x36, 0x3e, + 0x74, 0x8f, 0xed, 0xfd, 0x06, 0x00, 0x00, 0xff, 0xff, 0x67, 0x51, 0xd3, 0x06, 0x76, 0x01, 0x00, + 0x00, } func (this *Stats) Equal(that interface{}) bool { diff --git a/pkg/querier/stats/stats.proto b/pkg/querier/stats/stats.proto index 1315ef7774ce3..2aa02df7351d7 100644 --- a/pkg/querier/stats/stats.proto +++ b/pkg/querier/stats/stats.proto @@ -5,7 +5,7 @@ package stats; import "gogoproto/gogo.proto"; import "google/protobuf/duration.proto"; -option go_package = "github.com/grafana/loki/pkg/querier/stats"; +option go_package = "github.com/grafana/loki/v3/pkg/querier/stats"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/querier/tail.go b/pkg/querier/tail.go index 35cb4bc18e7a7..1bdc01159ed65 100644 --- a/pkg/querier/tail.go +++ b/pkg/querier/tail.go @@ -12,10 +12,10 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/iter" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/iter" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/querier/tail_mock_test.go b/pkg/querier/tail_mock_test.go index 94f135e77648e..a1d161d2f2c95 100644 --- a/pkg/querier/tail_mock_test.go +++ 
b/pkg/querier/tail_mock_test.go @@ -1,6 +1,6 @@ package querier -import "github.com/grafana/loki/pkg/logproto" +import "github.com/grafana/loki/v3/pkg/logproto" func mockTailResponse(stream logproto.Stream) *logproto.TailResponse { return &logproto.TailResponse{ diff --git a/pkg/querier/tail_test.go b/pkg/querier/tail_test.go index 07d3743af03c5..4867574e5792c 100644 --- a/pkg/querier/tail_test.go +++ b/pkg/querier/tail_test.go @@ -12,9 +12,9 @@ import ( gokitlog "github.com/go-kit/log" - "github.com/grafana/loki/pkg/iter" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/iter" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/querier/testutils.go b/pkg/querier/testutils.go index 5b38ad11eba5f..34cae5f70580d 100644 --- a/pkg/querier/testutils.go +++ b/pkg/querier/testutils.go @@ -3,7 +3,7 @@ package querier import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) func DefaultLimitsConfig() validation.Limits { diff --git a/pkg/querier/worker/frontend_processor.go b/pkg/querier/worker/frontend_processor.go index 45c61862d0598..a0e3569359bfa 100644 --- a/pkg/querier/worker/frontend_processor.go +++ b/pkg/querier/worker/frontend_processor.go @@ -13,9 +13,9 @@ import ( "github.com/opentracing/opentracing-go" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v1/frontendv1pb" - querier_stats "github.com/grafana/loki/pkg/querier/stats" - httpgrpcutil "github.com/grafana/loki/pkg/util/httpgrpc" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v1/frontendv1pb" + querier_stats "github.com/grafana/loki/v3/pkg/querier/stats" + httpgrpcutil "github.com/grafana/loki/v3/pkg/util/httpgrpc" ) var ( diff --git a/pkg/querier/worker/frontend_processor_test.go b/pkg/querier/worker/frontend_processor_test.go index cecdb7bfe27d3..85eac4338a373 100644 --- a/pkg/querier/worker/frontend_processor_test.go +++ b/pkg/querier/worker/frontend_processor_test.go @@ -14,8 +14,8 @@ import ( "google.golang.org/grpc/credentials/insecure" "google.golang.org/grpc/test/bufconn" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/util/test" ) const bufConnSize = 1024 * 1024 diff --git a/pkg/querier/worker/scheduler_processor.go b/pkg/querier/worker/scheduler_processor.go index 16d0e59d1ed14..00b08219e5dbe 100644 --- a/pkg/querier/worker/scheduler_processor.go +++ b/pkg/querier/worker/scheduler_processor.go @@ -25,12 +25,12 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/queryrange" - querier_stats "github.com/grafana/loki/pkg/querier/stats" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - httpgrpcutil "github.com/grafana/loki/pkg/util/httpgrpc" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + querier_stats "github.com/grafana/loki/v3/pkg/querier/stats" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + httpgrpcutil "github.com/grafana/loki/v3/pkg/util/httpgrpc" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func newSchedulerProcessor(cfg Config, 
handler RequestHandler, log log.Logger, metrics *Metrics, codec RequestCodec) (*schedulerProcessor, []services.Service) { diff --git a/pkg/querier/worker/scheduler_processor_test.go b/pkg/querier/worker/scheduler_processor_test.go index 154ba1ae4fa73..264d5a1769fd1 100644 --- a/pkg/querier/worker/scheduler_processor_test.go +++ b/pkg/querier/worker/scheduler_processor_test.go @@ -19,9 +19,9 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" ) func TestSchedulerProcessor_processQueriesOnSingleStream(t *testing.T) { diff --git a/pkg/querier/worker/util.go b/pkg/querier/worker/util.go index 812236809a097..3ebb0029bfe76 100644 --- a/pkg/querier/worker/util.go +++ b/pkg/querier/worker/util.go @@ -14,8 +14,8 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/util/server" ) // newExecutionContext returns a new execution context (execCtx) that wraps the input workerCtx and diff --git a/pkg/querier/worker/util_test.go b/pkg/querier/worker/util_test.go index 25dd8127a0da4..96b9be9891cf4 100644 --- a/pkg/querier/worker/util_test.go +++ b/pkg/querier/worker/util_test.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/server" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/server" ) type HandlerFunc func(context.Context, queryrangebase.Request) (queryrangebase.Response, error) diff --git a/pkg/querier/worker/worker.go b/pkg/querier/worker/worker.go index b2e50b205d143..bc41a49d9075d 100644 --- a/pkg/querier/worker/worker.go +++ b/pkg/querier/worker/worker.go @@ -17,9 +17,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util" ) type Config struct { diff --git a/pkg/querier/worker/worker_test.go b/pkg/querier/worker/worker_test.go index 68791b214f178..fb311925fb207 100644 --- a/pkg/querier/worker/worker_test.go +++ b/pkg/querier/worker/worker_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/util/test" ) func TestResetConcurrency(t *testing.T) { diff --git a/pkg/querier/worker_service.go b/pkg/querier/worker_service.go index f95da0eba16d4..40415678122f1 100644 --- a/pkg/querier/worker_service.go +++ b/pkg/querier/worker_service.go @@ -10,9 +10,9 @@ import ( 
"github.com/grafana/dskit/services" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - querier_worker "github.com/grafana/loki/pkg/querier/worker" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + querier_worker "github.com/grafana/loki/v3/pkg/querier/worker" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type WorkerServiceConfig struct { diff --git a/pkg/queue/dequeue_qos_test.go b/pkg/queue/dequeue_qos_test.go index c889cbe8f4c60..db876e1d738fd 100644 --- a/pkg/queue/dequeue_qos_test.go +++ b/pkg/queue/dequeue_qos_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/queue/queue_test.go b/pkg/queue/queue_test.go index a21ce8af622cf..b51ccf7cc2a06 100644 --- a/pkg/queue/queue_test.go +++ b/pkg/queue/queue_test.go @@ -15,7 +15,7 @@ import ( "go.uber.org/atomic" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) func BenchmarkGetNextRequest(b *testing.B) { diff --git a/pkg/queue/tenant_queues.go b/pkg/queue/tenant_queues.go index 69fac6ed60a01..46f9f7adccd43 100644 --- a/pkg/queue/tenant_queues.go +++ b/pkg/queue/tenant_queues.go @@ -14,9 +14,9 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" ) type intPointerMap map[string]*int diff --git a/pkg/queue/tenant_queues_test.go b/pkg/queue/tenant_queues_test.go index 4f49b8233304d..d1b52480e5458 100644 --- a/pkg/queue/tenant_queues_test.go +++ b/pkg/queue/tenant_queues_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/scheduler/limits" ) var noQueueLimits = limits.NewQueueLimits(nil) diff --git a/pkg/ruler/base/api.go b/pkg/ruler/base/api.go index 04a303993228b..4e4d71d5691db 100644 --- a/pkg/ruler/base/api.go +++ b/pkg/ruler/base/api.go @@ -23,10 +23,10 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // In order to reimplement the prometheus rules API, a large amount of code was copied over diff --git a/pkg/ruler/base/api_test.go b/pkg/ruler/base/api_test.go index c14f5de8d4614..f61f2fe3f0237 100644 --- a/pkg/ruler/base/api_test.go +++ b/pkg/ruler/base/api_test.go @@ -24,7 +24,7 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) func TestRuler_PrometheusRules(t *testing.T) { diff --git a/pkg/ruler/base/client_pool_test.go b/pkg/ruler/base/client_pool_test.go index 05fc23290033c..fa068c2580bdf 100644 --- 
a/pkg/ruler/base/client_pool_test.go +++ b/pkg/ruler/base/client_pool_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) func Test_newRulerClientFactory(t *testing.T) { diff --git a/pkg/ruler/base/compat.go b/pkg/ruler/base/compat.go index 822537e419bc7..cfe18fcebd087 100644 --- a/pkg/ruler/base/compat.go +++ b/pkg/ruler/base/compat.go @@ -20,9 +20,9 @@ import ( "github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/config" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Pusher is an ingester server that accepts pushes. diff --git a/pkg/ruler/base/compat_test.go b/pkg/ruler/base/compat_test.go index d4cdf4f298a34..e37ef6646811a 100644 --- a/pkg/ruler/base/compat_test.go +++ b/pkg/ruler/base/compat_test.go @@ -18,7 +18,7 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type fakePusher struct { diff --git a/pkg/ruler/base/error_translate_queryable.go b/pkg/ruler/base/error_translate_queryable.go index 0785a1421ac6e..6e65ed1aaafa2 100644 --- a/pkg/ruler/base/error_translate_queryable.go +++ b/pkg/ruler/base/error_translate_queryable.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/util/annotations" - storage_errors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/validation" + storage_errors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/validation" ) // TranslateToPromqlAPIError converts error to one of promql.Errors for consumption in PromQL API. 
diff --git a/pkg/ruler/base/lifecycle_test.go b/pkg/ruler/base/lifecycle_test.go index ea1496712839e..2fefc62bf6cbb 100644 --- a/pkg/ruler/base/lifecycle_test.go +++ b/pkg/ruler/base/lifecycle_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/util/test" ) // TestRulerShutdown tests shutting down ruler unregisters correctly diff --git a/pkg/ruler/base/manager.go b/pkg/ruler/base/manager.go index 371eb712508e4..93b975895846c 100644 --- a/pkg/ruler/base/manager.go +++ b/pkg/ruler/base/manager.go @@ -19,7 +19,7 @@ import ( promRules "github.com/prometheus/prometheus/rules" "golang.org/x/net/context/ctxhttp" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) type DefaultMultiTenantManager struct { diff --git a/pkg/ruler/base/manager_metrics.go b/pkg/ruler/base/manager_metrics.go index d5caab8c2ef87..25d5aa13fcae1 100644 --- a/pkg/ruler/base/manager_metrics.go +++ b/pkg/ruler/base/manager_metrics.go @@ -3,7 +3,7 @@ package base import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // ManagerMetrics aggregates metrics exported by the Prometheus diff --git a/pkg/ruler/base/manager_metrics_test.go b/pkg/ruler/base/manager_metrics_test.go index 421133bd4f060..49f34405bff84 100644 --- a/pkg/ruler/base/manager_metrics_test.go +++ b/pkg/ruler/base/manager_metrics_test.go @@ -12,9 +12,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/util/constants" ) func TestManagerMetricsWithRuleGroupLabel(t *testing.T) { diff --git a/pkg/ruler/base/manager_test.go b/pkg/ruler/base/manager_test.go index c2cdc58854090..a93dca6f7977a 100644 --- a/pkg/ruler/base/manager_test.go +++ b/pkg/ruler/base/manager_test.go @@ -13,9 +13,9 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/test" ) func TestSyncRuleGroups(t *testing.T) { diff --git a/pkg/ruler/base/notifier.go b/pkg/ruler/base/notifier.go index 403383f2eefc1..8fea76be13963 100644 --- a/pkg/ruler/base/notifier.go +++ b/pkg/ruler/base/notifier.go @@ -19,8 +19,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/notifier" - ruler_config "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/util" + ruler_config "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/util" ) // TODO: Instead of using the same metrics for all notifiers, diff --git a/pkg/ruler/base/notifier_test.go b/pkg/ruler/base/notifier_test.go index 9db716623c41e..8166193d84575 100644 --- a/pkg/ruler/base/notifier_test.go +++ b/pkg/ruler/base/notifier_test.go @@ -14,8 +14,8 @@ import ( "github.com/prometheus/prometheus/model/relabel" "github.com/stretchr/testify/require" - ruler_config 
"github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/util" + ruler_config "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/util" ) func TestBuildNotifierConfig(t *testing.T) { diff --git a/pkg/ruler/base/pusher_mock_test.go b/pkg/ruler/base/pusher_mock_test.go index 0fb571c70b653..cde9f43b56fbe 100644 --- a/pkg/ruler/base/pusher_mock_test.go +++ b/pkg/ruler/base/pusher_mock_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/mock" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type pusherMock struct { diff --git a/pkg/ruler/base/ruler.go b/pkg/ruler/base/ruler.go index a0dc0df08f2d7..b8e9f3899e417 100644 --- a/pkg/ruler/base/ruler.go +++ b/pkg/ruler/base/ruler.go @@ -34,12 +34,12 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/ruler/base/ruler.pb.go b/pkg/ruler/base/ruler.pb.go index 63904a26e3927..5b3b1f1b4d5d8 100644 --- a/pkg/ruler/base/ruler.pb.go +++ b/pkg/ruler/base/ruler.pb.go @@ -12,9 +12,9 @@ import ( _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" _ "github.com/golang/protobuf/ptypes/duration" - _ "github.com/grafana/loki/pkg/logproto" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" - rulespb "github.com/grafana/loki/pkg/ruler/rulespb" + _ "github.com/grafana/loki/v3/pkg/logproto" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" + rulespb "github.com/grafana/loki/v3/pkg/ruler/rulespb" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -334,15 +334,15 @@ func (m *RuleStateDesc) GetEvaluationDuration() time.Duration { } type AlertStateDesc struct { - State string `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,2,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"labels"` - Annotations []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=annotations,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"annotations"` - Value float64 `protobuf:"fixed64,4,opt,name=value,proto3" json:"value,omitempty"` - ActiveAt time.Time `protobuf:"bytes,5,opt,name=active_at,json=activeAt,proto3,stdtime" json:"active_at"` - FiredAt time.Time `protobuf:"bytes,6,opt,name=fired_at,json=firedAt,proto3,stdtime" json:"fired_at"` - ResolvedAt time.Time `protobuf:"bytes,7,opt,name=resolved_at,json=resolvedAt,proto3,stdtime" json:"resolved_at"` - LastSentAt time.Time `protobuf:"bytes,8,opt,name=last_sent_at,json=lastSentAt,proto3,stdtime" json:"last_sent_at"` - ValidUntil time.Time `protobuf:"bytes,9,opt,name=valid_until,json=validUntil,proto3,stdtime" json:"valid_until"` + State string `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` + Labels 
[]github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,2,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"labels"` + Annotations []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,3,rep,name=annotations,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"annotations"` + Value float64 `protobuf:"fixed64,4,opt,name=value,proto3" json:"value,omitempty"` + ActiveAt time.Time `protobuf:"bytes,5,opt,name=active_at,json=activeAt,proto3,stdtime" json:"active_at"` + FiredAt time.Time `protobuf:"bytes,6,opt,name=fired_at,json=firedAt,proto3,stdtime" json:"fired_at"` + ResolvedAt time.Time `protobuf:"bytes,7,opt,name=resolved_at,json=resolvedAt,proto3,stdtime" json:"resolved_at"` + LastSentAt time.Time `protobuf:"bytes,8,opt,name=last_sent_at,json=lastSentAt,proto3,stdtime" json:"last_sent_at"` + ValidUntil time.Time `protobuf:"bytes,9,opt,name=valid_until,json=validUntil,proto3,stdtime" json:"valid_until"` } func (m *AlertStateDesc) Reset() { *m = AlertStateDesc{} } @@ -438,57 +438,57 @@ func init() { func init() { proto.RegisterFile("pkg/ruler/base/ruler.proto", fileDescriptor_ca810a0fd7057a73) } var fileDescriptor_ca810a0fd7057a73 = []byte{ - // 791 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x4f, 0x4f, 0xdb, 0x48, - 0x14, 0xf7, 0xe4, 0xbf, 0x27, 0x81, 0x65, 0x07, 0xb4, 0x32, 0x61, 0x71, 0xa2, 0xec, 0x25, 0x5a, - 0xad, 0x6c, 0x6d, 0x58, 0xad, 0xb4, 0xda, 0x5d, 0x55, 0x41, 0xd0, 0x5e, 0x50, 0x55, 0x19, 0xda, - 0x6b, 0x34, 0x49, 0x26, 0xc6, 0x62, 0xe2, 0x71, 0xc7, 0xe3, 0x48, 0xdc, 0xfa, 0x11, 0x38, 0xf6, - 0xda, 0x5b, 0x3f, 0x0a, 0x47, 0x7a, 0x43, 0xad, 0x44, 0x4b, 0xb8, 0xf4, 0xc8, 0x07, 0xe8, 0xa1, - 0x9a, 0x19, 0x9b, 0x84, 0x42, 0x0f, 0x51, 0xc5, 0x05, 0xe6, 0xfd, 0xf9, 0xfd, 0xde, 0x7b, 0xbf, - 0x79, 0xe3, 0xc0, 0x7a, 0x74, 0xe4, 0xbb, 0x3c, 0xa1, 0x84, 0xbb, 0x7d, 0x1c, 0x13, 0x7d, 0x74, - 0x22, 0xce, 0x04, 0x43, 0x05, 0xe9, 0xa9, 0xaf, 0xf9, 0xcc, 0x67, 0xca, 0xe1, 0xca, 0x93, 0x8e, - 0xd5, 0x6d, 0x9f, 0x31, 0x9f, 0x12, 0x57, 0x59, 0xfd, 0x64, 0xe4, 0x0e, 0x13, 0x8e, 0x45, 0xc0, - 0xc2, 0x34, 0xde, 0xf8, 0x36, 0x2e, 0x82, 0x31, 0x89, 0x05, 0x1e, 0x47, 0x69, 0xc2, 0x86, 0x2c, - 0x4c, 0x99, 0xaf, 0x99, 0xb3, 0x43, 0x1a, 0xdc, 0x9c, 0x75, 0x25, 0xff, 0xc6, 0x51, 0x5f, 0xff, - 0xd7, 0xe1, 0xd6, 0x3b, 0x00, 0x6b, 0x9e, 0xb4, 0x3d, 0xf2, 0x32, 0x21, 0xb1, 0x40, 0x5b, 0xb0, - 0x34, 0x0a, 0xa8, 0x20, 0xdc, 0x02, 0x4d, 0xd0, 0x5e, 0xee, 0x6c, 0x38, 0xb2, 0x75, 0x67, 0x3e, - 0x47, 0x19, 0x07, 0xc7, 0x11, 0xf1, 0xd2, 0x54, 0xb4, 0x01, 0x4d, 0x49, 0xda, 0x0b, 0xf1, 0x98, - 0x58, 0xb9, 0x66, 0xbe, 0x6d, 0x7a, 0x15, 0xe9, 0x78, 0x8a, 0xc7, 0x04, 0x6d, 0x42, 0xa8, 0x82, - 0x3e, 0x67, 0x49, 0x64, 0xe5, 0x55, 0x54, 0xa5, 0x3f, 0x91, 0x0e, 0x84, 0x60, 0x61, 0x14, 0x50, - 0x62, 0x15, 0x54, 0x40, 0x9d, 0x5b, 0xff, 0xc1, 0x4a, 0x56, 0x03, 0x55, 0x61, 0xb9, 0x1b, 0x1e, - 0x4b, 0x73, 0xc5, 0x40, 0x2b, 0xb0, 0xd6, 0xa5, 0x84, 0x8b, 0x20, 0xf4, 0x95, 0x07, 0xa0, 0x9f, - 0xe1, 0x92, 0x47, 0x06, 0x8c, 0x0f, 0x33, 0x57, 0xae, 0xf5, 0x3f, 0x5c, 0x4a, 0xdb, 0x8d, 0x23, - 0x16, 0xc6, 0x04, 0xfd, 0x01, 0x4b, 0xaa, 0x78, 0x6c, 0x81, 0x66, 0xbe, 0x5d, 0xed, 0xac, 0xe9, - 0x99, 0x54, 0xfd, 0x7d, 0x81, 0x05, 0xd9, 0x21, 0xf1, 0xc0, 0x4b, 0x73, 0x5a, 0x6f, 0x72, 0x70, - 0xf9, 0x76, 0x08, 0xfd, 0x0e, 0x8b, 0xba, 0x7b, 0xa9, 0x89, 0xc4, 0x6b, 0x09, 0xbd, 0x6c, 0x08, - 0x85, 0xd7, 0x29, 0xe8, 0x6f, 0x58, 0xc3, 0x03, 0x11, 0x4c, 0x48, 0x4f, 0x25, 0x29, 
0x39, 0xaa, - 0x9d, 0xd5, 0x99, 0x8c, 0xb3, 0x8a, 0x55, 0x9d, 0xa8, 0x9a, 0x45, 0x2f, 0xe0, 0x2a, 0x99, 0x60, - 0x9a, 0xa8, 0xab, 0x3f, 0xc8, 0xae, 0xd8, 0xca, 0xab, 0x8a, 0x75, 0x47, 0x2f, 0x81, 0x93, 0x2d, - 0x81, 0x73, 0x93, 0xb1, 0x5d, 0x39, 0xbd, 0x68, 0x18, 0x27, 0x1f, 0x1b, 0xc0, 0xbb, 0x8f, 0x00, - 0xed, 0x43, 0x34, 0x73, 0xef, 0xa4, 0xab, 0x65, 0x15, 0x14, 0xed, 0xfa, 0x1d, 0xda, 0x2c, 0x41, - 0xb3, 0xbe, 0x96, 0xac, 0xf7, 0xc0, 0x5b, 0x1f, 0x72, 0x5a, 0xe3, 0x99, 0x44, 0xbf, 0xc1, 0x82, - 0x9c, 0x37, 0x55, 0xe8, 0xa7, 0x39, 0x85, 0xd4, 0xa8, 0x2a, 0x88, 0xd6, 0x60, 0x31, 0x96, 0x08, - 0x2b, 0xd7, 0x04, 0x6d, 0xd3, 0xd3, 0x06, 0xfa, 0x05, 0x96, 0x0e, 0x09, 0xa6, 0xe2, 0x50, 0x0d, - 0x6b, 0x7a, 0xa9, 0x85, 0x7e, 0x85, 0x26, 0xc5, 0xb1, 0xd8, 0xe5, 0x9c, 0x71, 0xd5, 0xb0, 0xe9, - 0xcd, 0x1c, 0xf2, 0x52, 0xb1, 0x5c, 0x85, 0xd8, 0x2a, 0xce, 0x5f, 0xaa, 0x5a, 0x8f, 0xb9, 0x4b, - 0xd5, 0x39, 0xdf, 0x53, 0xb7, 0xf4, 0x30, 0xea, 0x96, 0x7f, 0x4c, 0xdd, 0x2f, 0x05, 0xb8, 0x7c, - 0x7b, 0x8e, 0x99, 0x72, 0x60, 0x5e, 0x39, 0x0a, 0x4b, 0x14, 0xf7, 0x09, 0xcd, 0xb6, 0x6c, 0xdd, - 0xb9, 0x79, 0xfd, 0x7b, 0xc4, 0xc7, 0x83, 0xe3, 0x3d, 0x19, 0x7d, 0x86, 0x03, 0xbe, 0xfd, 0x8f, - 0xac, 0xf8, 0xfe, 0xa2, 0xf1, 0xa7, 0x1f, 0x88, 0xc3, 0xa4, 0xef, 0x0c, 0xd8, 0xd8, 0xf5, 0x39, - 0x1e, 0xe1, 0x10, 0xbb, 0x94, 0x1d, 0x05, 0xee, 0xfc, 0x47, 0xc4, 0x51, 0xb8, 0xee, 0x10, 0x47, - 0x82, 0x70, 0x2f, 0xad, 0x81, 0x26, 0xb0, 0x8a, 0xc3, 0x90, 0x09, 0xd5, 0x64, 0xac, 0x5e, 0xf2, - 0x43, 0x95, 0x9c, 0x2f, 0x24, 0x67, 0x97, 0x1a, 0x11, 0xb5, 0x03, 0xc0, 0xd3, 0x06, 0xea, 0x42, - 0x33, 0x7d, 0x67, 0x58, 0x58, 0xc5, 0x05, 0xee, 0xb1, 0xa2, 0x61, 0x5d, 0x81, 0x1e, 0xc1, 0xca, - 0x28, 0xe0, 0x64, 0x28, 0x19, 0x16, 0xd9, 0x84, 0xb2, 0x42, 0x75, 0x05, 0xda, 0x85, 0x55, 0x4e, - 0x62, 0x46, 0x27, 0x9a, 0xa3, 0xbc, 0x00, 0x07, 0xcc, 0x80, 0x5d, 0x81, 0x1e, 0xc3, 0x9a, 0xdc, - 0xeb, 0x5e, 0x4c, 0x42, 0x21, 0x79, 0x2a, 0x8b, 0xf0, 0x48, 0xe4, 0x3e, 0x09, 0x85, 0x6e, 0x67, - 0x82, 0x69, 0x30, 0xec, 0x25, 0xa1, 0x08, 0xa8, 0x65, 0x2e, 0x42, 0xa3, 0x80, 0xcf, 0x25, 0xae, - 0xf3, 0x2f, 0x2c, 0xca, 0x77, 0xcb, 0x51, 0x47, 0x1f, 0x62, 0x84, 0xee, 0xfe, 0x08, 0xd4, 0x57, - 0x6f, 0xf9, 0xf4, 0x97, 0xb6, 0x65, 0x6c, 0xff, 0x75, 0x76, 0x69, 0x1b, 0xe7, 0x97, 0xb6, 0x71, - 0x7d, 0x69, 0x83, 0x57, 0x53, 0x1b, 0xbc, 0x9d, 0xda, 0xe0, 0x74, 0x6a, 0x83, 0xb3, 0xa9, 0x0d, - 0x3e, 0x4d, 0x6d, 0xf0, 0x79, 0x6a, 0x1b, 0xd7, 0x53, 0x1b, 0x9c, 0x5c, 0xd9, 0xc6, 0xd9, 0x95, - 0x6d, 0x9c, 0x5f, 0xd9, 0x46, 0xbf, 0xa4, 0x9a, 0xdb, 0xfa, 0x1a, 0x00, 0x00, 0xff, 0xff, 0xf0, - 0x4e, 0x48, 0x39, 0x44, 0x07, 0x00, 0x00, + // 790 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0xcd, 0x4e, 0xdb, 0x4c, + 0x14, 0xf5, 0xe4, 0xdf, 0x93, 0xc0, 0xc7, 0x37, 0xa0, 0xca, 0x84, 0xe2, 0x44, 0xe9, 0x26, 0xaa, + 0x2a, 0x5b, 0x0a, 0xa8, 0x9b, 0x52, 0x55, 0x41, 0xd0, 0x6e, 0x50, 0x55, 0x19, 0xda, 0x6d, 0x34, + 0x49, 0x26, 0xc6, 0xc2, 0xf1, 0xb8, 0xe3, 0x71, 0xa4, 0xec, 0xaa, 0x3e, 0x01, 0xcb, 0x6e, 0xbb, + 0xeb, 0xa3, 0xb0, 0xa4, 0x3b, 0xd4, 0x4a, 0xb4, 0x84, 0x4d, 0x97, 0x3c, 0x42, 0x35, 0x33, 0x36, + 0x09, 0x85, 0x2e, 0xa2, 0x8a, 0x0d, 0xcc, 0xfd, 0x39, 0xe7, 0xde, 0x39, 0xf7, 0x8e, 0x03, 0xab, + 0xe1, 0x91, 0x6b, 0xb3, 0xd8, 0x27, 0xcc, 0xee, 0xe2, 0x88, 0xa8, 0xa3, 0x15, 0x32, 0xca, 0x29, + 0xca, 0x09, 0x4f, 0x75, 0xc5, 0xa5, 0x2e, 0x95, 0x0e, 0x5b, 0x9c, 0x54, 0xac, 0x6a, 0xba, 0x94, + 0xba, 0x3e, 0xb1, 0xa5, 0xd5, 0x8d, 0x07, 0x76, 0x3f, 0x66, 0x98, 0x7b, 0x34, 0x48, 0xe2, 0xb5, + 0x3f, 0xe3, 0xdc, 
0x1b, 0x92, 0x88, 0xe3, 0x61, 0x98, 0x24, 0xac, 0x89, 0xc2, 0x3e, 0x75, 0x15, + 0x73, 0x7a, 0x48, 0x82, 0xeb, 0xd3, 0xae, 0xc4, 0xdf, 0x28, 0xec, 0xaa, 0xff, 0x2a, 0xdc, 0xf8, + 0x0a, 0x60, 0xc5, 0x11, 0xb6, 0x43, 0xde, 0xc7, 0x24, 0xe2, 0x68, 0x03, 0x16, 0x06, 0x9e, 0xcf, + 0x09, 0x33, 0x40, 0x1d, 0x34, 0x17, 0x5b, 0x6b, 0x96, 0x68, 0xdd, 0x9a, 0xcd, 0x91, 0xc6, 0xc1, + 0x38, 0x24, 0x4e, 0x92, 0x8a, 0xd6, 0xa0, 0x2e, 0x48, 0x3b, 0x01, 0x1e, 0x12, 0x23, 0x53, 0xcf, + 0x36, 0x75, 0xa7, 0x24, 0x1c, 0xaf, 0xf1, 0x90, 0xa0, 0x75, 0x08, 0x65, 0xd0, 0x65, 0x34, 0x0e, + 0x8d, 0xac, 0x8c, 0xca, 0xf4, 0x57, 0xc2, 0x81, 0x10, 0xcc, 0x0d, 0x3c, 0x9f, 0x18, 0x39, 0x19, + 0x90, 0xe7, 0xc6, 0x16, 0x2c, 0xa5, 0x35, 0x50, 0x19, 0x16, 0xdb, 0xc1, 0x58, 0x98, 0x4b, 0x1a, + 0x5a, 0x82, 0x95, 0xb6, 0x4f, 0x18, 0xf7, 0x02, 0x57, 0x7a, 0x00, 0xfa, 0x1f, 0x2e, 0x38, 0xa4, + 0x47, 0x59, 0x3f, 0x75, 0x65, 0x1a, 0xcf, 0xe1, 0x42, 0xd2, 0x6e, 0x14, 0xd2, 0x20, 0x22, 0xe8, + 0x09, 0x2c, 0xc8, 0xe2, 0x91, 0x01, 0xea, 0xd9, 0x66, 0xb9, 0xb5, 0xa2, 0xee, 0x24, 0xeb, 0xef, + 0x73, 0xcc, 0xc9, 0x0e, 0x89, 0x7a, 0x4e, 0x92, 0xd3, 0xf8, 0x9c, 0x81, 0x8b, 0x37, 0x43, 0xe8, + 0x31, 0xcc, 0xab, 0xee, 0x85, 0x26, 0x02, 0xaf, 0x24, 0x74, 0xd2, 0x4b, 0x48, 0xbc, 0x4a, 0x41, + 0x4f, 0x61, 0x05, 0xf7, 0xb8, 0x37, 0x22, 0x1d, 0x99, 0x24, 0xe5, 0x28, 0xb7, 0x96, 0xa7, 0x32, + 0x4e, 0x2b, 0x96, 0x55, 0xa2, 0x6c, 0x16, 0xbd, 0x83, 0xcb, 0x64, 0x84, 0xfd, 0x58, 0x8e, 0xfe, + 0x20, 0x1d, 0xb1, 0x91, 0x95, 0x15, 0xab, 0x96, 0x5a, 0x02, 0x2b, 0x5d, 0x02, 0xeb, 0x3a, 0x63, + 0xbb, 0x74, 0x72, 0x5e, 0xd3, 0x8e, 0x7f, 0xd4, 0x80, 0x73, 0x17, 0x01, 0xda, 0x87, 0x68, 0xea, + 0xde, 0x49, 0x56, 0xcb, 0xc8, 0x49, 0xda, 0xd5, 0x5b, 0xb4, 0x69, 0x82, 0x62, 0xfd, 0x24, 0x58, + 0xef, 0x80, 0x37, 0xbe, 0x67, 0x94, 0xc6, 0x53, 0x89, 0x1e, 0xc1, 0x9c, 0xb8, 0x6f, 0xa2, 0xd0, + 0x7f, 0x33, 0x0a, 0xc9, 0xab, 0xca, 0x20, 0x5a, 0x81, 0xf9, 0x48, 0x20, 0x8c, 0x4c, 0x1d, 0x34, + 0x75, 0x47, 0x19, 0xe8, 0x01, 0x2c, 0x1c, 0x12, 0xec, 0xf3, 0x43, 0x79, 0x59, 0xdd, 0x49, 0x2c, + 0xf4, 0x10, 0xea, 0x3e, 0x8e, 0xf8, 0x2e, 0x63, 0x94, 0xc9, 0x86, 0x75, 0x67, 0xea, 0x10, 0x43, + 0xc5, 0x62, 0x15, 0x22, 0x23, 0x3f, 0x3b, 0x54, 0xb9, 0x1e, 0x33, 0x43, 0x55, 0x39, 0x7f, 0x53, + 0xb7, 0x70, 0x3f, 0xea, 0x16, 0xff, 0x4d, 0xdd, 0x8f, 0x79, 0xb8, 0x78, 0xf3, 0x1e, 0x53, 0xe5, + 0xc0, 0xac, 0x72, 0x14, 0x16, 0x7c, 0xdc, 0x25, 0x7e, 0xba, 0x65, 0xab, 0xd6, 0xf5, 0xeb, 0xdf, + 0x23, 0x2e, 0xee, 0x8d, 0xf7, 0x44, 0xf4, 0x0d, 0xf6, 0xd8, 0xf6, 0x96, 0xa8, 0xf8, 0xed, 0xbc, + 0xb6, 0xe9, 0x7a, 0xfc, 0x30, 0xee, 0x5a, 0x3d, 0x3a, 0xb4, 0x5d, 0x86, 0x07, 0x38, 0xc0, 0xb6, + 0x4f, 0x8f, 0x3c, 0x7b, 0xb4, 0x61, 0xcf, 0x7e, 0x47, 0x2c, 0x09, 0x6d, 0xf7, 0x71, 0xc8, 0x09, + 0x73, 0x92, 0x32, 0x68, 0x0c, 0xcb, 0x38, 0x08, 0x28, 0x97, 0x7d, 0x46, 0xf2, 0x31, 0xdf, 0x63, + 0xd5, 0xd9, 0x5a, 0x42, 0x01, 0xa1, 0x14, 0x91, 0x9b, 0x00, 0x1c, 0x65, 0xa0, 0x36, 0xd4, 0x93, + 0xd7, 0x86, 0xb9, 0x91, 0x9f, 0x63, 0x9a, 0x25, 0x05, 0x6b, 0x73, 0xf4, 0x02, 0x96, 0x06, 0x1e, + 0x23, 0x7d, 0xc1, 0x30, 0xcf, 0x3e, 0x14, 0x25, 0xaa, 0xcd, 0xd1, 0x2e, 0x2c, 0x33, 0x12, 0x51, + 0x7f, 0xa4, 0x38, 0x8a, 0x73, 0x70, 0xc0, 0x14, 0xd8, 0xe6, 0xe8, 0x25, 0xac, 0x88, 0xed, 0xee, + 0x44, 0x24, 0xe0, 0x82, 0xa7, 0x34, 0x0f, 0x8f, 0x40, 0xee, 0x93, 0x80, 0xab, 0x76, 0x46, 0xd8, + 0xf7, 0xfa, 0x9d, 0x38, 0xe0, 0x9e, 0x6f, 0xe8, 0xf3, 0xd0, 0x48, 0xe0, 0x5b, 0x81, 0x6b, 0x3d, + 0x83, 0x79, 0xf1, 0x7a, 0x19, 0x6a, 0xa9, 0x43, 0x84, 0xd0, 0xed, 0x9f, 0x82, 0xea, 0xf2, 0x0d, + 0x9f, 0xfa, 0xde, 0x36, 0xb4, 0xed, 0xcd, 
0xd3, 0x0b, 0x53, 0x3b, 0xbb, 0x30, 0xb5, 0xab, 0x0b, + 0x13, 0x7c, 0x98, 0x98, 0xe0, 0xcb, 0xc4, 0x04, 0x27, 0x13, 0x13, 0x9c, 0x4e, 0x4c, 0xf0, 0x73, + 0x62, 0x82, 0x5f, 0x13, 0x53, 0xbb, 0x9a, 0x98, 0xe0, 0xf8, 0xd2, 0xd4, 0x4e, 0x2f, 0x4d, 0xed, + 0xec, 0xd2, 0xd4, 0xba, 0x05, 0xd9, 0xdc, 0xc6, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe9, 0x34, + 0xd7, 0x09, 0x4a, 0x07, 0x00, 0x00, } func (x RulesRequest_RuleType) String() string { @@ -2304,7 +2304,7 @@ func (m *AlertStateDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -2338,7 +2338,7 @@ func (m *AlertStateDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Annotations = append(m.Annotations, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Annotations = append(m.Annotations, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Annotations[len(m.Annotations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ruler/base/ruler.proto b/pkg/ruler/base/ruler.proto index 0b143728efe61..e47c497def902 100644 --- a/pkg/ruler/base/ruler.proto +++ b/pkg/ruler/base/ruler.proto @@ -69,11 +69,11 @@ message AlertStateDesc { string state = 1; repeated logproto.LegacyLabelPair labels = 2 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated logproto.LegacyLabelPair annotations = 3 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; double value = 4; google.protobuf.Timestamp active_at = 5 [ diff --git a/pkg/ruler/base/ruler_ring.go b/pkg/ruler/base/ruler_ring.go index 697b1d8b9ac59..4d50dd2934c51 100644 --- a/pkg/ruler/base/ruler_ring.go +++ b/pkg/ruler/base/ruler_ring.go @@ -14,7 +14,7 @@ import ( "github.com/grafana/dskit/netutil" "github.com/grafana/dskit/ring" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/ruler/base/ruler_test.go b/pkg/ruler/base/ruler_test.go index d17691e1bb6a0..931f8288be4d6 100644 --- a/pkg/ruler/base/ruler_test.go +++ b/pkg/ruler/base/ruler_test.go @@ -45,17 +45,17 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/series" - "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/ruler/rulestore/objectclient" - loki_storage "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/objectclient" + loki_storage 
"github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) func defaultRulerConfig(t testing.TB, store rulestore.RuleStore) Config { diff --git a/pkg/ruler/base/storage.go b/pkg/ruler/base/storage.go index 4a79fd5691221..c2548f0e46830 100644 --- a/pkg/ruler/base/storage.go +++ b/pkg/ruler/base/storage.go @@ -10,23 +10,23 @@ import ( "github.com/prometheus/client_golang/prometheus" promRules "github.com/prometheus/prometheus/rules" - configClient "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/ruler/rulestore/bucketclient" - "github.com/grafana/loki/pkg/ruler/rulestore/configdb" - "github.com/grafana/loki/pkg/ruler/rulestore/local" - "github.com/grafana/loki/pkg/ruler/rulestore/objectclient" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/bucket" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" + configClient "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/bucketclient" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/configdb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/objectclient" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/bucket" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" ) // RuleStoreConfig configures a rule store. 
diff --git a/pkg/ruler/base/store_mock_test.go b/pkg/ruler/base/store_mock_test.go index 2d92afdfbb1d8..b20a76571df65 100644 --- a/pkg/ruler/base/store_mock_test.go +++ b/pkg/ruler/base/store_mock_test.go @@ -7,9 +7,9 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" ) type mockRuleStore struct { diff --git a/pkg/ruler/compat.go b/pkg/ruler/compat.go index 355acc7d61910..6d3a6b68334d3 100644 --- a/pkg/ruler/compat.go +++ b/pkg/ruler/compat.go @@ -24,11 +24,11 @@ import ( "github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/template" - "github.com/grafana/loki/pkg/logql/syntax" - ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/rulespb" - rulerutil "github.com/grafana/loki/pkg/ruler/util" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logql/syntax" + ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + rulerutil "github.com/grafana/loki/v3/pkg/ruler/util" + "github.com/grafana/loki/v3/pkg/util" ) // RulesLimits is the one function we need from limits.Overrides, and diff --git a/pkg/ruler/compat_test.go b/pkg/ruler/compat_test.go index 55e77c2f18a2a..6855368aefdee 100644 --- a/pkg/ruler/compat_test.go +++ b/pkg/ruler/compat_test.go @@ -12,11 +12,11 @@ import ( "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logql" - rulerbase "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logql" + rulerbase "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) // TestInvalidRuleGroup tests that a validation error is raised when rule group is invalid diff --git a/pkg/ruler/config.go b/pkg/ruler/config.go index 7d948baa0c30d..69293b91324a0 100644 --- a/pkg/ruler/config.go +++ b/pkg/ruler/config.go @@ -9,9 +9,9 @@ import ( "github.com/prometheus/prometheus/config" "gopkg.in/yaml.v2" - ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/storage/cleaner" - "github.com/grafana/loki/pkg/ruler/storage/instance" + ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/storage/cleaner" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" ) type Config struct { diff --git a/pkg/ruler/config/alertmanager.go b/pkg/ruler/config/alertmanager.go index d30eec8df2be3..8282a39326f0f 100644 --- a/pkg/ruler/config/alertmanager.go +++ b/pkg/ruler/config/alertmanager.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/dskit/crypto/tls" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) type AlertManagerConfig struct { diff --git a/pkg/ruler/evaluator.go b/pkg/ruler/evaluator.go index 639bd102502cd..ce93e8310be77 100644 --- a/pkg/ruler/evaluator.go +++ b/pkg/ruler/evaluator.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // Evaluator is the interface that must be satisfied in order to accept rule evaluations from the Ruler. 
diff --git a/pkg/ruler/evaluator_jitter.go b/pkg/ruler/evaluator_jitter.go index 449ca0e18011c..ba5b0998c6a19 100644 --- a/pkg/ruler/evaluator_jitter.go +++ b/pkg/ruler/evaluator_jitter.go @@ -10,8 +10,8 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" ) // EvaluatorWithJitter wraps a given Evaluator. It applies a consistent jitter based on a rule's query string by hashing diff --git a/pkg/ruler/evaluator_jitter_test.go b/pkg/ruler/evaluator_jitter_test.go index 7a1636c55939a..f6e35a390e99a 100644 --- a/pkg/ruler/evaluator_jitter_test.go +++ b/pkg/ruler/evaluator_jitter_test.go @@ -11,7 +11,7 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) type mockEval struct{} diff --git a/pkg/ruler/evaluator_local.go b/pkg/ruler/evaluator_local.go index 91efd5a14d995..dbbd8c813e1f3 100644 --- a/pkg/ruler/evaluator_local.go +++ b/pkg/ruler/evaluator_local.go @@ -7,9 +7,9 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) const EvalModeLocal = "local" diff --git a/pkg/ruler/evaluator_remote.go b/pkg/ruler/evaluator_remote.go index 8cfc63efca912..a409e814d87c2 100644 --- a/pkg/ruler/evaluator_remote.go +++ b/pkg/ruler/evaluator_remote.go @@ -35,13 +35,13 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/keepalive" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/build" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/httpreq" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/build" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/ruler/evaluator_remote_test.go b/pkg/ruler/evaluator_remote_test.go index 6c38fe6ca1e5e..515b8ea306528 100644 --- a/pkg/ruler/evaluator_remote_test.go +++ b/pkg/ruler/evaluator_remote_test.go @@ -18,9 +18,9 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) type mockClient struct { diff --git a/pkg/ruler/grouploader.go b/pkg/ruler/grouploader.go index 628e5a1f873a6..2f22e0a680b16 100644 --- a/pkg/ruler/grouploader.go +++ b/pkg/ruler/grouploader.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/prometheus/rules" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) type GroupLoader struct{} diff --git a/pkg/ruler/memstore.go b/pkg/ruler/memstore.go index b70d17a954b38..69d37ddfeed29 100644 --- a/pkg/ruler/memstore.go +++ b/pkg/ruler/memstore.go @@ -19,9 +19,9 @@ import ( 
"github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/querier/series" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/querier/series" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/ruler/memstore_test.go b/pkg/ruler/memstore_test.go index 43c13be42c6c1..3c26a0f71506a 100644 --- a/pkg/ruler/memstore_test.go +++ b/pkg/ruler/memstore_test.go @@ -15,7 +15,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ruleName = "testrule" diff --git a/pkg/ruler/registry.go b/pkg/ruler/registry.go index 5874eb7536e8d..868b7f29a6f94 100644 --- a/pkg/ruler/registry.go +++ b/pkg/ruler/registry.go @@ -25,9 +25,9 @@ import ( "github.com/prometheus/prometheus/storage" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/ruler/storage/cleaner" - "github.com/grafana/loki/pkg/ruler/storage/instance" - "github.com/grafana/loki/pkg/ruler/storage/wal" + "github.com/grafana/loki/v3/pkg/ruler/storage/cleaner" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/storage/wal" ) type walRegistry struct { diff --git a/pkg/ruler/registry_test.go b/pkg/ruler/registry_test.go index 46ab9a7084576..261b6d3836763 100644 --- a/pkg/ruler/registry_test.go +++ b/pkg/ruler/registry_test.go @@ -19,10 +19,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ruler/storage/instance" - "github.com/grafana/loki/pkg/ruler/util" - "github.com/grafana/loki/pkg/util/test" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/util" + "github.com/grafana/loki/v3/pkg/util/test" + "github.com/grafana/loki/v3/pkg/validation" ) const enabledRWTenant = "enabled" diff --git a/pkg/ruler/ruler.go b/pkg/ruler/ruler.go index dd90ccb153390..5ef091badeb7c 100644 --- a/pkg/ruler/ruler.go +++ b/pkg/ruler/ruler.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/prometheus/config" - ruler "github.com/grafana/loki/pkg/ruler/base" - "github.com/grafana/loki/pkg/ruler/rulestore" + ruler "github.com/grafana/loki/v3/pkg/ruler/base" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" ) func NewRuler(cfg Config, evaluator Evaluator, reg prometheus.Registerer, logger log.Logger, ruleStore rulestore.RuleStore, limits RulesLimits, metricsNamespace string) (*ruler.Ruler, error) { diff --git a/pkg/ruler/rulespb/compat.go b/pkg/ruler/rulespb/compat.go index a0da3dc014f46..0c9de4185a101 100644 --- a/pkg/ruler/rulespb/compat.go +++ b/pkg/ruler/rulespb/compat.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/rulefmt" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/logproto" //lint:ignore faillint allowed to import other protobuf + "github.com/grafana/loki/v3/pkg/logproto" //lint:ignore faillint allowed to import other protobuf ) // ToProto transforms a formatted prometheus rulegroup to a rule group protobuf diff --git a/pkg/ruler/rulespb/rules.pb.go b/pkg/ruler/rulespb/rules.pb.go index ead0d482791be..91afa25a655ef 100644 --- a/pkg/ruler/rulespb/rules.pb.go +++ b/pkg/ruler/rulespb/rules.pb.go @@ -10,8 +10,8 @@ import ( github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" types 
"github.com/gogo/protobuf/types" _ "github.com/golang/protobuf/ptypes/duration" - _ "github.com/grafana/loki/pkg/logproto" - github_com_grafana_loki_pkg_logproto "github.com/grafana/loki/pkg/logproto" + _ "github.com/grafana/loki/v3/pkg/logproto" + github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" io "io" math "math" math_bits "math/bits" @@ -130,12 +130,12 @@ func (m *RuleGroupDesc) GetLimit() int64 { // RuleDesc is a proto representation of a Prometheus Rule type RuleDesc struct { - Expr string `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"` - Record string `protobuf:"bytes,2,opt,name=record,proto3" json:"record,omitempty"` - Alert string `protobuf:"bytes,3,opt,name=alert,proto3" json:"alert,omitempty"` - For time.Duration `protobuf:"bytes,4,opt,name=for,proto3,stdduration" json:"for"` - Labels []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,5,rep,name=labels,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"labels"` - Annotations []github_com_grafana_loki_pkg_logproto.LabelAdapter `protobuf:"bytes,6,rep,name=annotations,proto3,customtype=github.com/grafana/loki/pkg/logproto.LabelAdapter" json:"annotations"` + Expr string `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"` + Record string `protobuf:"bytes,2,opt,name=record,proto3" json:"record,omitempty"` + Alert string `protobuf:"bytes,3,opt,name=alert,proto3" json:"alert,omitempty"` + For time.Duration `protobuf:"bytes,4,opt,name=for,proto3,stdduration" json:"for"` + Labels []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,5,rep,name=labels,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"labels"` + Annotations []github_com_grafana_loki_v3_pkg_logproto.LabelAdapter `protobuf:"bytes,6,rep,name=annotations,proto3,customtype=github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" json:"annotations"` } func (m *RuleDesc) Reset() { *m = RuleDesc{} } @@ -206,39 +206,39 @@ func init() { func init() { proto.RegisterFile("pkg/ruler/rulespb/rules.proto", fileDescriptor_dd3ef3757f506fba) } var fileDescriptor_dd3ef3757f506fba = []byte{ - // 501 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x53, 0x41, 0x6f, 0xd3, 0x30, - 0x18, 0x8d, 0xdb, 0x34, 0x4b, 0x5c, 0x4d, 0x54, 0x56, 0x85, 0xd2, 0x01, 0x6e, 0x35, 0x09, 0xa9, - 0x5c, 0x12, 0x31, 0xc4, 0x81, 0x13, 0x5a, 0x35, 0x09, 0xa9, 0xda, 0x01, 0xe5, 0xc8, 0x05, 0x39, - 0xa9, 0x6b, 0xa2, 0xb9, 0x71, 0xe4, 0x24, 0x13, 0xbd, 0xf1, 0x13, 0xb8, 0x20, 0xf1, 0x13, 0xf8, - 0x29, 0x3b, 0xf6, 0x38, 0x71, 0x18, 0x34, 0xbd, 0x70, 0x63, 0x3f, 0x01, 0xd9, 0x4e, 0xc6, 0x00, - 0x09, 0x71, 0xe1, 0x12, 0x7f, 0xcf, 0xcf, 0x9f, 0xdf, 0xfb, 0x5e, 0x12, 0xf8, 0x20, 0x3f, 0x63, - 0xa1, 0xac, 0x38, 0x95, 0xfa, 0x59, 0xe4, 0xb1, 0x59, 0x83, 0x5c, 0x8a, 0x52, 0xa0, 0x9e, 0x06, - 0x07, 0x43, 0x26, 0x98, 0xd0, 0x3b, 0xa1, 0xaa, 0x0c, 0x79, 0x30, 0x62, 0x42, 0x30, 0x4e, 0x43, - 0x8d, 0xe2, 0x6a, 0x19, 0x92, 0x6c, 0xdd, 0x50, 0xf8, 0x77, 0x6a, 0x51, 0x49, 0x52, 0xa6, 0x22, - 0x6b, 0xf8, 0x7b, 0x4a, 0x96, 0x0b, 0x66, 0xee, 0x6c, 0x0b, 0x43, 0x1e, 0x7e, 0xe8, 0xc0, 0xfd, - 0xa8, 0xe2, 0xf4, 0x85, 0x14, 0x55, 0x7e, 0x42, 0x8b, 0x04, 0x21, 0x68, 0x67, 0x64, 0x45, 0x7d, - 0x30, 0x01, 0x53, 0x2f, 0xd2, 0x35, 0xba, 0x0f, 0x3d, 0xb5, 0x16, 0x39, 0x49, 0xa8, 0xdf, 0xd1, - 0xc4, 0xcf, 0x0d, 0xf4, 0x1c, 0xba, 0x69, 0x56, 0x52, 0x79, 0x4e, 0xb8, 0xdf, 0x9d, 0x80, 0x69, - 0xff, 0x68, 0x14, 0x18, 0x4f, 0x41, 0xeb, 0x29, 0x38, 0x69, 
0x3c, 0xcd, 0xdc, 0x8b, 0xab, 0xb1, - 0xf5, 0xf1, 0xcb, 0x18, 0x44, 0x37, 0x4d, 0xe8, 0x21, 0x34, 0xb3, 0xfb, 0xf6, 0xa4, 0x3b, 0xed, - 0x1f, 0xdd, 0x09, 0x4c, 0x2c, 0xca, 0x97, 0xb2, 0x14, 0x19, 0x56, 0x39, 0xab, 0x0a, 0x2a, 0x7d, - 0xc7, 0x38, 0x53, 0x35, 0x0a, 0xe0, 0x9e, 0xc8, 0xd5, 0xc5, 0x85, 0xef, 0xe9, 0xe6, 0xe1, 0x1f, - 0xd2, 0xc7, 0xd9, 0x3a, 0x6a, 0x0f, 0xa1, 0x21, 0xec, 0xf1, 0x74, 0x95, 0x96, 0x3e, 0x9c, 0x80, - 0x69, 0x37, 0x32, 0x60, 0x6e, 0xbb, 0xbd, 0x81, 0x33, 0xb7, 0xdd, 0xbd, 0x81, 0x3b, 0xb7, 0x5d, - 0x77, 0xe0, 0x1d, 0x7e, 0xef, 0x40, 0xb7, 0xd5, 0x57, 0xc2, 0xf4, 0x6d, 0x2e, 0xdb, 0x48, 0x54, - 0x8d, 0xee, 0x42, 0x47, 0xd2, 0x44, 0xc8, 0x45, 0x93, 0x47, 0x83, 0x94, 0x00, 0xe1, 0x54, 0x96, - 0x3a, 0x09, 0x2f, 0x32, 0x00, 0x3d, 0x85, 0xdd, 0xa5, 0x90, 0xbe, 0xfd, 0xef, 0xe9, 0xa8, 0xf3, - 0x88, 0x43, 0x87, 0x93, 0x98, 0xf2, 0xc2, 0xef, 0xe9, 0xe1, 0x46, 0xc1, 0xcd, 0xeb, 0x3b, 0xa5, - 0x8c, 0x24, 0xeb, 0x53, 0xc5, 0xbe, 0x24, 0xa9, 0x9c, 0x3d, 0x53, 0x9d, 0x9f, 0xaf, 0xc6, 0x8f, - 0x59, 0x5a, 0xbe, 0xa9, 0xe2, 0x20, 0x11, 0xab, 0x90, 0x49, 0xb2, 0x24, 0x19, 0x09, 0xb9, 0x38, - 0x4b, 0xc3, 0xdb, 0x5f, 0x41, 0xa0, 0xfb, 0x8e, 0x17, 0x24, 0x2f, 0xa9, 0x8c, 0x1a, 0x0d, 0x74, - 0x0e, 0xfb, 0x24, 0xcb, 0x44, 0x49, 0x4c, 0x9e, 0xce, 0x7f, 0x94, 0xbc, 0x2d, 0xa4, 0x73, 0xdf, - 0x9f, 0xbd, 0xde, 0x6c, 0xb1, 0x75, 0xb9, 0xc5, 0xd6, 0xf5, 0x16, 0x83, 0x77, 0x35, 0x06, 0x9f, - 0x6a, 0x0c, 0x2e, 0x6a, 0x0c, 0x36, 0x35, 0x06, 0x5f, 0x6b, 0x0c, 0xbe, 0xd5, 0xd8, 0xba, 0xae, - 0x31, 0x78, 0xbf, 0xc3, 0xd6, 0x66, 0x87, 0xad, 0xcb, 0x1d, 0xb6, 0x5e, 0x3d, 0xfa, 0x9b, 0xf6, - 0x2f, 0xff, 0x5a, 0xec, 0x68, 0x1f, 0x4f, 0x7e, 0x04, 0x00, 0x00, 0xff, 0xff, 0x6f, 0xf3, 0x96, - 0x82, 0x87, 0x03, 0x00, 0x00, + // 503 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x53, 0x31, 0x6f, 0xd3, 0x40, + 0x18, 0xf5, 0x25, 0x8e, 0x6b, 0x5f, 0x54, 0x11, 0x9d, 0x22, 0xe4, 0x14, 0xb8, 0x44, 0x95, 0x90, + 0x32, 0x20, 0x5b, 0x6a, 0x61, 0x43, 0x42, 0x8d, 0x2a, 0x21, 0x45, 0x1d, 0x90, 0x47, 0xb6, 0xb3, + 0x73, 0x39, 0xac, 0x5e, 0x7c, 0xd6, 0xd9, 0xae, 0xc8, 0xc6, 0x4f, 0x60, 0x41, 0xe2, 0x27, 0xf0, + 0x53, 0x3a, 0x66, 0xac, 0x18, 0x0a, 0x71, 0x16, 0xc6, 0x2e, 0xec, 0xe8, 0xee, 0xec, 0x52, 0x60, + 0x80, 0x85, 0xc5, 0xf7, 0xbd, 0x7b, 0xf7, 0xdd, 0x7b, 0xdf, 0xb3, 0x0d, 0x1f, 0xe5, 0xe7, 0x2c, + 0x94, 0x15, 0xa7, 0x52, 0x3f, 0x8b, 0x3c, 0x36, 0x6b, 0x90, 0x4b, 0x51, 0x0a, 0xd4, 0xd3, 0xe0, + 0x60, 0xc8, 0x04, 0x13, 0x7a, 0x27, 0x54, 0x95, 0x21, 0x0f, 0x46, 0x4c, 0x08, 0xc6, 0x69, 0xa8, + 0x51, 0x5c, 0x2d, 0x43, 0x92, 0xad, 0x1b, 0x0a, 0xff, 0x4e, 0x2d, 0x2a, 0x49, 0xca, 0x54, 0x64, + 0x0d, 0xff, 0x40, 0xc9, 0x72, 0xc1, 0xcc, 0x9d, 0x6d, 0x61, 0xc8, 0xc3, 0x0f, 0x1d, 0xb8, 0x1f, + 0x55, 0x9c, 0xbe, 0x94, 0xa2, 0xca, 0x4f, 0x69, 0x91, 0x20, 0x04, 0xed, 0x8c, 0xac, 0xa8, 0x0f, + 0x26, 0x60, 0xea, 0x45, 0xba, 0x46, 0x0f, 0xa1, 0xa7, 0xd6, 0x22, 0x27, 0x09, 0xf5, 0x3b, 0x9a, + 0xf8, 0xb9, 0x81, 0x5e, 0x40, 0x37, 0xcd, 0x4a, 0x2a, 0x2f, 0x08, 0xf7, 0xbb, 0x13, 0x30, 0xed, + 0x1f, 0x8d, 0x02, 0xe3, 0x29, 0x68, 0x3d, 0x05, 0xa7, 0x8d, 0xa7, 0x99, 0x7b, 0x79, 0x3d, 0xb6, + 0x3e, 0x7e, 0x19, 0x83, 0xe8, 0xb6, 0x09, 0x3d, 0x86, 0x66, 0x76, 0xdf, 0x9e, 0x74, 0xa7, 0xfd, + 0xa3, 0x7b, 0x81, 0x89, 0x45, 0xf9, 0x52, 0x96, 0x22, 0xc3, 0x2a, 0x67, 0x55, 0x41, 0xa5, 0xef, + 0x18, 0x67, 0xaa, 0x46, 0x01, 0xdc, 0x13, 0xb9, 0xba, 0xb8, 0xf0, 0x3d, 0xdd, 0x3c, 0xfc, 0x43, + 0xfa, 0x24, 0x5b, 0x47, 0xed, 0x21, 0x34, 0x84, 0x3d, 0x9e, 0xae, 0xd2, 0xd2, 0x87, 0x13, 0x30, + 0xed, 
0x46, 0x06, 0xcc, 0x6d, 0xb7, 0x37, 0x70, 0xe6, 0xb6, 0xbb, 0x37, 0x70, 0xe7, 0xb6, 0xeb, + 0x0e, 0xbc, 0xc3, 0xef, 0x1d, 0xe8, 0xb6, 0xfa, 0x4a, 0x98, 0xbe, 0xcd, 0x65, 0x1b, 0x89, 0xaa, + 0xd1, 0x7d, 0xe8, 0x48, 0x9a, 0x08, 0xb9, 0x68, 0xf2, 0x68, 0x90, 0x12, 0x20, 0x9c, 0xca, 0x52, + 0x27, 0xe1, 0x45, 0x06, 0xa0, 0x67, 0xb0, 0xbb, 0x14, 0xd2, 0xb7, 0xff, 0x3d, 0x1d, 0x75, 0x1e, + 0x09, 0xe8, 0x70, 0x12, 0x53, 0x5e, 0xf8, 0x3d, 0x3d, 0xdc, 0x28, 0xb8, 0x7d, 0x7d, 0x67, 0x94, + 0x91, 0x64, 0x7d, 0xa6, 0xd8, 0x57, 0x24, 0x95, 0xb3, 0xe7, 0xaa, 0xf3, 0xf3, 0xf5, 0xf8, 0x29, + 0x4b, 0xcb, 0x37, 0x55, 0x1c, 0x24, 0x62, 0x15, 0x32, 0x49, 0x96, 0x24, 0x23, 0x21, 0x17, 0xe7, + 0x69, 0x78, 0x71, 0x1c, 0xde, 0xfd, 0x10, 0x02, 0xdd, 0x7a, 0xb2, 0x20, 0x79, 0x49, 0x65, 0xd4, + 0xc8, 0xa0, 0x35, 0xec, 0x93, 0x2c, 0x13, 0x25, 0x31, 0x91, 0x3a, 0xff, 0x57, 0xf5, 0xae, 0x96, + 0x4e, 0x7f, 0x7f, 0x16, 0x6f, 0xb6, 0xd8, 0xba, 0xda, 0x62, 0xeb, 0x66, 0x8b, 0xc1, 0xbb, 0x1a, + 0x83, 0x4f, 0x35, 0x06, 0x97, 0x35, 0x06, 0x9b, 0x1a, 0x83, 0xaf, 0x35, 0x06, 0xdf, 0x6a, 0x6c, + 0xdd, 0xd4, 0x18, 0xbc, 0xdf, 0x61, 0x6b, 0xb3, 0xc3, 0xd6, 0xd5, 0x0e, 0x5b, 0xaf, 0x9f, 0xfc, + 0x45, 0xfe, 0x97, 0x9f, 0x2e, 0x76, 0xb4, 0x95, 0xe3, 0x1f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xab, + 0x30, 0xeb, 0xc9, 0x90, 0x03, 0x00, 0x00, } func (this *RuleGroupDesc) Equal(that interface{}) bool { @@ -1145,7 +1145,7 @@ func (m *RuleDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Labels = append(m.Labels, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Labels[len(m.Labels)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -1179,7 +1179,7 @@ func (m *RuleDesc) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Annotations = append(m.Annotations, github_com_grafana_loki_pkg_logproto.LabelAdapter{}) + m.Annotations = append(m.Annotations, github_com_grafana_loki_v3_pkg_logproto.LabelAdapter{}) if err := m.Annotations[len(m.Annotations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/ruler/rulespb/rules.proto b/pkg/ruler/rulespb/rules.proto index be001d238d1d0..82dc7daabd56a 100644 --- a/pkg/ruler/rulespb/rules.proto +++ b/pkg/ruler/rulespb/rules.proto @@ -7,7 +7,7 @@ import "google/protobuf/any.proto"; import "google/protobuf/duration.proto"; import "pkg/logproto/logproto.proto"; -option go_package = "github.com/grafana/loki/pkg/ruler/rulespb"; +option go_package = "github.com/grafana/loki/v3/pkg/ruler/rulespb"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; @@ -42,10 +42,10 @@ message RuleDesc { ]; repeated logproto.LegacyLabelPair labels = 5 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; repeated logproto.LegacyLabelPair annotations = 6 [ (gogoproto.nullable) = false, - (gogoproto.customtype) = "github.com/grafana/loki/pkg/logproto.LabelAdapter" + (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/logproto.LabelAdapter" ]; } diff --git a/pkg/ruler/rulestore/bucketclient/bucket_client.go b/pkg/ruler/rulestore/bucketclient/bucket_client.go index ddd90cb57fdfa..a39a8b03532da 100644 --- a/pkg/ruler/rulestore/bucketclient/bucket_client.go +++ b/pkg/ruler/rulestore/bucketclient/bucket_client.go @@ -15,9 +15,9 @@ import ( 
"github.com/thanos-io/objstore" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/storage/bucket" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/storage/bucket" ) const ( diff --git a/pkg/ruler/rulestore/bucketclient/bucket_client_test.go b/pkg/ruler/rulestore/bucketclient/bucket_client_test.go index 360b7aedb221b..0644238b21685 100644 --- a/pkg/ruler/rulestore/bucketclient/bucket_client_test.go +++ b/pkg/ruler/rulestore/bucketclient/bucket_client_test.go @@ -16,11 +16,11 @@ import ( "github.com/stretchr/testify/require" "github.com/thanos-io/objstore" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/ruler/rulestore/objectclient" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/objectclient" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" ) type testGroup struct { diff --git a/pkg/ruler/rulestore/config.go b/pkg/ruler/rulestore/config.go index f43464d463df2..1f0602424cdb9 100644 --- a/pkg/ruler/rulestore/config.go +++ b/pkg/ruler/rulestore/config.go @@ -6,10 +6,10 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/ruler/rulestore/configdb" - "github.com/grafana/loki/pkg/ruler/rulestore/local" - "github.com/grafana/loki/pkg/storage/bucket" + "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/configdb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + "github.com/grafana/loki/v3/pkg/storage/bucket" ) // Config configures a rule store. 
diff --git a/pkg/ruler/rulestore/configdb/store.go b/pkg/ruler/rulestore/configdb/store.go index a3efa3fc30155..e4a0526386fe4 100644 --- a/pkg/ruler/rulestore/configdb/store.go +++ b/pkg/ruler/rulestore/configdb/store.go @@ -4,9 +4,9 @@ import ( "context" "errors" - "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/configs/userconfig" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/configs/userconfig" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) const ( diff --git a/pkg/ruler/rulestore/configdb/store_test.go b/pkg/ruler/rulestore/configdb/store_test.go index 3542b7bf7bc4a..4d39581cb6492 100644 --- a/pkg/ruler/rulestore/configdb/store_test.go +++ b/pkg/ruler/rulestore/configdb/store_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/configs/client" - "github.com/grafana/loki/pkg/configs/userconfig" + "github.com/grafana/loki/v3/pkg/configs/client" + "github.com/grafana/loki/v3/pkg/configs/userconfig" ) var zeroTime time.Time diff --git a/pkg/ruler/rulestore/local/local.go b/pkg/ruler/rulestore/local/local.go index d798c9831510d..0eb3cda68175d 100644 --- a/pkg/ruler/rulestore/local/local.go +++ b/pkg/ruler/rulestore/local/local.go @@ -9,7 +9,7 @@ import ( "github.com/pkg/errors" promRules "github.com/prometheus/prometheus/rules" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) const ( diff --git a/pkg/ruler/rulestore/local/local_test.go b/pkg/ruler/rulestore/local/local_test.go index 8922781a207ae..ee6abc5b8ee3d 100644 --- a/pkg/ruler/rulestore/local/local_test.go +++ b/pkg/ruler/rulestore/local/local_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) func TestClient_LoadAllRuleGroups(t *testing.T) { diff --git a/pkg/ruler/rulestore/objectclient/rule_store.go b/pkg/ruler/rulestore/objectclient/rule_store.go index 8e7effbcaae01..3471bd88db49a 100644 --- a/pkg/ruler/rulestore/objectclient/rule_store.go +++ b/pkg/ruler/rulestore/objectclient/rule_store.go @@ -14,9 +14,9 @@ import ( "github.com/pkg/errors" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/ruler/rulespb" - "github.com/grafana/loki/pkg/ruler/rulestore" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulestore" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) // Object Rule Storage Schema diff --git a/pkg/ruler/rulestore/store.go b/pkg/ruler/rulestore/store.go index 82f7d4f6ec097..67bd5580dd592 100644 --- a/pkg/ruler/rulestore/store.go +++ b/pkg/ruler/rulestore/store.go @@ -4,7 +4,7 @@ import ( "context" "errors" - "github.com/grafana/loki/pkg/ruler/rulespb" + "github.com/grafana/loki/v3/pkg/ruler/rulespb" ) var ( diff --git a/pkg/ruler/storage/cleaner/cleaner.go b/pkg/ruler/storage/cleaner/cleaner.go index b1ad8e76caa0b..e923815c0726a 100644 --- a/pkg/ruler/storage/cleaner/cleaner.go +++ b/pkg/ruler/storage/cleaner/cleaner.go @@ -14,8 +14,8 @@ import ( "github.com/go-kit/log/level" promwal "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/ruler/storage/instance" - "github.com/grafana/loki/pkg/ruler/storage/wal" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/storage/wal" ) // Default settings for the 
WAL cleaner. diff --git a/pkg/ruler/storage/cleaner/cleaner_test.go b/pkg/ruler/storage/cleaner/cleaner_test.go index 448333165857e..5d5147eb0ada7 100644 --- a/pkg/ruler/storage/cleaner/cleaner_test.go +++ b/pkg/ruler/storage/cleaner/cleaner_test.go @@ -12,7 +12,7 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ruler/storage/instance" + "github.com/grafana/loki/v3/pkg/ruler/storage/instance" ) func TestWALCleaner_getAllStorageNoRoot(t *testing.T) { diff --git a/pkg/ruler/storage/instance/instance.go b/pkg/ruler/storage/instance/instance.go index b6cd1fc2ed25f..9bcfcea1319d4 100644 --- a/pkg/ruler/storage/instance/instance.go +++ b/pkg/ruler/storage/instance/instance.go @@ -27,9 +27,9 @@ import ( "github.com/prometheus/prometheus/tsdb/wlog" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/ruler/storage/util" - "github.com/grafana/loki/pkg/ruler/storage/wal" - "github.com/grafana/loki/pkg/util/build" + "github.com/grafana/loki/v3/pkg/ruler/storage/util" + "github.com/grafana/loki/v3/pkg/ruler/storage/wal" + "github.com/grafana/loki/v3/pkg/util/build" ) func init() { diff --git a/pkg/ruler/storage/instance/instance_test.go b/pkg/ruler/storage/instance/instance_test.go index b017664e33f76..03a469ed187c6 100644 --- a/pkg/ruler/storage/instance/instance_test.go +++ b/pkg/ruler/storage/instance/instance_test.go @@ -29,7 +29,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/test" + "github.com/grafana/loki/v3/pkg/util/test" ) func TestConfig_Unmarshal_Defaults(t *testing.T) { diff --git a/pkg/ruler/storage/instance/manager.go b/pkg/ruler/storage/instance/manager.go index c65b087acae11..765f3dfc91657 100644 --- a/pkg/ruler/storage/instance/manager.go +++ b/pkg/ruler/storage/instance/manager.go @@ -14,7 +14,7 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/prometheus/storage" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/scheduler/scheduler.go b/pkg/scheduler/scheduler.go index 4c26becce7a63..ead2f1799b636 100644 --- a/pkg/scheduler/scheduler.go +++ b/pkg/scheduler/scheduler.go @@ -28,15 +28,15 @@ import ( "go.uber.org/atomic" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/lokifrontend/frontend/v2/frontendv2pb" - "github.com/grafana/loki/pkg/querier/queryrange" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - "github.com/grafana/loki/pkg/util" - lokigrpc "github.com/grafana/loki/pkg/util/httpgrpc" - lokihttpreq "github.com/grafana/loki/pkg/util/httpreq" - lokiring "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/lokifrontend/frontend/v2/frontendv2pb" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + "github.com/grafana/loki/v3/pkg/util" + lokigrpc "github.com/grafana/loki/v3/pkg/util/httpgrpc" + lokihttpreq "github.com/grafana/loki/v3/pkg/util/httpreq" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" ) const ( diff --git a/pkg/scheduler/scheduler_test.go b/pkg/scheduler/scheduler_test.go index 5be9ed7ed6c8d..7f8d88e4d679e 100644 --- a/pkg/scheduler/scheduler_test.go +++ b/pkg/scheduler/scheduler_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" 
"google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/scheduler/schedulerpb" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/scheduler/schedulerpb" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestScheduler_setRunState(t *testing.T) { diff --git a/pkg/scheduler/schedulerpb/scheduler.pb.go b/pkg/scheduler/schedulerpb/scheduler.pb.go index c2f95e59cdae4..fa4df89363c5b 100644 --- a/pkg/scheduler/schedulerpb/scheduler.pb.go +++ b/pkg/scheduler/schedulerpb/scheduler.pb.go @@ -9,7 +9,7 @@ import ( _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" httpgrpc "github.com/grafana/dskit/httpgrpc" - queryrange "github.com/grafana/loki/pkg/querier/queryrange" + queryrange "github.com/grafana/loki/v3/pkg/querier/queryrange" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/pkg/storage/async_store.go b/pkg/storage/async_store.go index e3794a63733ad..31fea0ced0881 100644 --- a/pkg/storage/async_store.go +++ b/pkg/storage/async_store.go @@ -7,22 +7,22 @@ import ( "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/go-kit/log/level" "github.com/grafana/dskit/concurrency" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) type IngesterQuerier interface { diff --git a/pkg/storage/async_store_test.go b/pkg/storage/async_store_test.go index 9b2e440c1dc7d..366a7d6f1f1d5 100644 --- a/pkg/storage/async_store_test.go +++ b/pkg/storage/async_store_test.go @@ -5,17 +5,17 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" ) // storeMock is a mockable version of Loki's storage, used in querier unit tests diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 20cc45b69590b..21f321a6f8a54 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -13,18 +13,18 @@ import ( 
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type ChunkMetrics struct { diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 1df906f7dcf2b..e041ad186fe89 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -13,13 +13,13 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/config" ) var NilMetrics = NewChunkMetrics(nil, 0) diff --git a/pkg/storage/bloom/v1/archive.go b/pkg/storage/bloom/v1/archive.go index 07ed9cd76d7f3..fcc3294eba977 100644 --- a/pkg/storage/bloom/v1/archive.go +++ b/pkg/storage/bloom/v1/archive.go @@ -8,7 +8,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/chunkenc" ) type TarEntry struct { diff --git a/pkg/storage/bloom/v1/archive_test.go b/pkg/storage/bloom/v1/archive_test.go index c77fbc69f122b..d6131c166f674 100644 --- a/pkg/storage/bloom/v1/archive_test.go +++ b/pkg/storage/bloom/v1/archive_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/chunkenc" ) func TestArchive(t *testing.T) { diff --git a/pkg/storage/bloom/v1/block_writer.go b/pkg/storage/bloom/v1/block_writer.go index b7954264ae8ba..1bdc38f32fca9 100644 --- a/pkg/storage/bloom/v1/block_writer.go +++ b/pkg/storage/bloom/v1/block_writer.go @@ -8,7 +8,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) const ( diff --git a/pkg/storage/bloom/v1/bloom.go b/pkg/storage/bloom/v1/bloom.go index 058ac68818d5f..1554b6828f246 100644 --- a/pkg/storage/bloom/v1/bloom.go +++ b/pkg/storage/bloom/v1/bloom.go @@ -7,9 +7,9 @@ import ( 
"github.com/pkg/errors" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // NB(chaudum): Some block pages are way bigger than others (400MiB and diff --git a/pkg/storage/bloom/v1/bloom_tester.go b/pkg/storage/bloom/v1/bloom_tester.go index f60166788e4d6..dbb0f7a12aaff 100644 --- a/pkg/storage/bloom/v1/bloom_tester.go +++ b/pkg/storage/bloom/v1/bloom_tester.go @@ -4,10 +4,10 @@ import ( "github.com/grafana/regexp" regexpsyntax "github.com/grafana/regexp/syntax" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/log/pattern" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/log/pattern" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) type BloomTest interface { diff --git a/pkg/storage/bloom/v1/bloom_tester_test.go b/pkg/storage/bloom/v1/bloom_tester_test.go index 085d56c590063..00db00340340f 100644 --- a/pkg/storage/bloom/v1/bloom_tester_test.go +++ b/pkg/storage/bloom/v1/bloom_tester_test.go @@ -5,8 +5,8 @@ import ( "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) func TestFiltersToBloomTests(t *testing.T) { diff --git a/pkg/storage/bloom/v1/bloom_tokenizer.go b/pkg/storage/bloom/v1/bloom_tokenizer.go index 16f6f0bc68165..f529f22717639 100644 --- a/pkg/storage/bloom/v1/bloom_tokenizer.go +++ b/pkg/storage/bloom/v1/bloom_tokenizer.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/dskit/multierror" - "github.com/grafana/loki/pkg/iter" + "github.com/grafana/loki/v3/pkg/iter" - "github.com/grafana/loki/pkg/util/encoding" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/encoding" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) /* diff --git a/pkg/storage/bloom/v1/bloom_tokenizer_test.go b/pkg/storage/bloom/v1/bloom_tokenizer_test.go index d80c175713454..3f721974c2309 100644 --- a/pkg/storage/bloom/v1/bloom_tokenizer_test.go +++ b/pkg/storage/bloom/v1/bloom_tokenizer_test.go @@ -9,15 +9,15 @@ import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/push" "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" "github.com/prometheus/client_golang/prometheus" ) diff --git a/pkg/storage/bloom/v1/bounds.go b/pkg/storage/bloom/v1/bounds.go index 542ba2c972790..1b482e46665bf 100644 --- a/pkg/storage/bloom/v1/bounds.go +++ b/pkg/storage/bloom/v1/bounds.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/encoding" + 
"github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/encoding" ) type BoundsCheck uint8 diff --git a/pkg/storage/bloom/v1/bounds_test.go b/pkg/storage/bloom/v1/bounds_test.go index 98fec7b0aafa0..5baaf07e900df 100644 --- a/pkg/storage/bloom/v1/bounds_test.go +++ b/pkg/storage/bloom/v1/bounds_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func TestBoundsFromProto(t *testing.T) { diff --git a/pkg/storage/bloom/v1/builder.go b/pkg/storage/bloom/v1/builder.go index aa00b58cf6705..0d129aa3def25 100644 --- a/pkg/storage/bloom/v1/builder.go +++ b/pkg/storage/bloom/v1/builder.go @@ -9,9 +9,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/util/encoding" ) var ( diff --git a/pkg/storage/bloom/v1/builder_test.go b/pkg/storage/bloom/v1/builder_test.go index 481c8ec9f915e..2046144032495 100644 --- a/pkg/storage/bloom/v1/builder_test.go +++ b/pkg/storage/bloom/v1/builder_test.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/util/encoding" ) var blockEncodings = []chunkenc.Encoding{ diff --git a/pkg/storage/bloom/v1/fuse_test.go b/pkg/storage/bloom/v1/fuse_test.go index 5c9f2f06f0478..f1cd77d9ebd4a 100644 --- a/pkg/storage/bloom/v1/fuse_test.go +++ b/pkg/storage/bloom/v1/fuse_test.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/concurrency" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/chunkenc" ) func keysToBloomTest(keys [][]byte) BloomTest { diff --git a/pkg/storage/bloom/v1/index.go b/pkg/storage/bloom/v1/index.go index c69b4eb292be3..ff9ecaffbac37 100644 --- a/pkg/storage/bloom/v1/index.go +++ b/pkg/storage/bloom/v1/index.go @@ -9,9 +9,9 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/encoding" ) type Schema struct { diff --git a/pkg/storage/bloom/v1/index_test.go b/pkg/storage/bloom/v1/index_test.go index 477d266af7a75..eb61b1e2a2abc 100644 --- a/pkg/storage/bloom/v1/index_test.go +++ b/pkg/storage/bloom/v1/index_test.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) func TestBloomOffsetEncoding(t *testing.T) { diff --git a/pkg/storage/bloom/v1/metrics.go b/pkg/storage/bloom/v1/metrics.go index c2204bf275505..c45b2235dccdb 100644 --- a/pkg/storage/bloom/v1/metrics.go +++ b/pkg/storage/bloom/v1/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + 
"github.com/grafana/loki/v3/pkg/util/constants" ) type Metrics struct { diff --git a/pkg/storage/bloom/v1/test_util.go b/pkg/storage/bloom/v1/test_util.go index ed1dd5cc978c1..d3ac7e427ec51 100644 --- a/pkg/storage/bloom/v1/test_util.go +++ b/pkg/storage/bloom/v1/test_util.go @@ -9,8 +9,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) // TODO(owen-d): this should probably be in it's own testing-util package diff --git a/pkg/storage/bucket/azure/config.go b/pkg/storage/bucket/azure/config.go index 18d0f74fc3e85..928503190d931 100644 --- a/pkg/storage/bucket/azure/config.go +++ b/pkg/storage/bucket/azure/config.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/bucket/http" ) // Config holds the config options for an Azure backend diff --git a/pkg/storage/bucket/azure/config_test.go b/pkg/storage/bucket/azure/config_test.go index 7d3c6d9f326de..756ae298b65cb 100644 --- a/pkg/storage/bucket/azure/config_test.go +++ b/pkg/storage/bucket/azure/config_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/bucket/http" ) // defaultConfig should match the default flag values defined in RegisterFlagsWithPrefix. diff --git a/pkg/storage/bucket/client.go b/pkg/storage/bucket/client.go index 57751afe36546..4f81ce7b2934e 100644 --- a/pkg/storage/bucket/client.go +++ b/pkg/storage/bucket/client.go @@ -12,12 +12,12 @@ import ( "github.com/thanos-io/objstore" opentracing "github.com/thanos-io/objstore/tracing/opentracing" - "github.com/grafana/loki/pkg/storage/bucket/azure" - "github.com/grafana/loki/pkg/storage/bucket/filesystem" - "github.com/grafana/loki/pkg/storage/bucket/gcs" - "github.com/grafana/loki/pkg/storage/bucket/s3" - "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/storage/bucket/azure" + "github.com/grafana/loki/v3/pkg/storage/bucket/filesystem" + "github.com/grafana/loki/v3/pkg/storage/bucket/gcs" + "github.com/grafana/loki/v3/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/storage/bucket/client_test.go b/pkg/storage/bucket/client_test.go index 7d4bee7c9e15a..489f7d2f1f269 100644 --- a/pkg/storage/bucket/client_test.go +++ b/pkg/storage/bucket/client_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/bucket/s3/config.go b/pkg/storage/bucket/s3/config.go index 791ef7d758656..b01eb1444a743 100644 --- a/pkg/storage/bucket/s3/config.go +++ b/pkg/storage/bucket/s3/config.go @@ -12,9 +12,9 @@ import ( "github.com/pkg/errors" "github.com/thanos-io/objstore/providers/s3" - bucket_http "github.com/grafana/loki/pkg/storage/bucket/http" - "github.com/grafana/loki/pkg/storage/common/aws" - "github.com/grafana/loki/pkg/util" + bucket_http "github.com/grafana/loki/v3/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/common/aws" + "github.com/grafana/loki/v3/pkg/util" 
) const ( diff --git a/pkg/storage/bucket/s3/config_test.go b/pkg/storage/bucket/s3/config_test.go index 5e6b9f9545a27..a6ae54f586ee8 100644 --- a/pkg/storage/bucket/s3/config_test.go +++ b/pkg/storage/bucket/s3/config_test.go @@ -13,8 +13,8 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - bucket_http "github.com/grafana/loki/pkg/storage/bucket/http" - "github.com/grafana/loki/pkg/storage/common/aws" + bucket_http "github.com/grafana/loki/v3/pkg/storage/bucket/http" + "github.com/grafana/loki/v3/pkg/storage/common/aws" ) // defaultConfig should match the default flag values defined in RegisterFlagsWithPrefix. diff --git a/pkg/storage/bucket/sse_bucket_client.go b/pkg/storage/bucket/sse_bucket_client.go index 51cc68a86673a..426522cfcfd1f 100644 --- a/pkg/storage/bucket/sse_bucket_client.go +++ b/pkg/storage/bucket/sse_bucket_client.go @@ -9,7 +9,7 @@ import ( "github.com/thanos-io/objstore" thanos_s3 "github.com/thanos-io/objstore/providers/s3" - "github.com/grafana/loki/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) // TenantConfigProvider defines a per-tenant config provider. diff --git a/pkg/storage/bucket/sse_bucket_client_test.go b/pkg/storage/bucket/sse_bucket_client_test.go index e347ad039bc1f..697e8837a2f32 100644 --- a/pkg/storage/bucket/sse_bucket_client_test.go +++ b/pkg/storage/bucket/sse_bucket_client_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/require" "github.com/thanos-io/objstore" - "github.com/grafana/loki/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) func TestSSEBucketClient_Upload_ShouldInjectCustomSSEConfig(t *testing.T) { diff --git a/pkg/storage/chunk/cache/background.go b/pkg/storage/chunk/cache/background.go index 92995cf08d905..859bdf96f9160 100644 --- a/pkg/storage/chunk/cache/background.go +++ b/pkg/storage/chunk/cache/background.go @@ -12,9 +12,9 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // BackgroundConfig is config for a Background Cache. diff --git a/pkg/storage/chunk/cache/background_test.go b/pkg/storage/chunk/cache/background_test.go index a718301957603..38963d4b02495 100644 --- a/pkg/storage/chunk/cache/background_test.go +++ b/pkg/storage/chunk/cache/background_test.go @@ -8,9 +8,9 @@ import ( "github.com/dustin/go-humanize" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/flagext" ) func TestBackground(t *testing.T) { diff --git a/pkg/storage/chunk/cache/cache.go b/pkg/storage/chunk/cache/cache.go index 870d7c19e5c7c..6e1565fcaa3e8 100644 --- a/pkg/storage/chunk/cache/cache.go +++ b/pkg/storage/chunk/cache/cache.go @@ -11,7 +11,7 @@ import ( "github.com/go-kit/log" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) // Cache byte arrays by key. 
diff --git a/pkg/storage/chunk/cache/cache_gen.go b/pkg/storage/chunk/cache/cache_gen.go index 689e165e75d01..1e1bcf18062bc 100644 --- a/pkg/storage/chunk/cache/cache_gen.go +++ b/pkg/storage/chunk/cache/cache_gen.go @@ -3,7 +3,7 @@ package cache import ( "context" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type contextKey int diff --git a/pkg/storage/chunk/cache/cache_test.go b/pkg/storage/chunk/cache/cache_test.go index e65339066ad44..5595b2df0a6cf 100644 --- a/pkg/storage/chunk/cache/cache_test.go +++ b/pkg/storage/chunk/cache/cache_test.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" ) const userID = "1" diff --git a/pkg/storage/chunk/cache/embeddedcache.go b/pkg/storage/chunk/cache/embeddedcache.go index 46eb204125b0a..8996283158570 100644 --- a/pkg/storage/chunk/cache/embeddedcache.go +++ b/pkg/storage/chunk/cache/embeddedcache.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/chunk/cache/instrumented.go b/pkg/storage/chunk/cache/instrumented.go index c1f515debf689..81c56c7512e4a 100644 --- a/pkg/storage/chunk/cache/instrumented.go +++ b/pkg/storage/chunk/cache/instrumented.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // Instrument returns an instrumented cache. 
diff --git a/pkg/storage/chunk/cache/memcached.go b/pkg/storage/chunk/cache/memcached.go index 9b6150839cd29..ca8e2e2f92da2 100644 --- a/pkg/storage/chunk/cache/memcached.go +++ b/pkg/storage/chunk/cache/memcached.go @@ -14,9 +14,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/math" ) // MemcachedConfig is config to make a Memcached diff --git a/pkg/storage/chunk/cache/memcached_client.go b/pkg/storage/chunk/cache/memcached_client.go index f2dc35bbe08f5..d6df538342faf 100644 --- a/pkg/storage/chunk/cache/memcached_client.go +++ b/pkg/storage/chunk/cache/memcached_client.go @@ -21,7 +21,7 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/sony/gobreaker" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) // MemcachedClient interface exists for mocking memcacheClient. diff --git a/pkg/storage/chunk/cache/memcached_client_selector.go b/pkg/storage/chunk/cache/memcached_client_selector.go index c9604ce36a2af..8c8d49e2ba3af 100644 --- a/pkg/storage/chunk/cache/memcached_client_selector.go +++ b/pkg/storage/chunk/cache/memcached_client_selector.go @@ -10,7 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/gomemcache/memcache" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // MemcachedJumpHashSelector implements the memcache.ServerSelector diff --git a/pkg/storage/chunk/cache/memcached_client_selector_test.go b/pkg/storage/chunk/cache/memcached_client_selector_test.go index 2a3f28709549c..cec908876b1bb 100644 --- a/pkg/storage/chunk/cache/memcached_client_selector_test.go +++ b/pkg/storage/chunk/cache/memcached_client_selector_test.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/gomemcache/memcache" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) func TestNatSort(t *testing.T) { diff --git a/pkg/storage/chunk/cache/memcached_test.go b/pkg/storage/chunk/cache/memcached_test.go index 4082c331a10e0..e79a0f9130939 100644 --- a/pkg/storage/chunk/cache/memcached_test.go +++ b/pkg/storage/chunk/cache/memcached_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) func TestMemcached_fetchKeysBatched(t *testing.T) { diff --git a/pkg/storage/chunk/cache/mock.go b/pkg/storage/chunk/cache/mock.go index 55db7f32a5558..1b0f60da3dec3 100644 --- a/pkg/storage/chunk/cache/mock.go +++ b/pkg/storage/chunk/cache/mock.go @@ -4,7 +4,7 @@ import ( "context" "sync" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type MockCache interface { diff --git a/pkg/storage/chunk/cache/redis_cache.go b/pkg/storage/chunk/cache/redis_cache.go index 5a4f9f73b87a7..f167b6c4ae225 100644 --- a/pkg/storage/chunk/cache/redis_cache.go +++ b/pkg/storage/chunk/cache/redis_cache.go @@ -7,8 +7,8 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/logqlmodel/stats" - util_log 
"github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // RedisCache type caches chunks in redis diff --git a/pkg/storage/chunk/cache/resultscache/cache.go b/pkg/storage/chunk/cache/resultscache/cache.go index 3ea3e727b502d..549e0b72983cb 100644 --- a/pkg/storage/chunk/cache/resultscache/cache.go +++ b/pkg/storage/chunk/cache/resultscache/cache.go @@ -19,11 +19,11 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) // ConstSplitter is a utility for using a constant split interval when determining cache keys diff --git a/pkg/storage/chunk/cache/resultscache/cache_test.go b/pkg/storage/chunk/cache/resultscache/cache_test.go index cff371097a681..964a310f5951f 100644 --- a/pkg/storage/chunk/cache/resultscache/cache_test.go +++ b/pkg/storage/chunk/cache/resultscache/cache_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" ) const day = 24 * time.Hour diff --git a/pkg/storage/chunk/cache/resultscache/config.go b/pkg/storage/chunk/cache/resultscache/config.go index 5a329168e8372..93c032a91ef69 100644 --- a/pkg/storage/chunk/cache/resultscache/config.go +++ b/pkg/storage/chunk/cache/resultscache/config.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) // Config is the config for the results cache. 
diff --git a/pkg/storage/chunk/cache/resultscache/test_types.pb.go b/pkg/storage/chunk/cache/resultscache/test_types.pb.go index 7d3a54864e3df..b53ce50a16024 100644 --- a/pkg/storage/chunk/cache/resultscache/test_types.pb.go +++ b/pkg/storage/chunk/cache/resultscache/test_types.pb.go @@ -278,36 +278,37 @@ func init() { } var fileDescriptor_5b2c489557407809 = []byte{ - // 462 bytes of a gzipped FileDescriptorProto + // 465 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x3f, 0x6f, 0x13, 0x31, - 0x14, 0x3f, 0xe7, 0xd2, 0x94, 0x3a, 0x51, 0x07, 0xab, 0xc3, 0x29, 0x42, 0x4e, 0xc8, 0x94, 0xe9, - 0x2c, 0x95, 0x3f, 0x43, 0xc5, 0x14, 0xc4, 0x82, 0xa8, 0x40, 0x86, 0x89, 0xa5, 0x72, 0x0e, 0xd7, - 0x39, 0xe5, 0xee, 0xec, 0xde, 0xf3, 0x21, 0xba, 0xb1, 0xb3, 0xf4, 0x63, 0xf0, 0x51, 0x3a, 0x66, - 0xec, 0x04, 0xe4, 0xb2, 0x30, 0xf6, 0x23, 0x20, 0xfb, 0x92, 0x36, 0xa5, 0x0b, 0xdd, 0xde, 0xf3, - 0xfb, 0xfd, 0xb1, 0x7e, 0xef, 0xe1, 0xe7, 0x66, 0xae, 0x18, 0x58, 0x5d, 0x0a, 0x25, 0x59, 0x32, - 0xab, 0x8a, 0x39, 0x4b, 0x44, 0x32, 0x93, 0xac, 0x94, 0x50, 0x65, 0x16, 0x9a, 0xc6, 0x4a, 0xb0, - 0x27, 0xf6, 0xdc, 0x48, 0x88, 0x4d, 0xa9, 0xad, 0x26, 0xbd, 0xed, 0x71, 0xff, 0x40, 0x69, 0xa5, - 0xfd, 0x80, 0xb9, 0xaa, 0xc1, 0xf4, 0x07, 0x4a, 0x6b, 0x95, 0x49, 0xe6, 0xbb, 0x69, 0x75, 0xca, - 0x6c, 0x9a, 0x4b, 0xb0, 0x22, 0x37, 0x6b, 0x40, 0x77, 0x4b, 0x71, 0xf4, 0xbd, 0x85, 0xbb, 0xc7, - 0x3a, 0x99, 0x73, 0x79, 0x56, 0x49, 0xb0, 0x84, 0xe0, 0xb6, 0x11, 0x76, 0x16, 0xa1, 0x21, 0x1a, - 0xef, 0x71, 0x5f, 0x93, 0x23, 0xbc, 0x03, 0x56, 0x94, 0x36, 0x6a, 0x0d, 0xd1, 0xb8, 0x7b, 0xd8, - 0x8f, 0x1b, 0x87, 0x78, 0xe3, 0x10, 0x7f, 0xdc, 0x38, 0x4c, 0x1e, 0x5d, 0xfe, 0x1c, 0x04, 0x17, - 0xbf, 0x06, 0x88, 0x37, 0x14, 0xf2, 0x02, 0x87, 0xb2, 0xf8, 0x1c, 0x85, 0x0f, 0x60, 0x3a, 0x82, - 0xfb, 0x07, 0x58, 0x69, 0xa2, 0xf6, 0x10, 0x8d, 0x43, 0xee, 0x6b, 0x72, 0x80, 0x77, 0xce, 0x2a, - 0x59, 0x9e, 0x47, 0x1d, 0xff, 0xb9, 0xa6, 0x21, 0x6f, 0xf0, 0xbe, 0x8b, 0x23, 0x2d, 0xd4, 0x3b, - 0x63, 0x53, 0x5d, 0x40, 0xb4, 0xeb, 0xcd, 0x1e, 0xc7, 0xdb, 0x61, 0xc5, 0xaf, 0xee, 0x60, 0x26, - 0x6d, 0x67, 0xc7, 0xff, 0x61, 0x8e, 0xbe, 0xe2, 0x5e, 0x13, 0x06, 0x18, 0x5d, 0x80, 0x24, 0xcf, - 0x70, 0x27, 0x13, 0x53, 0x99, 0x41, 0x84, 0x86, 0xe1, 0x7d, 0x4d, 0x87, 0x7d, 0xeb, 0xe7, 0xef, - 0x45, 0x5a, 0xf2, 0x35, 0x96, 0x1c, 0xe2, 0x5d, 0x10, 0xb9, 0xc9, 0x24, 0x44, 0x2d, 0x4f, 0x8b, - 0xee, 0xd3, 0x3e, 0x78, 0x00, 0xdf, 0x00, 0x47, 0x47, 0x78, 0xff, 0xae, 0x9a, 0x4b, 0xa0, 0x10, - 0xb9, 0xdc, 0x6c, 0xc2, 0xd5, 0x2e, 0x81, 0x2f, 0x22, 0xab, 0xa4, 0xdf, 0xc4, 0x1e, 0x6f, 0x9a, - 0xd1, 0x6b, 0x8c, 0x6f, 0x25, 0x6f, 0x31, 0x8e, 0x88, 0xd6, 0x18, 0xf2, 0x04, 0xf7, 0x6e, 0xee, - 0xe0, 0x24, 0x07, 0x2f, 0x10, 0xf2, 0xee, 0xcd, 0xdb, 0x31, 0x4c, 0xca, 0xc5, 0x92, 0x06, 0x57, - 0x4b, 0x1a, 0x5c, 0x2f, 0x29, 0xfa, 0x56, 0x53, 0xf4, 0xa3, 0xa6, 0xe8, 0xb2, 0xa6, 0x68, 0x51, - 0x53, 0xf4, 0xbb, 0xa6, 0xe8, 0x4f, 0x4d, 0x83, 0xeb, 0x9a, 0xa2, 0x8b, 0x15, 0x0d, 0x16, 0x2b, - 0x1a, 0x5c, 0xad, 0x68, 0xf0, 0xe9, 0xa5, 0x4a, 0xed, 0xac, 0x9a, 0xc6, 0x89, 0xce, 0x99, 0x2a, - 0xc5, 0xa9, 0x28, 0x04, 0xcb, 0xf4, 0x3c, 0x65, 0xff, 0x73, 0xe1, 0xd3, 0x8e, 0xbf, 0x84, 0xa7, - 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x20, 0x73, 0x6a, 0xfb, 0x10, 0x03, 0x00, 0x00, + 0x14, 0x3f, 0xe7, 0xd2, 0x94, 0x3a, 0x51, 0x07, 0xab, 0x83, 0x15, 0x21, 0x27, 0x64, 0xca, 0x74, + 0x96, 0x5a, 0x60, 0xe8, 0x82, 0x14, 0xc4, 0x82, 0xa8, 0x40, 0x86, 0x89, 0xa5, 0x72, 0x0e, 0xd7, + 0x39, 0xe5, 0xee, 0xec, 0x9e, 0x7d, 0x15, 0xdd, 0xd8, 0x59, 
0xfa, 0x31, 0xf8, 0x28, 0x1d, 0x33, + 0x76, 0x02, 0x72, 0x59, 0x18, 0xfb, 0x11, 0x90, 0x7d, 0xb9, 0x36, 0x25, 0x0b, 0xdd, 0xde, 0xf3, + 0xfb, 0xfd, 0xb1, 0x7e, 0xef, 0xc1, 0x17, 0x7a, 0x2e, 0xa9, 0xb1, 0xaa, 0xe0, 0x52, 0xd0, 0x78, + 0x56, 0xe6, 0x73, 0x1a, 0xf3, 0x78, 0x26, 0x68, 0x21, 0x4c, 0x99, 0x5a, 0x53, 0x37, 0x56, 0x18, + 0x7b, 0x6a, 0x2f, 0xb5, 0x30, 0x91, 0x2e, 0x94, 0x55, 0xa8, 0xb7, 0x39, 0xee, 0x1f, 0x48, 0x25, + 0x95, 0x1f, 0x50, 0x57, 0xd5, 0x98, 0xfe, 0x40, 0x2a, 0x25, 0x53, 0x41, 0x7d, 0x37, 0x2d, 0xcf, + 0xa8, 0x4d, 0x32, 0x61, 0x2c, 0xcf, 0xf4, 0x1a, 0xd0, 0xdd, 0x50, 0x1c, 0x7d, 0x6f, 0xc1, 0xee, + 0x89, 0x8a, 0xe7, 0x4c, 0x9c, 0x97, 0xc2, 0x58, 0x84, 0x60, 0x5b, 0x73, 0x3b, 0xc3, 0x60, 0x08, + 0xc6, 0x7b, 0xcc, 0xd7, 0xe8, 0x18, 0xee, 0x18, 0xcb, 0x0b, 0x8b, 0x5b, 0x43, 0x30, 0xee, 0x1e, + 0xf6, 0xa3, 0xda, 0x21, 0x6a, 0x1c, 0xa2, 0x4f, 0x8d, 0xc3, 0xe4, 0xc9, 0xf5, 0xcf, 0x41, 0x70, + 0xf5, 0x6b, 0x00, 0x58, 0x4d, 0x41, 0x2f, 0x61, 0x28, 0xf2, 0x2f, 0x38, 0x7c, 0x04, 0xd3, 0x11, + 0xdc, 0x3f, 0x8c, 0x15, 0x1a, 0xb7, 0x87, 0x60, 0x1c, 0x32, 0x5f, 0xa3, 0x03, 0xb8, 0x73, 0x5e, + 0x8a, 0xe2, 0x12, 0x77, 0xfc, 0xe7, 0xea, 0x06, 0xbd, 0x85, 0xfb, 0x2e, 0x8e, 0x24, 0x97, 0xef, + 0xb5, 0x4d, 0x54, 0x6e, 0xf0, 0xae, 0x37, 0x7b, 0x1a, 0x6d, 0x86, 0x15, 0xbd, 0x7e, 0x80, 0x99, + 0xb4, 0x9d, 0x1d, 0xfb, 0x87, 0x39, 0xfa, 0x0a, 0x7b, 0x75, 0x18, 0x46, 0xab, 0xdc, 0x08, 0xf4, + 0x1c, 0x76, 0x52, 0x3e, 0x15, 0xa9, 0xc1, 0x60, 0x18, 0x6e, 0x6b, 0x3a, 0xec, 0x3b, 0x3f, 0xff, + 0xc0, 0x93, 0x82, 0xad, 0xb1, 0xe8, 0x10, 0xee, 0x1a, 0x9e, 0xe9, 0x54, 0x18, 0xdc, 0xf2, 0x34, + 0xbc, 0x4d, 0xfb, 0xe8, 0x01, 0xac, 0x01, 0x8e, 0x8e, 0xe1, 0xfe, 0x43, 0x35, 0x97, 0x40, 0xce, + 0x33, 0xd1, 0x6c, 0xc2, 0xd5, 0x2e, 0x81, 0x0b, 0x9e, 0x96, 0xc2, 0x6f, 0x62, 0x8f, 0xd5, 0xcd, + 0xe8, 0x0d, 0x84, 0xf7, 0x92, 0xf7, 0x18, 0x47, 0x04, 0x6b, 0x0c, 0x7a, 0x06, 0x7b, 0x77, 0x77, + 0x70, 0x9a, 0x19, 0x2f, 0x10, 0xb2, 0xee, 0xdd, 0xdb, 0x89, 0x99, 0x94, 0x8b, 0x25, 0x09, 0x6e, + 0x96, 0x24, 0xb8, 0x5d, 0x12, 0xf0, 0xad, 0x22, 0xe0, 0x47, 0x45, 0xc0, 0x75, 0x45, 0xc0, 0xa2, + 0x22, 0xe0, 0x77, 0x45, 0xc0, 0x9f, 0x8a, 0x04, 0xb7, 0x15, 0x01, 0x57, 0x2b, 0x12, 0x2c, 0x56, + 0x24, 0xb8, 0x59, 0x91, 0xe0, 0xf3, 0x2b, 0x99, 0xd8, 0x59, 0x39, 0x8d, 0x62, 0x95, 0x51, 0x59, + 0xf0, 0x33, 0x9e, 0x73, 0x9a, 0xaa, 0x79, 0x42, 0x2f, 0x8e, 0xe8, 0xff, 0x1c, 0xf9, 0xb4, 0xe3, + 0x8f, 0xe1, 0xe8, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x51, 0xf5, 0xc5, 0x0d, 0x13, 0x03, 0x00, + 0x00, } func (this *MockRequest) Equal(that interface{}) bool { diff --git a/pkg/storage/chunk/cache/resultscache/test_types.proto b/pkg/storage/chunk/cache/resultscache/test_types.proto index 920db66314de4..bd313a9b85a0b 100644 --- a/pkg/storage/chunk/cache/resultscache/test_types.proto +++ b/pkg/storage/chunk/cache/resultscache/test_types.proto @@ -6,7 +6,7 @@ import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; import "types.proto"; -option go_package = "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache"; +option go_package = "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/storage/chunk/cache/resultscache/types.pb.go b/pkg/storage/chunk/cache/resultscache/types.pb.go index 7c63abdda4bf6..6459262f46840 100644 --- a/pkg/storage/chunk/cache/resultscache/types.pb.go +++ b/pkg/storage/chunk/cache/resultscache/types.pb.go @@ -200,33 +200,33 @@ func init() { } var fileDescriptor_6b13efd4ce8649ef = []byte{ 
- // 404 bytes of a gzipped FileDescriptorProto + // 406 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x6e, 0xd4, 0x30, - 0x18, 0xc7, 0xe3, 0xde, 0x5d, 0x2f, 0x35, 0x15, 0xa0, 0xa8, 0x43, 0x7a, 0x83, 0x73, 0xba, 0xa9, - 0x03, 0xc4, 0x08, 0x56, 0x04, 0x22, 0x88, 0x01, 0x16, 0x24, 0x8f, 0x2c, 0xc8, 0x49, 0x5c, 0x27, - 0x4a, 0xb0, 0x23, 0xdb, 0x91, 0xc8, 0xc6, 0x23, 0xf0, 0x18, 0x0c, 0x3c, 0x48, 0xc7, 0x1b, 0x3b, - 0x45, 0x5c, 0x6e, 0x41, 0x99, 0xfa, 0x08, 0x28, 0x36, 0x77, 0xea, 0xd8, 0xe5, 0xf3, 0xf7, 0xff, - 0xfe, 0x7f, 0xc9, 0x3f, 0x7f, 0x32, 0x7c, 0xd1, 0x54, 0x1c, 0x6b, 0x23, 0x15, 0xe5, 0x0c, 0x67, - 0x45, 0x2b, 0x2a, 0x9c, 0xd1, 0xac, 0x60, 0x58, 0x31, 0xdd, 0xd6, 0x46, 0x3b, 0x61, 0xba, 0x86, - 0xe9, 0xb8, 0x51, 0xd2, 0xc8, 0xe0, 0xfc, 0xbe, 0xb3, 0xba, 0xe0, 0x92, 0x4b, 0x6b, 0xe0, 0xa9, - 0x73, 0x99, 0xd5, 0x25, 0x97, 0x92, 0xd7, 0x0c, 0x5b, 0x95, 0xb6, 0xd7, 0x98, 0x8a, 0xce, 0x59, - 0x9b, 0x67, 0xf0, 0xf1, 0x7b, 0x9a, 0x15, 0xa5, 0xe0, 0x9f, 0x1b, 0x53, 0x4a, 0xa1, 0x83, 0x15, - 0xf4, 0xf3, 0x52, 0xd3, 0xb4, 0x66, 0x79, 0x08, 0xd6, 0xe0, 0xca, 0x27, 0x47, 0xbd, 0xa9, 0x5d, - 0x9a, 0xe5, 0x84, 0xe9, 0x46, 0x0a, 0xcd, 0x82, 0x4b, 0x38, 0xab, 0x58, 0x67, 0x83, 0x67, 0xc9, - 0x72, 0xec, 0xa3, 0x49, 0x92, 0xa9, 0x04, 0x6f, 0xe1, 0x92, 0x7d, 0x37, 0x4c, 0x18, 0x1d, 0x9e, - 0xac, 0x67, 0x57, 0x8f, 0x5e, 0x5e, 0xc4, 0xf7, 0x59, 0xe3, 0x0f, 0xd6, 0x4c, 0x9e, 0xdc, 0xf4, - 0x91, 0x37, 0xf6, 0xd1, 0x21, 0x4c, 0x0e, 0xcd, 0xe6, 0x37, 0x80, 0xa7, 0x2e, 0x14, 0x44, 0x70, - 0xa1, 0x0d, 0x55, 0xc6, 0x5e, 0x34, 0x4b, 0xce, 0xc6, 0x3e, 0x72, 0x03, 0xe2, 0x8e, 0x89, 0x83, - 0x89, 0x3c, 0x3c, 0xb1, 0xb6, 0xe5, 0x60, 0x22, 0x27, 0x53, 0x09, 0xd6, 0xd0, 0x37, 0x8a, 0x66, - 0xec, 0x6b, 0x99, 0x87, 0x73, 0xcb, 0xb9, 0x18, 0xfb, 0x08, 0x3c, 0x27, 0x4b, 0x3b, 0xfe, 0x98, - 0x07, 0x6f, 0xa0, 0xaf, 0xfe, 0x3f, 0x28, 0x5c, 0xac, 0x81, 0x45, 0x75, 0x2b, 0x8b, 0x0f, 0x2b, - 0x8b, 0xdf, 0x89, 0x2e, 0x39, 0x1f, 0xfb, 0xe8, 0x98, 0x24, 0xc7, 0xee, 0xd3, 0xdc, 0x9f, 0x3d, - 0x9d, 0x27, 0x6a, 0xbb, 0x43, 0xde, 0xed, 0x0e, 0x79, 0x77, 0x3b, 0x04, 0x7e, 0x0c, 0x08, 0xfc, - 0x1a, 0x10, 0xb8, 0x19, 0x10, 0xd8, 0x0e, 0x08, 0xfc, 0x19, 0x10, 0xf8, 0x3b, 0x20, 0xef, 0x6e, - 0x40, 0xe0, 0xe7, 0x1e, 0x79, 0xdb, 0x3d, 0xf2, 0x6e, 0xf7, 0xc8, 0xfb, 0xf2, 0x9a, 0x97, 0xa6, - 0x68, 0xd3, 0x38, 0x93, 0xdf, 0x30, 0x57, 0xf4, 0x9a, 0x0a, 0x8a, 0x6b, 0x59, 0x95, 0xf8, 0x21, - 0x3f, 0x21, 0x3d, 0xb5, 0x7c, 0xaf, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xa9, 0xc6, 0x6f, 0x43, - 0x38, 0x02, 0x00, 0x00, + 0x18, 0xc7, 0xe3, 0xde, 0x5d, 0x2f, 0x35, 0x15, 0xa0, 0xa8, 0x43, 0x7a, 0x83, 0x13, 0xdd, 0xd4, + 0x01, 0x62, 0x44, 0x77, 0x2a, 0x82, 0x18, 0x60, 0x41, 0xf2, 0xc8, 0x82, 0x9c, 0xc4, 0x75, 0xa2, + 0x04, 0x3b, 0x8a, 0x1d, 0x44, 0x36, 0x1e, 0x81, 0xc7, 0x60, 0xe0, 0x41, 0x3a, 0xde, 0xd8, 0x29, + 0xe2, 0x72, 0x0b, 0xca, 0xd4, 0x47, 0x40, 0xb1, 0xb9, 0xd3, 0x8d, 0x5d, 0x3e, 0x7f, 0xff, 0xef, + 0xff, 0x97, 0xfc, 0xf3, 0x27, 0xc3, 0x57, 0x75, 0xc9, 0xb1, 0xd2, 0xb2, 0xa1, 0x9c, 0xe1, 0x34, + 0x6f, 0x45, 0x89, 0x53, 0x9a, 0xe6, 0x0c, 0x37, 0x4c, 0xb5, 0x95, 0x56, 0x56, 0xe8, 0xae, 0x66, + 0x2a, 0xaa, 0x1b, 0xa9, 0xa5, 0x77, 0x7e, 0xec, 0xac, 0x2e, 0xb8, 0xe4, 0xd2, 0x18, 0x78, 0xea, + 0x6c, 0x66, 0x75, 0xc9, 0xa5, 0xe4, 0x15, 0xc3, 0x46, 0x25, 0xed, 0x2d, 0xa6, 0xa2, 0xb3, 0xd6, + 0xfa, 0x05, 0x7c, 0xfa, 0x8e, 0xa6, 0x79, 0x21, 0xf8, 0xa7, 0x5a, 0x17, 0x52, 0x28, 0x6f, 0x05, + 0xdd, 0xac, 0x50, 0x34, 0xa9, 0x58, 0xe6, 0x83, 0x10, 0x5c, 0xb9, 0xe4, 0xa0, 0xd7, 0x95, 0x4d, + 
0xb3, 0x8c, 0x30, 0x55, 0x4b, 0xa1, 0x98, 0x77, 0x09, 0x67, 0x25, 0xeb, 0x4c, 0xf0, 0x2c, 0x5e, + 0x8e, 0x7d, 0x30, 0x49, 0x32, 0x15, 0xef, 0x06, 0x2e, 0xd9, 0x77, 0xcd, 0x84, 0x56, 0xfe, 0x49, + 0x38, 0xbb, 0x7a, 0xf2, 0xfa, 0x22, 0x3a, 0x66, 0x8d, 0xde, 0x1b, 0x33, 0x7e, 0x76, 0xd7, 0x07, + 0xce, 0xd8, 0x07, 0xfb, 0x30, 0xd9, 0x37, 0xeb, 0xdf, 0x00, 0x9e, 0xda, 0x90, 0x17, 0xc0, 0x85, + 0xd2, 0xb4, 0xd1, 0xe6, 0xa2, 0x59, 0x7c, 0x36, 0xf6, 0x81, 0x1d, 0x10, 0x7b, 0x4c, 0x1c, 0x4c, + 0x64, 0xfe, 0x89, 0xb1, 0x0d, 0x07, 0x13, 0x19, 0x99, 0x8a, 0x17, 0x42, 0x57, 0x37, 0x34, 0x65, + 0x5f, 0x8a, 0xcc, 0x9f, 0x1b, 0xce, 0xc5, 0xd8, 0x07, 0xe0, 0x25, 0x59, 0x9a, 0xf1, 0x87, 0xcc, + 0x7b, 0x03, 0xdd, 0xe6, 0xff, 0x83, 0xfc, 0x45, 0x08, 0x0c, 0xaa, 0x5d, 0x59, 0xb4, 0x5f, 0x59, + 0xf4, 0x56, 0x74, 0xf1, 0xf9, 0xd8, 0x07, 0x87, 0x24, 0x39, 0x74, 0x1f, 0xe7, 0xee, 0xec, 0xf9, + 0x3c, 0x6e, 0x37, 0x5b, 0xe4, 0xdc, 0x6f, 0x91, 0xf3, 0xb0, 0x45, 0xe0, 0xc7, 0x80, 0xc0, 0xaf, + 0x01, 0x81, 0xbb, 0x01, 0x81, 0xcd, 0x80, 0xc0, 0x9f, 0x01, 0x81, 0xbf, 0x03, 0x72, 0x1e, 0x06, + 0x04, 0x7e, 0xee, 0x90, 0xb3, 0xd9, 0x21, 0xe7, 0x7e, 0x87, 0x9c, 0xcf, 0x37, 0xbc, 0xd0, 0x79, + 0x9b, 0x44, 0xa9, 0xfc, 0x8a, 0x79, 0x43, 0x6f, 0xa9, 0xa0, 0xb8, 0x92, 0x65, 0x81, 0xbf, 0x5d, + 0xe3, 0xc7, 0x7c, 0x86, 0xe4, 0xd4, 0x20, 0x5e, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x94, 0x7b, + 0xf9, 0x0a, 0x3b, 0x02, 0x00, 0x00, } func (this *CachingOptions) Equal(that interface{}) bool { diff --git a/pkg/storage/chunk/cache/resultscache/types.proto b/pkg/storage/chunk/cache/resultscache/types.proto index 835950a0581e7..33c78831a03e6 100644 --- a/pkg/storage/chunk/cache/resultscache/types.proto +++ b/pkg/storage/chunk/cache/resultscache/types.proto @@ -5,7 +5,7 @@ package resultscache; import "gogoproto/gogo.proto"; import "google/protobuf/any.proto"; -option go_package = "github.com/grafana/loki/pkg/storage/chunk/cache/resultscache"; +option go_package = "github.com/grafana/loki/v3/pkg/storage/chunk/cache/resultscache"; option (gogoproto.marshaler_all) = true; option (gogoproto.unmarshaler_all) = true; diff --git a/pkg/storage/chunk/cache/snappy.go b/pkg/storage/chunk/cache/snappy.go index 000827d1bcff5..8f05efc5b1f37 100644 --- a/pkg/storage/chunk/cache/snappy.go +++ b/pkg/storage/chunk/cache/snappy.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" "github.com/golang/snappy" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type snappyCache struct { diff --git a/pkg/storage/chunk/cache/stats.go b/pkg/storage/chunk/cache/stats.go index 3bdf0b017ef3e..3ce127e9e8b1a 100644 --- a/pkg/storage/chunk/cache/stats.go +++ b/pkg/storage/chunk/cache/stats.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type statsCollector struct { diff --git a/pkg/storage/chunk/cache/tiered.go b/pkg/storage/chunk/cache/tiered.go index 5ff128d34d34e..b6cdef38261fc 100644 --- a/pkg/storage/chunk/cache/tiered.go +++ b/pkg/storage/chunk/cache/tiered.go @@ -3,7 +3,7 @@ package cache import ( "context" - "github.com/grafana/loki/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) type tiered []Cache diff --git a/pkg/storage/chunk/cache/tiered_test.go b/pkg/storage/chunk/cache/tiered_test.go index e024fe9ab096f..662e57a51b61f 100644 --- a/pkg/storage/chunk/cache/tiered_test.go +++ b/pkg/storage/chunk/cache/tiered_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - 
"github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) func TestTieredSimple(t *testing.T) { diff --git a/pkg/storage/chunk/chunk.go b/pkg/storage/chunk/chunk.go index d316553d72fa6..e807b5fb87798 100644 --- a/pkg/storage/chunk/chunk.go +++ b/pkg/storage/chunk/chunk.go @@ -18,7 +18,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var ( diff --git a/pkg/storage/chunk/chunk_test.go b/pkg/storage/chunk/chunk_test.go index f3f9a6f78eef2..aa7334a67f702 100644 --- a/pkg/storage/chunk/chunk_test.go +++ b/pkg/storage/chunk/chunk_test.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" ) const userID = "userID" diff --git a/pkg/storage/chunk/client/alibaba/oss_object_client.go b/pkg/storage/chunk/client/alibaba/oss_object_client.go index b14b4d5a0c8e5..3e7674467ae30 100644 --- a/pkg/storage/chunk/client/alibaba/oss_object_client.go +++ b/pkg/storage/chunk/client/alibaba/oss_object_client.go @@ -12,8 +12,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/util/constants" ) const NoSuchKeyErr = "NoSuchKey" diff --git a/pkg/storage/chunk/client/aws/dynamodb_index_reader.go b/pkg/storage/chunk/client/aws/dynamodb_index_reader.go index 0498655f15921..4b1c4cd8a9e2d 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_index_reader.go +++ b/pkg/storage/chunk/client/aws/dynamodb_index_reader.go @@ -17,8 +17,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type dynamodbIndexReader struct { diff --git a/pkg/storage/chunk/client/aws/dynamodb_metrics.go b/pkg/storage/chunk/client/aws/dynamodb_metrics.go index f1fedfb20bffc..9d19f0a77ff5b 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_metrics.go +++ b/pkg/storage/chunk/client/aws/dynamodb_metrics.go @@ -5,7 +5,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type dynamoDBMetrics struct { diff --git a/pkg/storage/chunk/client/aws/dynamodb_storage_client.go b/pkg/storage/chunk/client/aws/dynamodb_storage_client.go index c48bf518cc23a..87fd24e127db0 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_storage_client.go +++ b/pkg/storage/chunk/client/aws/dynamodb_storage_client.go @@ -28,15 +28,15 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/storage/chunk" - chunkclient "github.com/grafana/loki/pkg/storage/chunk/client" - client_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - 
"github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk" + chunkclient "github.com/grafana/loki/v3/pkg/storage/chunk/client" + client_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go b/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go index 6c77f56d78bcc..6a4cb1238c5ff 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go +++ b/pkg/storage/chunk/client/aws/dynamodb_storage_client_test.go @@ -9,8 +9,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/pkg/storage/chunk/client/aws/dynamodb_table_client.go b/pkg/storage/chunk/client/aws/dynamodb_table_client.go index 330624a098ced..f4d9a1f7c1cd9 100644 --- a/pkg/storage/chunk/client/aws/dynamodb_table_client.go +++ b/pkg/storage/chunk/client/aws/dynamodb_table_client.go @@ -15,9 +15,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/log" ) // Pluggable auto-scaler implementation diff --git a/pkg/storage/chunk/client/aws/fixtures.go b/pkg/storage/chunk/client/aws/fixtures.go index d8cc8642a8d9e..b2ab65497b45d 100644 --- a/pkg/storage/chunk/client/aws/fixtures.go +++ b/pkg/storage/chunk/client/aws/fixtures.go @@ -8,10 +8,10 @@ import ( "github.com/grafana/dskit/backoff" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type fixture struct { diff --git a/pkg/storage/chunk/client/aws/metrics_autoscaling.go b/pkg/storage/chunk/client/aws/metrics_autoscaling.go index 7aee4df91a47b..a55be707bd51e 100644 --- a/pkg/storage/chunk/client/aws/metrics_autoscaling.go +++ b/pkg/storage/chunk/client/aws/metrics_autoscaling.go @@ -13,8 +13,8 @@ import ( promV1 "github.com/prometheus/client_golang/api/prometheus/v1" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go b/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go index 
6ea2fd90044b0..5e4d1a46a7b27 100644 --- a/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go +++ b/pkg/storage/chunk/client/aws/metrics_autoscaling_test.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/chunk/client/aws/mock.go b/pkg/storage/chunk/client/aws/mock.go index 2dec6c94addb3..b2c6ac8556689 100644 --- a/pkg/storage/chunk/client/aws/mock.go +++ b/pkg/storage/chunk/client/aws/mock.go @@ -18,7 +18,7 @@ import ( "github.com/aws/aws-sdk-go/service/s3/s3iface" "github.com/go-kit/log/level" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const arnPrefix = "arn:" diff --git a/pkg/storage/chunk/client/aws/s3_storage_client.go b/pkg/storage/chunk/client/aws/s3_storage_client.go index 0c2136801f812..bae0fce22df7f 100644 --- a/pkg/storage/chunk/client/aws/s3_storage_client.go +++ b/pkg/storage/chunk/client/aws/s3_storage_client.go @@ -27,13 +27,13 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - bucket_s3 "github.com/grafana/loki/pkg/storage/bucket/s3" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - storageawscommon "github.com/grafana/loki/pkg/storage/common/aws" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - loki_instrument "github.com/grafana/loki/pkg/util/instrument" + bucket_s3 "github.com/grafana/loki/v3/pkg/storage/bucket/s3" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + storageawscommon "github.com/grafana/loki/v3/pkg/storage/common/aws" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + loki_instrument "github.com/grafana/loki/v3/pkg/util/instrument" ) const ( diff --git a/pkg/storage/chunk/client/aws/s3_storage_client_test.go b/pkg/storage/chunk/client/aws/s3_storage_client_test.go index 769f8cf00665c..db9ba83c61831 100644 --- a/pkg/storage/chunk/client/aws/s3_storage_client_test.go +++ b/pkg/storage/chunk/client/aws/s3_storage_client_test.go @@ -20,7 +20,7 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" diff --git a/pkg/storage/chunk/client/aws/sse_config.go b/pkg/storage/chunk/client/aws/sse_config.go index 2ff3c4b4b7995..f39385cbe0151 100644 --- a/pkg/storage/chunk/client/aws/sse_config.go +++ b/pkg/storage/chunk/client/aws/sse_config.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - bucket_s3 "github.com/grafana/loki/pkg/storage/bucket/s3" + bucket_s3 "github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) const ( diff --git a/pkg/storage/chunk/client/aws/sse_config_test.go b/pkg/storage/chunk/client/aws/sse_config_test.go index b7c9aea6212f8..6f56f93b0d937 100644 --- a/pkg/storage/chunk/client/aws/sse_config_test.go +++ b/pkg/storage/chunk/client/aws/sse_config_test.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/assert" - s3 "github.com/grafana/loki/pkg/storage/bucket/s3" + s3 
"github.com/grafana/loki/v3/pkg/storage/bucket/s3" ) func TestNewSSEParsedConfig(t *testing.T) { diff --git a/pkg/storage/chunk/client/azure/blob_storage_client.go b/pkg/storage/chunk/client/azure/blob_storage_client.go index 0126c048e9b34..7c5f5bb496ca0 100644 --- a/pkg/storage/chunk/client/azure/blob_storage_client.go +++ b/pkg/storage/chunk/client/azure/blob_storage_client.go @@ -25,13 +25,13 @@ import ( "github.com/mattn/go-ieproxy" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - client_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - loki_instrument "github.com/grafana/loki/pkg/util/instrument" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + client_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + loki_instrument "github.com/grafana/loki/v3/pkg/util/instrument" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/chunk/client/azure/blob_storage_client_test.go b/pkg/storage/chunk/client/azure/blob_storage_client_test.go index 2cff02212841f..2f59934aabf20 100644 --- a/pkg/storage/chunk/client/azure/blob_storage_client_test.go +++ b/pkg/storage/chunk/client/azure/blob_storage_client_test.go @@ -17,7 +17,7 @@ import ( "github.com/stretchr/testify/suite" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) var metrics = NewBlobStorageMetrics() diff --git a/pkg/storage/chunk/client/baidubce/bos_storage_client.go b/pkg/storage/chunk/client/baidubce/bos_storage_client.go index 7b3fe633d66b4..30a9e97f4955f 100644 --- a/pkg/storage/chunk/client/baidubce/bos_storage_client.go +++ b/pkg/storage/chunk/client/baidubce/bos_storage_client.go @@ -14,8 +14,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/util/constants" ) // NoSuchKeyErr The resource you requested does not exist. diff --git a/pkg/storage/chunk/client/cassandra/fixtures.go b/pkg/storage/chunk/client/cassandra/fixtures.go index f1ddb1de65f42..e837dba110a31 100644 --- a/pkg/storage/chunk/client/cassandra/fixtures.go +++ b/pkg/storage/chunk/client/cassandra/fixtures.go @@ -7,10 +7,10 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) // GOCQL doesn't provide nice mocks, so we use a real Cassandra instance. 
diff --git a/pkg/storage/chunk/client/cassandra/instrumentation.go b/pkg/storage/chunk/client/cassandra/instrumentation.go index 31db2b1f542be..756f18065b579 100644 --- a/pkg/storage/chunk/client/cassandra/instrumentation.go +++ b/pkg/storage/chunk/client/cassandra/instrumentation.go @@ -7,7 +7,7 @@ import ( "github.com/gocql/gocql" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var requestDuration = prometheus.NewHistogramVec(prometheus.HistogramOpts{ diff --git a/pkg/storage/chunk/client/cassandra/storage_client.go b/pkg/storage/chunk/client/cassandra/storage_client.go index e58de9b19e698..d847f9d6b7e2d 100644 --- a/pkg/storage/chunk/client/cassandra/storage_client.go +++ b/pkg/storage/chunk/client/cassandra/storage_client.go @@ -19,11 +19,11 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/sync/semaphore" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Config for a StorageClient diff --git a/pkg/storage/chunk/client/cassandra/table_client.go b/pkg/storage/chunk/client/cassandra/table_client.go index fe8d7549d7467..f46b886b3d61d 100644 --- a/pkg/storage/chunk/client/cassandra/table_client.go +++ b/pkg/storage/chunk/client/cassandra/table_client.go @@ -9,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type tableClient struct { diff --git a/pkg/storage/chunk/client/client.go b/pkg/storage/chunk/client/client.go index 76efb022f73e8..36b65d40b6c2e 100644 --- a/pkg/storage/chunk/client/client.go +++ b/pkg/storage/chunk/client/client.go @@ -4,8 +4,8 @@ import ( "context" "errors" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) var ( diff --git a/pkg/storage/chunk/client/congestion/config.go b/pkg/storage/chunk/client/congestion/config.go index 47d86646ad25b..61211d6487ccc 100644 --- a/pkg/storage/chunk/client/congestion/config.go +++ b/pkg/storage/chunk/client/congestion/config.go @@ -4,7 +4,7 @@ import ( "flag" "fmt" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) type Config struct { diff --git a/pkg/storage/chunk/client/congestion/controller.go b/pkg/storage/chunk/client/congestion/controller.go index f1f69a29621e6..e7f29fab47638 100644 --- a/pkg/storage/chunk/client/congestion/controller.go +++ b/pkg/storage/chunk/client/congestion/controller.go @@ -10,8 +10,8 @@ import ( "github.com/go-kit/log" "golang.org/x/time/rate" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client" ) // AIMDController implements the Additive-Increase/Multiplicative-Decrease algorithm which is used in TCP congestion avoidance. diff --git a/pkg/storage/chunk/client/congestion/controller_test.go b/pkg/storage/chunk/client/congestion/controller_test.go index 6ecc208c1a3f9..74620d334ff9f 100644 --- a/pkg/storage/chunk/client/congestion/controller_test.go +++ b/pkg/storage/chunk/client/congestion/controller_test.go @@ -14,8 +14,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) var errFakeFailure = errors.New("fake failure") diff --git a/pkg/storage/chunk/client/congestion/hedge.go b/pkg/storage/chunk/client/congestion/hedge.go index 3f7d99b971117..4a7cc265ebf13 100644 --- a/pkg/storage/chunk/client/congestion/hedge.go +++ b/pkg/storage/chunk/client/congestion/hedge.go @@ -5,7 +5,7 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) type NoopHedger struct{} diff --git a/pkg/storage/chunk/client/congestion/interfaces.go b/pkg/storage/chunk/client/congestion/interfaces.go index 7266eea67bfe3..ba7e8b40d5745 100644 --- a/pkg/storage/chunk/client/congestion/interfaces.go +++ b/pkg/storage/chunk/client/congestion/interfaces.go @@ -6,8 +6,8 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) // Controller handles congestion by: diff --git a/pkg/storage/chunk/client/congestion/metrics.go b/pkg/storage/chunk/client/congestion/metrics.go index 83c035c806dc5..78684a4e40893 100644 --- a/pkg/storage/chunk/client/congestion/metrics.go +++ b/pkg/storage/chunk/client/congestion/metrics.go @@ -1,7 +1,7 @@ package congestion import ( - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" "github.com/prometheus/client_golang/prometheus" ) diff --git a/pkg/storage/chunk/client/gcp/bigtable_index_client.go b/pkg/storage/chunk/client/gcp/bigtable_index_client.go index 6385b1c2f3cd7..28bb8276c386c 100644 --- a/pkg/storage/chunk/client/gcp/bigtable_index_client.go +++ b/pkg/storage/chunk/client/gcp/bigtable_index_client.go @@ -15,11 +15,11 @@ import ( ot "github.com/opentracing/opentracing-go" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/math" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/math" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/chunk/client/gcp/bigtable_object_client.go b/pkg/storage/chunk/client/gcp/bigtable_object_client.go index b9b4b71e2f3ed..d878bc19bccf0 100644 --- a/pkg/storage/chunk/client/gcp/bigtable_object_client.go +++ b/pkg/storage/chunk/client/gcp/bigtable_object_client.go @@ -9,10 +9,10 @@ import ( otlog 
"github.com/opentracing/opentracing-go/log" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/math" ) type bigtableObjectClient struct { diff --git a/pkg/storage/chunk/client/gcp/fixtures.go b/pkg/storage/chunk/client/gcp/fixtures.go index 06debee43d089..fc0d04d115597 100644 --- a/pkg/storage/chunk/client/gcp/fixtures.go +++ b/pkg/storage/chunk/client/gcp/fixtures.go @@ -13,11 +13,11 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/chunk/client/gcp/gcs_object_client.go b/pkg/storage/chunk/client/gcp/gcs_object_client.go index 2e340c038e723..2f724e159ae2b 100644 --- a/pkg/storage/chunk/client/gcp/gcs_object_client.go +++ b/pkg/storage/chunk/client/gcp/gcs_object_client.go @@ -19,9 +19,9 @@ import ( google_http "google.golang.org/api/transport/http" amnet "k8s.io/apimachinery/pkg/util/net" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) type ClientFactory func(ctx context.Context, opts ...option.ClientOption) (*storage.Client, error) diff --git a/pkg/storage/chunk/client/gcp/gcs_object_client_test.go b/pkg/storage/chunk/client/gcp/gcs_object_client_test.go index ac3e7a77dcd61..230067f9e9508 100644 --- a/pkg/storage/chunk/client/gcp/gcs_object_client_test.go +++ b/pkg/storage/chunk/client/gcp/gcs_object_client_test.go @@ -14,7 +14,7 @@ import ( "go.uber.org/atomic" "google.golang.org/api/option" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) func Test_Hedging(t *testing.T) { diff --git a/pkg/storage/chunk/client/gcp/instrumentation.go b/pkg/storage/chunk/client/gcp/instrumentation.go index 5f6a6cb066f07..bd35a2e9f59c6 100644 --- a/pkg/storage/chunk/client/gcp/instrumentation.go +++ b/pkg/storage/chunk/client/gcp/instrumentation.go @@ -13,7 +13,7 @@ import ( "google.golang.org/api/option" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/storage/chunk/client/gcp/table_client.go b/pkg/storage/chunk/client/gcp/table_client.go index 5e1819746f1af..24be25b76fda4 100644 --- a/pkg/storage/chunk/client/gcp/table_client.go +++ b/pkg/storage/chunk/client/gcp/table_client.go @@ -11,8 +11,8 @@ import ( "github.com/pkg/errors" - 
"github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type tableClient struct { diff --git a/pkg/storage/chunk/client/grpc/grpc_client_test.go b/pkg/storage/chunk/client/grpc/grpc_client_test.go index a9cf2c89af271..dc040cb5aecab 100644 --- a/pkg/storage/chunk/client/grpc/grpc_client_test.go +++ b/pkg/storage/chunk/client/grpc/grpc_client_test.go @@ -7,11 +7,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) // This includes test for all RPCs in diff --git a/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go b/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go index 420990766ff3b..e5ed3456fd081 100644 --- a/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go +++ b/pkg/storage/chunk/client/grpc/grpc_server_mock_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) type server struct { diff --git a/pkg/storage/chunk/client/grpc/index_client.go b/pkg/storage/chunk/client/grpc/index_client.go index 6e2c5bd3a644a..b40576519c061 100644 --- a/pkg/storage/chunk/client/grpc/index_client.go +++ b/pkg/storage/chunk/client/grpc/index_client.go @@ -6,8 +6,8 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func (w *WriteBatch) Add(tableName, hashValue string, rangeValue []byte, value []byte) { diff --git a/pkg/storage/chunk/client/grpc/storage_client.go b/pkg/storage/chunk/client/grpc/storage_client.go index 0a2b983f32fcd..42ee00507e412 100644 --- a/pkg/storage/chunk/client/grpc/storage_client.go +++ b/pkg/storage/chunk/client/grpc/storage_client.go @@ -7,8 +7,8 @@ import ( "github.com/pkg/errors" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) type StorageClient struct { diff --git a/pkg/storage/chunk/client/grpc/table_client.go b/pkg/storage/chunk/client/grpc/table_client.go index ab68d354d2cce..2abdefc4e98f1 100644 --- a/pkg/storage/chunk/client/grpc/table_client.go +++ b/pkg/storage/chunk/client/grpc/table_client.go @@ -7,7 +7,7 @@ import ( "github.com/pkg/errors" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) type TableClient struct { diff --git a/pkg/storage/chunk/client/ibmcloud/cos_object_client.go b/pkg/storage/chunk/client/ibmcloud/cos_object_client.go index c576dd2da4751..c9d534ae4163f 100644 --- a/pkg/storage/chunk/client/ibmcloud/cos_object_client.go +++ 
b/pkg/storage/chunk/client/ibmcloud/cos_object_client.go @@ -27,10 +27,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/log" ) const defaultCOSAuthEndpoint = "https://iam.cloud.ibm.com/identity/token" diff --git a/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go b/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go index 4b6eb7faece38..f6959b3f31d81 100644 --- a/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go +++ b/pkg/storage/chunk/client/ibmcloud/cos_object_client_test.go @@ -24,8 +24,8 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) var ( diff --git a/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go b/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go index 69f4424f72616..b49c9ead51715 100644 --- a/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go +++ b/pkg/storage/chunk/client/ibmcloud/trusted_profile_authentication_provider.go @@ -10,7 +10,7 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/chunk/client/local/boltdb_index_client.go b/pkg/storage/chunk/client/local/boltdb_index_client.go index 42e2dddb784e4..de79d1eb945aa 100644 --- a/pkg/storage/chunk/client/local/boltdb_index_client.go +++ b/pkg/storage/chunk/client/local/boltdb_index_client.go @@ -15,9 +15,9 @@ import ( "github.com/pkg/errors" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/storage/chunk/client/local/boltdb_index_client_test.go b/pkg/storage/chunk/client/local/boltdb_index_client_test.go index 2b26b5cc32cf9..76dcd7afd54a7 100644 --- a/pkg/storage/chunk/client/local/boltdb_index_client_test.go +++ b/pkg/storage/chunk/client/local/boltdb_index_client_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) var ( diff --git a/pkg/storage/chunk/client/local/boltdb_table_client.go b/pkg/storage/chunk/client/local/boltdb_table_client.go index cad8790b836d1..df30db04d29ac 100644 --- a/pkg/storage/chunk/client/local/boltdb_table_client.go +++ b/pkg/storage/chunk/client/local/boltdb_table_client.go @@ -5,8 +5,8 @@ import ( "os" "path/filepath" - "github.com/grafana/loki/pkg/storage/config" - 
"github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type TableClient struct { diff --git a/pkg/storage/chunk/client/local/fixtures.go b/pkg/storage/chunk/client/local/fixtures.go index 6f86734d7e670..fc344185c5020 100644 --- a/pkg/storage/chunk/client/local/fixtures.go +++ b/pkg/storage/chunk/client/local/fixtures.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type fixture struct { diff --git a/pkg/storage/chunk/client/local/fs_object_client.go b/pkg/storage/chunk/client/local/fs_object_client.go index deee987a27abe..41e911cb28c03 100644 --- a/pkg/storage/chunk/client/local/fs_object_client.go +++ b/pkg/storage/chunk/client/local/fs_object_client.go @@ -13,10 +13,10 @@ import ( "github.com/grafana/dskit/runutil" "github.com/pkg/errors" - "github.com/grafana/loki/pkg/ruler/rulestore/local" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/ruler/rulestore/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // FSConfig is the config for a FSObjectClient. diff --git a/pkg/storage/chunk/client/local/fs_object_client_test.go b/pkg/storage/chunk/client/local/fs_object_client_test.go index 09bf69530befb..2dc059b3f5f1a 100644 --- a/pkg/storage/chunk/client/local/fs_object_client_test.go +++ b/pkg/storage/chunk/client/local/fs_object_client_test.go @@ -12,7 +12,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) func TestFSObjectClient_DeleteChunksBefore(t *testing.T) { diff --git a/pkg/storage/chunk/client/metrics.go b/pkg/storage/chunk/client/metrics.go index 4f507621a3a4c..76ca20a1bac5f 100644 --- a/pkg/storage/chunk/client/metrics.go +++ b/pkg/storage/chunk/client/metrics.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/constants" ) // takes a chunk client and exposes metrics for its operations. 
diff --git a/pkg/storage/chunk/client/object_client.go b/pkg/storage/chunk/client/object_client.go index a61fc501fec13..7a3b2e40c1663 100644 --- a/pkg/storage/chunk/client/object_client.go +++ b/pkg/storage/chunk/client/object_client.go @@ -10,9 +10,9 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" ) // ObjectClient is used to store arbitrary data in Object Store (S3/GCS/Azure/...) diff --git a/pkg/storage/chunk/client/object_client_test.go b/pkg/storage/chunk/client/object_client_test.go index 0b9e659b91944..f27f5d964150d 100644 --- a/pkg/storage/chunk/client/object_client_test.go +++ b/pkg/storage/chunk/client/object_client_test.go @@ -7,9 +7,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) func MustParseDayTime(s string) config.DayTime { diff --git a/pkg/storage/chunk/client/openstack/swift_object_client.go b/pkg/storage/chunk/client/openstack/swift_object_client.go index ee29ac4a6ca71..96b836b0a909f 100644 --- a/pkg/storage/chunk/client/openstack/swift_object_client.go +++ b/pkg/storage/chunk/client/openstack/swift_object_client.go @@ -13,10 +13,10 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - bucket_swift "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/util/log" + bucket_swift "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/util/log" ) var defaultTransport http.RoundTripper = &http.Transport{ diff --git a/pkg/storage/chunk/client/openstack/swift_object_client_test.go b/pkg/storage/chunk/client/openstack/swift_object_client_test.go index ad2bb173dc91a..ce2f130f1bfca 100644 --- a/pkg/storage/chunk/client/openstack/swift_object_client_test.go +++ b/pkg/storage/chunk/client/openstack/swift_object_client_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/bucket/swift" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/bucket/swift" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" ) type RoundTripperFunc func(*http.Request) (*http.Response, error) diff --git a/pkg/storage/chunk/client/testutils/inmemory_storage_client.go b/pkg/storage/chunk/client/testutils/inmemory_storage_client.go index 15e2ddb2564bd..5f2a95da76fdf 100644 --- a/pkg/storage/chunk/client/testutils/inmemory_storage_client.go +++ b/pkg/storage/chunk/client/testutils/inmemory_storage_client.go @@ -12,10 +12,10 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - 
"github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/log" ) type MockStorageMode int diff --git a/pkg/storage/chunk/client/testutils/testutils.go b/pkg/storage/chunk/client/testutils/testutils.go index 1ee5e95fd878c..2b35b612badca 100644 --- a/pkg/storage/chunk/client/testutils/testutils.go +++ b/pkg/storage/chunk/client/testutils/testutils.go @@ -12,13 +12,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - chunkclient "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + chunkclient "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/chunk/client/util/parallel_chunk_fetch.go b/pkg/storage/chunk/client/util/parallel_chunk_fetch.go index be77e2d556d12..c61fdcf1bd522 100644 --- a/pkg/storage/chunk/client/util/parallel_chunk_fetch.go +++ b/pkg/storage/chunk/client/util/parallel_chunk_fetch.go @@ -7,8 +7,8 @@ import ( "github.com/opentracing/opentracing-go" otlog "github.com/opentracing/opentracing-go/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var decodeContextPool = sync.Pool{ diff --git a/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go b/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go index 7fc48ca5366c8..98b654d9df074 100644 --- a/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go +++ b/pkg/storage/chunk/client/util/parallel_chunk_fetch_test.go @@ -4,7 +4,7 @@ import ( "context" "testing" - "github.com/grafana/loki/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk" ) func BenchmarkGetParallelChunks(b *testing.B) { diff --git a/pkg/storage/chunk/client/util/util.go b/pkg/storage/chunk/client/util/util.go index e49fad20136fb..3485552c220fd 100644 --- a/pkg/storage/chunk/client/util/util.go +++ b/pkg/storage/chunk/client/util/util.go @@ -8,8 +8,8 @@ import ( ot "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/math" ) // DoSingleQuery is the interface for indexes that don't support batching yet. 
diff --git a/pkg/storage/chunk/dummy.go b/pkg/storage/chunk/dummy.go index 7951ad2f80efb..a957d42e654dd 100644 --- a/pkg/storage/chunk/dummy.go +++ b/pkg/storage/chunk/dummy.go @@ -5,7 +5,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/util/filter" ) func newDummyChunk() *dummyChunk { diff --git a/pkg/storage/chunk/fetcher/fetcher.go b/pkg/storage/chunk/fetcher/fetcher.go index fd90f685e981e..7801143932842 100644 --- a/pkg/storage/chunk/fetcher/fetcher.go +++ b/pkg/storage/chunk/fetcher/fetcher.go @@ -12,14 +12,14 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/prometheus/promql" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var ( diff --git a/pkg/storage/chunk/fetcher/fetcher_test.go b/pkg/storage/chunk/fetcher/fetcher_test.go index d73974506d4a1..c6215bde5b980 100644 --- a/pkg/storage/chunk/fetcher/fetcher_test.go +++ b/pkg/storage/chunk/fetcher/fetcher_test.go @@ -12,13 +12,13 @@ import ( "github.com/stretchr/testify/assert" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" ) func Test(t *testing.T) { diff --git a/pkg/storage/chunk/interface.go b/pkg/storage/chunk/interface.go index cf3f619fa8d87..8da4312c60398 100644 --- a/pkg/storage/chunk/interface.go +++ b/pkg/storage/chunk/interface.go @@ -24,7 +24,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/util/filter" + "github.com/grafana/loki/v3/pkg/util/filter" ) // ChunkLen is the length of a chunk in bytes. diff --git a/pkg/storage/chunk/predicate.go b/pkg/storage/chunk/predicate.go index e200f28fb55e8..9c6abe226719a 100644 --- a/pkg/storage/chunk/predicate.go +++ b/pkg/storage/chunk/predicate.go @@ -3,7 +3,7 @@ package chunk import ( "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/querier/plan" ) // TODO(owen-d): rename. This is not a predicate and is confusing. 
diff --git a/pkg/storage/chunk/tests/by_key_test.go b/pkg/storage/chunk/tests/by_key_test.go index 1fada6bb632e9..e5ab23f8726a2 100644 --- a/pkg/storage/chunk/tests/by_key_test.go +++ b/pkg/storage/chunk/tests/by_key_test.go @@ -1,8 +1,8 @@ package tests import ( - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) // ByKey allow you to sort chunks by ID diff --git a/pkg/storage/chunk/tests/caching_fixtures_test.go b/pkg/storage/chunk/tests/caching_fixtures_test.go index 194f2b1276e0a..fbec1ac4a4fc8 100644 --- a/pkg/storage/chunk/tests/caching_fixtures_test.go +++ b/pkg/storage/chunk/tests/caching_fixtures_test.go @@ -8,14 +8,14 @@ import ( "github.com/grafana/dskit/flagext" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/validation" ) type fixture struct { diff --git a/pkg/storage/chunk/tests/chunk_client_test.go b/pkg/storage/chunk/tests/chunk_client_test.go index a584d7a74f9b2..c995225524c0f 100644 --- a/pkg/storage/chunk/tests/chunk_client_test.go +++ b/pkg/storage/chunk/tests/chunk_client_test.go @@ -12,11 +12,11 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func TestChunksBasic(t *testing.T) { diff --git a/pkg/storage/chunk/tests/index_client_test.go b/pkg/storage/chunk/tests/index_client_test.go index a4751f1fb1d31..0a2545206cdae 100644 --- a/pkg/storage/chunk/tests/index_client_test.go +++ b/pkg/storage/chunk/tests/index_client_test.go @@ -11,9 +11,9 @@ import ( "github.com/grafana/dskit/user" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) var ctx = user.InjectOrgID(context.Background(), "1") diff --git a/pkg/storage/chunk/tests/utils_test.go b/pkg/storage/chunk/tests/utils_test.go index e885e1d53bcdb..80e6e1b659f02 100644 --- 
a/pkg/storage/chunk/tests/utils_test.go +++ b/pkg/storage/chunk/tests/utils_test.go @@ -5,13 +5,13 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/cassandra" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/cassandra" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/common/aws/storage_class.go b/pkg/storage/common/aws/storage_class.go index b284c7326fbb0..ab3950e9d8c3c 100644 --- a/pkg/storage/common/aws/storage_class.go +++ b/pkg/storage/common/aws/storage_class.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) const ( diff --git a/pkg/storage/config/bench_test.go b/pkg/storage/config/bench_test.go index 8296f229bde1c..df5fb913b1f43 100644 --- a/pkg/storage/config/bench_test.go +++ b/pkg/storage/config/bench_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" ) func BenchmarkExternalKey(b *testing.B) { diff --git a/pkg/storage/config/schema_config.go b/pkg/storage/config/schema_config.go index 4b96a722cadb8..c7e72886b738e 100644 --- a/pkg/storage/config/schema_config.go +++ b/pkg/storage/config/schema_config.go @@ -17,10 +17,10 @@ import ( "github.com/prometheus/common/model" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/config/schema_config_test.go b/pkg/storage/config/schema_config_test.go index 06fd191b7092a..a4ed59933150c 100644 --- a/pkg/storage/config/schema_config_test.go +++ b/pkg/storage/config/schema_config_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" ) func TestChunkTableFor(t *testing.T) { diff --git a/pkg/storage/config/store.go b/pkg/storage/config/store.go index 14218bb9cfb12..8dbd57cdc2503 100644 --- a/pkg/storage/config/store.go +++ b/pkg/storage/config/store.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) type ChunkStoreConfig struct { diff --git 
a/pkg/storage/factory.go b/pkg/storage/factory.go index b619d978a7564..3660bfb4b2116 100644 --- a/pkg/storage/factory.go +++ b/pkg/storage/factory.go @@ -14,32 +14,32 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - "github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/cassandra" - "github.com/grafana/loki/pkg/storage/chunk/client/congestion" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/grpc" - "github.com/grafana/loki/pkg/storage/chunk/client/hedging" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/series/index" - bloomshipperconfig "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/gatewayclient" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/cassandra" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/congestion" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/grpc" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/hedging" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + bloomshipperconfig "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/gatewayclient" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git 
a/pkg/storage/factory_test.go b/pkg/storage/factory_test.go index 2588c9dc69dd1..5685424f199cd 100644 --- a/pkg/storage/factory_test.go +++ b/pkg/storage/factory_test.go @@ -12,15 +12,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/cassandra" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/cassandra" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func TestFactoryStop(t *testing.T) { diff --git a/pkg/storage/hack/main.go b/pkg/storage/hack/main.go index 93278b429c9af..f85e44a41ac5f 100644 --- a/pkg/storage/hack/main.go +++ b/pkg/storage/hack/main.go @@ -14,16 +14,16 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go index 5a6170c6c6fe4..4c741228eee30 100644 --- a/pkg/storage/lazy_chunk.go +++ b/pkg/storage/lazy_chunk.go @@ -7,13 +7,13 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // LazyChunk loads the chunk when it is accessed. 
diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 2244c02c924c6..6757e94e1e958 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -9,13 +9,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util" ) func TestLazyChunkIterator(t *testing.T) { diff --git a/pkg/storage/store.go b/pkg/storage/store.go index b582f7e6c7156..1a4fa386062f7 100644 --- a/pkg/storage/store.go +++ b/pkg/storage/store.go @@ -6,9 +6,9 @@ import ( "math" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" - lokilog "github.com/grafana/loki/pkg/logql/log" + lokilog "github.com/grafana/loki/v3/pkg/logql/log" "github.com/go-kit/log" "github.com/go-kit/log/level" @@ -19,28 +19,28 @@ import ( "github.com/grafana/dskit/tenant" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/congestion" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/series" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/gatewayclient" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/deletion" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/congestion" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/series" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/gatewayclient" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/deletion" ) var ( diff --git a/pkg/storage/store_test.go b/pkg/storage/store_test.go index 52df29c079acf..c59df5aba7bc1 100644 --- a/pkg/storage/store_test.go +++ b/pkg/storage/store_test.go @@ -13,7 +13,7 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" "github.com/cespare/xxhash/v2" "github.com/go-kit/log" @@ -23,25 +23,25 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/iter" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - lokilog "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/push" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/marshal" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/iter" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + lokilog "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/marshal" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/pkg/storage/stores/composite_store.go b/pkg/storage/stores/composite_store.go index 7df10aa076480..212cd94082837 100644 --- a/pkg/storage/stores/composite_store.go +++ b/pkg/storage/stores/composite_store.go @@ -7,15 +7,15 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" ) type ChunkWriter interface { diff --git a/pkg/storage/stores/composite_store_entry.go b/pkg/storage/stores/composite_store_entry.go index 200b701449a5d..d4590d199d9ee 100644 --- a/pkg/storage/stores/composite_store_entry.go +++ b/pkg/storage/stores/composite_store_entry.go @@ -5,24 +5,24 @@ import ( "fmt" "time" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/go-kit/log/level" "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/util/validation" ) type StoreLimits interface { diff --git a/pkg/storage/stores/composite_store_test.go b/pkg/storage/stores/composite_store_test.go index d3fc6ba4dacfc..3836243f38783 100644 --- a/pkg/storage/stores/composite_store_test.go +++ b/pkg/storage/stores/composite_store_test.go @@ -8,17 +8,17 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/dskit/test" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type mockStore int diff --git a/pkg/storage/stores/index/index.go b/pkg/storage/stores/index/index.go index db3ccfd2578e6..26b2a44880047 100644 --- 
a/pkg/storage/stores/index/index.go +++ b/pkg/storage/stores/index/index.go @@ -8,12 +8,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - loki_instrument "github.com/grafana/loki/pkg/util/instrument" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + loki_instrument "github.com/grafana/loki/v3/pkg/util/instrument" ) type Filterable interface { diff --git a/pkg/storage/stores/index/metrics.go b/pkg/storage/stores/index/metrics.go index 924122f950a80..2474a9ece07c1 100644 --- a/pkg/storage/stores/index/metrics.go +++ b/pkg/storage/stores/index/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type metrics struct { diff --git a/pkg/storage/stores/index/seriesvolume/volume.go b/pkg/storage/stores/index/seriesvolume/volume.go index b09ccd800e87f..0e079702ccf67 100644 --- a/pkg/storage/stores/index/seriesvolume/volume.go +++ b/pkg/storage/stores/index/seriesvolume/volume.go @@ -5,7 +5,7 @@ import ( "sort" "sync" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) const ( diff --git a/pkg/storage/stores/index/seriesvolume/volume_test.go b/pkg/storage/stores/index/seriesvolume/volume_test.go index 8f0ecb6eb266b..6487bb0260b95 100644 --- a/pkg/storage/stores/index/seriesvolume/volume_test.go +++ b/pkg/storage/stores/index/seriesvolume/volume_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) func Test_AddVolume(t *testing.T) { diff --git a/pkg/storage/stores/index/stats/stats.go b/pkg/storage/stores/index/stats/stats.go index 82d0791c1dc7c..088c21f76ca1d 100644 --- a/pkg/storage/stores/index/stats/stats.go +++ b/pkg/storage/stores/index/stats/stats.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/common/model" "github.com/willf/bloom" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var BloomPool PoolBloom diff --git a/pkg/storage/stores/series/index/caching_index_client.go b/pkg/storage/stores/series/index/caching_index_client.go index dd6e7348f8fd4..40181ba794c71 100644 --- a/pkg/storage/stores/series/index/caching_index_client.go +++ b/pkg/storage/stores/series/index/caching_index_client.go @@ -14,9 +14,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + 
"github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/storage/stores/series/index/caching_index_client_test.go b/pkg/storage/stores/series/index/caching_index_client_test.go index dcb7b90a82fa9..99a9264a0a0aa 100644 --- a/pkg/storage/stores/series/index/caching_index_client_test.go +++ b/pkg/storage/stores/series/index/caching_index_client_test.go @@ -14,9 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/validation" ) var ctx = user.InjectOrgID(context.Background(), "1") diff --git a/pkg/storage/stores/series/index/schema.go b/pkg/storage/stores/series/index/schema.go index 24ff305a064b8..5b60c5f9c6a69 100644 --- a/pkg/storage/stores/series/index/schema.go +++ b/pkg/storage/stores/series/index/schema.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/querier/astmapper" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/series/index/schema_config.go b/pkg/storage/stores/series/index/schema_config.go index c4b3f2dfe17cf..32a05fb7634b5 100644 --- a/pkg/storage/stores/series/index/schema_config.go +++ b/pkg/storage/stores/series/index/schema_config.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/storage/stores/series/index/schema_test.go b/pkg/storage/stores/series/index/schema_test.go index b70fd49a56f91..5a74936085268 100644 --- a/pkg/storage/stores/series/index/schema_test.go +++ b/pkg/storage/stores/series/index/schema_test.go @@ -12,8 +12,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/config" ) func TestDailyBuckets(t *testing.T) { diff --git a/pkg/storage/stores/series/index/table_client.go b/pkg/storage/stores/series/index/table_client.go index 8767fcaf3aaac..e32cc8968b192 100644 --- a/pkg/storage/stores/series/index/table_client.go +++ b/pkg/storage/stores/series/index/table_client.go @@ -3,7 +3,7 @@ package index import ( "context" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) // TableClient is a client for telling Dynamo what to do with tables. 
diff --git a/pkg/storage/stores/series/index/table_manager.go b/pkg/storage/stores/series/index/table_manager.go index c477ecf135105..414e08f494c89 100644 --- a/pkg/storage/stores/series/index/table_manager.go +++ b/pkg/storage/stores/series/index/table_manager.go @@ -20,8 +20,8 @@ import ( "github.com/prometheus/common/model" tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/stores/series/index/table_manager_test.go b/pkg/storage/stores/series/index/table_manager_test.go index 09512fd98ccd3..74429b48f9b3a 100644 --- a/pkg/storage/stores/series/index/table_manager_test.go +++ b/pkg/storage/stores/series/index/table_manager_test.go @@ -11,7 +11,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/pkg/storage/stores/series/series_index_gateway_store.go b/pkg/storage/stores/series/series_index_gateway_store.go index c3af8c0c8d3c6..b58979bd11a13 100644 --- a/pkg/storage/stores/series/series_index_gateway_store.go +++ b/pkg/storage/stores/series/series_index_gateway_store.go @@ -9,11 +9,11 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) // NB(owen-d): mostly modeled off of the proto-generated `logproto.IndexGatewayClient`, diff --git a/pkg/storage/stores/series/series_index_gateway_store_test.go b/pkg/storage/stores/series/series_index_gateway_store_test.go index 8c65881d3c5b3..48256220191e4 100644 --- a/pkg/storage/stores/series/series_index_gateway_store_test.go +++ b/pkg/storage/stores/series/series_index_gateway_store_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) type fakeClient struct { diff --git a/pkg/storage/stores/series/series_index_store.go b/pkg/storage/stores/series/series_index_store.go index 50a036db3762c..138fd17a7ab13 100644 --- a/pkg/storage/stores/series/series_index_store.go +++ b/pkg/storage/stores/series/series_index_store.go @@ -17,22 +17,22 @@ import ( "github.com/grafana/dskit/concurrency" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - storageerrors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/extract" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + storageerrors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/extract" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var ( diff --git a/pkg/storage/stores/series/series_store_test.go b/pkg/storage/stores/series/series_store_test.go index 582bb9a2fea52..2f1146a1d7376 100644 --- a/pkg/storage/stores/series/series_store_test.go +++ b/pkg/storage/stores/series/series_store_test.go @@ -17,18 +17,18 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) type configFactory func() config.ChunkStoreConfig diff --git a/pkg/storage/stores/series/series_store_utils.go b/pkg/storage/stores/series/series_store_utils.go index 1c8430c9d8fbe..9e7bde3daaef2 100644 --- a/pkg/storage/stores/series/series_store_utils.go +++ b/pkg/storage/stores/series/series_store_utils.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util" ) func filterChunksByTime(from, through model.Time, chunks []chunk.Chunk) []chunk.Chunk { diff --git a/pkg/storage/stores/series/series_store_utils_test.go 
b/pkg/storage/stores/series/series_store_utils_test.go index c2bd07ee401b4..35ef774c237ea 100644 --- a/pkg/storage/stores/series/series_store_utils_test.go +++ b/pkg/storage/stores/series/series_store_utils_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) // Refer to https://github.com/prometheus/prometheus/issues/2651. diff --git a/pkg/storage/stores/series_store_write.go b/pkg/storage/stores/series_store_write.go index db22c5caa1202..a36ae4510b8e3 100644 --- a/pkg/storage/stores/series_store_write.go +++ b/pkg/storage/stores/series_store_write.go @@ -9,12 +9,12 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) var ( diff --git a/pkg/storage/stores/series_store_write_test.go b/pkg/storage/stores/series_store_write_test.go index 9c8c2f4069333..823f5bf11f0a1 100644 --- a/pkg/storage/stores/series_store_write_test.go +++ b/pkg/storage/stores/series_store_write_test.go @@ -8,11 +8,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" ) type mockCache struct { diff --git a/pkg/storage/stores/shipper/bloomshipper/blockscache.go b/pkg/storage/stores/shipper/bloomshipper/blockscache.go index 767518273059a..b26a4ed5cbda5 100644 --- a/pkg/storage/stores/shipper/bloomshipper/blockscache.go +++ b/pkg/storage/stores/shipper/bloomshipper/blockscache.go @@ -15,9 +15,9 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go b/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go index 4ec69e6d5a666..1ddc465577fcf 100644 --- a/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/blockscache_test.go @@ -11,9 +11,9 @@ import ( "github.com/grafana/dskit/flagext" "github.com/stretchr/testify/require" - 
"github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" ) var ( diff --git a/pkg/storage/stores/shipper/bloomshipper/cache.go b/pkg/storage/stores/shipper/bloomshipper/cache.go index e7fcfaff1666a..3e08b53eac3f3 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache.go @@ -10,9 +10,9 @@ import ( "github.com/go-kit/log/level" "github.com/pkg/errors" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util" ) type CloseableBlockQuerier struct { diff --git a/pkg/storage/stores/shipper/bloomshipper/cache_test.go b/pkg/storage/stores/shipper/bloomshipper/cache_test.go index eb2a061c775bb..dd7a44e57cf7d 100644 --- a/pkg/storage/stores/shipper/bloomshipper/cache_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/cache_test.go @@ -11,8 +11,8 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" ) type mockCache[K comparable, V any] struct { diff --git a/pkg/storage/stores/shipper/bloomshipper/client.go b/pkg/storage/stores/shipper/bloomshipper/client.go index eef3b667d253e..4ac0c24732b69 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client.go +++ b/pkg/storage/stores/shipper/bloomshipper/client.go @@ -14,12 +14,12 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/util/encoding" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/util/encoding" ) const ( diff --git a/pkg/storage/stores/shipper/bloomshipper/client_test.go b/pkg/storage/stores/shipper/bloomshipper/client_test.go index cee23671b7216..9bfd3d1674f66 100644 --- a/pkg/storage/stores/shipper/bloomshipper/client_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/client_test.go @@ -13,9 +13,9 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/client/testutils" - "github.com/grafana/loki/pkg/storage/config" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/testutils" + "github.com/grafana/loki/v3/pkg/storage/config" ) func parseTime(s string) model.Time { diff --git 
a/pkg/storage/stores/shipper/bloomshipper/compress_utils.go b/pkg/storage/stores/shipper/bloomshipper/compress_utils.go index 57025113cea71..52de4a4da5820 100644 --- a/pkg/storage/stores/shipper/bloomshipper/compress_utils.go +++ b/pkg/storage/stores/shipper/bloomshipper/compress_utils.go @@ -6,7 +6,7 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func CompressBloomBlock(ref BlockRef, archivePath, localDst string, logger log.Logger) (Block, error) { diff --git a/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go b/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go index 11a6afb21af48..f0b1598dadf9e 100644 --- a/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/compress_utils_test.go @@ -10,7 +10,7 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func directoryDoesNotExist(path string) bool { diff --git a/pkg/storage/stores/shipper/bloomshipper/config/config.go b/pkg/storage/stores/shipper/bloomshipper/config/config.go index 89a2f30e2dd33..de1ad3a12034c 100644 --- a/pkg/storage/stores/shipper/bloomshipper/config/config.go +++ b/pkg/storage/stores/shipper/bloomshipper/config/config.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" ) type Config struct { diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher.go b/pkg/storage/stores/shipper/bloomshipper/fetcher.go index b9483675f21cc..936e120af8501 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher.go @@ -16,9 +16,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "k8s.io/utils/keymutex" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/util/constants" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/util/constants" ) var downloadQueueCapacity = 10000 diff --git a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go index ca3fc006c2688..43658f9ed2137 100644 --- a/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/fetcher_test.go @@ -14,10 +14,10 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" ) func makeMetas(t *testing.T, schemaCfg config.SchemaConfig, ts model.Time, keyspaces []v1.FingerprintBounds) []Meta { diff --git a/pkg/storage/stores/shipper/bloomshipper/interval.go b/pkg/storage/stores/shipper/bloomshipper/interval.go index 430bde1a76809..86e0aff919d4b 100644 --- a/pkg/storage/stores/shipper/bloomshipper/interval.go +++ 
b/pkg/storage/stores/shipper/bloomshipper/interval.go @@ -9,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/util/encoding" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Interval defines a time range with start end end time diff --git a/pkg/storage/stores/shipper/bloomshipper/interval_test.go b/pkg/storage/stores/shipper/bloomshipper/interval_test.go index 2914e18030578..044bffb0b5813 100644 --- a/pkg/storage/stores/shipper/bloomshipper/interval_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/interval_test.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/assert" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func Test_Interval_String(t *testing.T) { diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver.go b/pkg/storage/stores/shipper/bloomshipper/resolver.go index b93f84f827048..8f86ce7cb09ee 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver.go @@ -9,7 +9,7 @@ import ( "strconv" "strings" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) const ( diff --git a/pkg/storage/stores/shipper/bloomshipper/resolver_test.go b/pkg/storage/stores/shipper/bloomshipper/resolver_test.go index 151b3bc11bb47..ba45845ea9ba5 100644 --- a/pkg/storage/stores/shipper/bloomshipper/resolver_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/resolver_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func TestResolver_ParseMetaKey(t *testing.T) { diff --git a/pkg/storage/stores/shipper/bloomshipper/shipper.go b/pkg/storage/stores/shipper/bloomshipper/shipper.go index 66982bc065f87..09d4652fb9f61 100644 --- a/pkg/storage/stores/shipper/bloomshipper/shipper.go +++ b/pkg/storage/stores/shipper/bloomshipper/shipper.go @@ -5,7 +5,7 @@ import ( "fmt" "sort" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) type ForEachBlockCallback func(bq *v1.BlockQuerier, bounds v1.FingerprintBounds) error diff --git a/pkg/storage/stores/shipper/bloomshipper/shipper_test.go b/pkg/storage/stores/shipper/bloomshipper/shipper_test.go index a85132d379bb6..81e17a84b5279 100644 --- a/pkg/storage/stores/shipper/bloomshipper/shipper_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/shipper_test.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func TestBloomShipper_findBlocks(t *testing.T) { diff --git a/pkg/storage/stores/shipper/bloomshipper/store.go b/pkg/storage/stores/shipper/bloomshipper/store.go index 83d0db9e4296b..42e9b66eae6ad 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store.go +++ b/pkg/storage/stores/shipper/bloomshipper/store.go @@ -13,13 +13,13 @@ import ( "github.com/prometheus/common/model" "golang.org/x/exp/slices" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - 
"github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/storage/stores/shipper/bloomshipper/store_test.go b/pkg/storage/stores/shipper/bloomshipper/store_test.go index 3ba7b8d2b5dee..077a871e71fbb 100644 --- a/pkg/storage/stores/shipper/bloomshipper/store_test.go +++ b/pkg/storage/stores/shipper/bloomshipper/store_test.go @@ -15,13 +15,13 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - storageconfig "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/bloomshipper/config" + "github.com/grafana/loki/v3/pkg/storage" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + storageconfig "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/bloomshipper/config" ) func newMockBloomStore(t *testing.T) (*BloomStore, string, error) { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go index 584116b240417..cb73f9aa95bf0 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index.go @@ -12,14 +12,14 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - seriesindex "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - shipperutil "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + seriesindex "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + shipperutil "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) type CompactedIndex struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go index 
ace66e0f06749..043d36d00401e 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/compacted_index_test.go @@ -12,13 +12,13 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestCompactedIndex_IndexProcessor(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go index 402fb8471cc84..73e87e06e1e76 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/compactor/retention" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go index b017cb82f7fd4..3a5c4a96f3e5a 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/index_compactor.go @@ -5,8 +5,8 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go index 450ccfabfc1c6..7b2422fdc1149 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/config" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/config" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go index 509e5661a4d62..26e9aef596bf4 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/iterator_test.go @@ -15,11 +15,11 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor/retention" - 
"github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" ) func Test_ChunkIterator(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go index 497153625e214..2e53a37b44984 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/series.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/config" ) type userSeries struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go index d864d306a2ba7..bdd42afc935d6 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor.go @@ -15,11 +15,11 @@ import ( "github.com/pkg/errors" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go index 4a2b6d2371e7d..4fa6d598c8e36 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/table_compactor_test.go @@ -16,14 +16,14 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go index 27897df0dd02e..25ccb52e9b18e 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/chunkenc" - ingesterclient "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/chunkenc" + ingesterclient "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" ) // unsafeGetString is like yolostring but with a meaningful name diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go index 7d015316017a6..7e6be7bb1414e 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/compactor/util_test.go @@ -15,18 +15,18 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb" + shipper_util "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func dayFromTime(t model.Time) config.DayTime { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/index.go b/pkg/storage/stores/shipper/indexshipper/boltdb/index.go index 6d548f02c3348..aa0a615c00eb7 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/index.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/index.go @@ -13,11 +13,11 @@ import ( "github.com/go-kit/log/level" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const TempFileSuffix = ".temp" diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go b/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go index c60c7feeebce0..07f67c4f35edb 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/index_client.go @@ -13,12 +13,12 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - series_index "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + series_index "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" ) type indexClientMetrics struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go b/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go index 54d6034b36be1..e27f4fb20dccc 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/querier.go @@ -7,9 +7,9 @@ import ( "github.com/grafana/dskit/tenant" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) type Writer interface { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table.go index 0db3ffa7f0a68..a5ed2ff0009a6 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table.go @@ -15,11 +15,11 @@ import ( "github.com/grafana/dskit/tenant" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipper_util "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipper_util 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go index 29fa04deb326b..4bc00d082d293 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager.go @@ -16,12 +16,12 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" ) type tableManagerMetrics struct { diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go index 2f8b9f2d4b05c..9cd73fe3e60c6 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table_manager_test.go @@ -12,12 +12,12 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" ) const indexTablePeriod = 24 * time.Hour diff --git a/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go b/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go index 44c583e7966cc..4066149c042aa 100644 --- a/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go +++ b/pkg/storage/stores/shipper/indexshipper/boltdb/table_test.go @@ -13,11 +13,11 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + 
"github.com/grafana/loki/v3/pkg/storage/stores/series/index" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go b/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go index f9c12edd9c6f3..1be495ed46d99 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/index_set.go @@ -17,11 +17,11 @@ import ( "github.com/grafana/dskit/concurrency" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go b/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go index a0cadf1da6cdb..5a2f6522de9f2 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/index_set_test.go @@ -9,10 +9,10 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func buildTestIndexSet(t *testing.T, userID, path string) (*indexSet, stopFunc) { diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table.go b/pkg/storage/stores/shipper/indexshipper/downloads/table.go index 47c78924f2f5b..4767861300594 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table.go @@ -15,11 +15,11 @@ import ( "github.com/pkg/errors" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) // timeout for downloading initial files for a table to avoid leaking resources by allowing it to take all the time. 
diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go index 8d3875afe75cd..12e8a9373ed7d 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager.go @@ -14,12 +14,12 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/compactor/deletion" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/compactor/deletion" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go index b02912381e9db..e8d9e3efcc8d3 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table_manager_test.go @@ -11,11 +11,11 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go b/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go index cc88451704b65..a12bfc70cbc69 100644 --- a/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go +++ b/pkg/storage/stores/shipper/indexshipper/downloads/table_test.go @@ -12,9 +12,9 @@ import ( "github.com/pkg/errors" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go index a2054e806b0cf..472f6c019e85e 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client.go @@ -26,15 +26,15 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/codes" - 
"github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/discovery" - util_math "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/discovery" + util_math "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go index 0ec6e81c17754..1dd1bff4abf04 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/gateway_client_test.go @@ -20,12 +20,12 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/distributor/clientpool" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/distributor/clientpool" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/validation" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go b/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go index a769bb55c11e6..825809a252f22 100644 --- a/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go +++ b/pkg/storage/stores/shipper/indexshipper/gatewayclient/index_gateway_grpc_pool.go @@ -7,7 +7,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // IndexGatewayGRPCPool represents a pool of gRPC connections to different index gateway instances. 
diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go index eb5c134a5de18..e27af9516de07 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/config.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util/ring" + "github.com/grafana/loki/v3/pkg/util/ring" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go index 60bf8f1f7c113..99b79bd9922a7 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway.go @@ -18,20 +18,20 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/plan" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - seriesindex "github.com/grafana/loki/pkg/storage/stores/series/index" - tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/plan" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + seriesindex "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go index 52518f3995b77..bf6af7cb5110a 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/gateway_test.go @@ -14,16 +14,16 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/logproto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - util_test "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" - util_math "github.com/grafana/loki/pkg/util/math" + 
"github.com/grafana/loki/v3/pkg/logproto" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + util_test "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" + util_math "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go index de8edda70c08b..b2f6c16bda430 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/grpc.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "google.golang.org/grpc" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) type ServerInterceptors struct { diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go index dcf517ea468b7..6ea51e5623846 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/metrics.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go b/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go index c82efac95f025..07bd2e8aa97b7 100644 --- a/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go +++ b/pkg/storage/stores/shipper/indexshipper/indexgateway/shufflesharding.go @@ -4,7 +4,7 @@ import ( "github.com/grafana/dskit/ring" "github.com/pkg/errors" - lokiring "github.com/grafana/loki/pkg/util/ring" + lokiring "github.com/grafana/loki/v3/pkg/util/ring" ) var ( diff --git a/pkg/storage/stores/shipper/indexshipper/shipper.go b/pkg/storage/stores/shipper/indexshipper/shipper.go index abfda3b733108..169f7eeb79fee 100644 --- a/pkg/storage/stores/shipper/indexshipper/shipper.go +++ b/pkg/storage/stores/shipper/indexshipper/shipper.go @@ -14,14 +14,14 @@ import ( "github.com/prometheus/client_golang/prometheus" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/gatewayclient" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/uploads" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/gatewayclient" + 
"github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/uploads" ) type Mode string diff --git a/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go b/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go index f74c3ea8b4ac5..c7d909bc09844 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/cached_client.go @@ -11,9 +11,9 @@ import ( "github.com/go-kit/log/level" "golang.org/x/sync/singleflight" - "github.com/grafana/loki/pkg/storage/chunk/client" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/spanlogger" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/spanlogger" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go b/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go index 6e2c8a5def263..78c04bd4dc3eb 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/cached_client_test.go @@ -11,9 +11,9 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) var objectsMtime = time.Now().Local() diff --git a/pkg/storage/stores/shipper/indexshipper/storage/client.go b/pkg/storage/stores/shipper/indexshipper/storage/client.go index e8a3f30a2d825..d63504cd33090 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/client.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/client.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" ) const delimiter = "/" diff --git a/pkg/storage/stores/shipper/indexshipper/storage/client_test.go b/pkg/storage/stores/shipper/indexshipper/storage/client_test.go index c9b7da78b3a3f..1f766e0113802 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/client_test.go @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" ) func TestIndexStorageClient(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/storage/util_test.go b/pkg/storage/stores/shipper/indexshipper/storage/util_test.go index 3136d553c8d2c..330602821d78f 100644 --- a/pkg/storage/stores/shipper/indexshipper/storage/util_test.go +++ b/pkg/storage/stores/shipper/indexshipper/storage/util_test.go @@ -10,9 +10,9 @@ import ( gzip "github.com/klauspost/pgzip" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - util_log "github.com/grafana/loki/pkg/util/log" + 
"github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func Test_GetFileFromStorage(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/table_client.go b/pkg/storage/stores/shipper/indexshipper/table_client.go index 2d7de63006a61..6492dd94943eb 100644 --- a/pkg/storage/stores/shipper/indexshipper/table_client.go +++ b/pkg/storage/stores/shipper/indexshipper/table_client.go @@ -3,10 +3,10 @@ package indexshipper import ( "context" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) type tableClient struct { diff --git a/pkg/storage/stores/shipper/indexshipper/table_client_test.go b/pkg/storage/stores/shipper/indexshipper/table_client_test.go index 0b76ab64f99c5..7b84b74beeef3 100644 --- a/pkg/storage/stores/shipper/indexshipper/table_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/table_client_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func TestBoltDBShipperTableClient(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go b/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go index 6022128e36d49..48f5990dc0790 100644 --- a/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go +++ b/pkg/storage/stores/shipper/indexshipper/testutil/testutil.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/require" "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) func AddRecordsToDB(t testing.TB, path string, start, numRecords int, bucketName []byte) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go b/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go index ee59debb2e078..0e7af08d4d243 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/builder.go @@ -12,8 +12,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" - chunk_util "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + chunk_util "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Builder is a helper used to create tsdb indices. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go index 539cdd57fc5a6..9ccf972151a15 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/builder_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func Test_Build(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go index c88b0e124c9b5..5c2ae28d89351 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) const readDBsConcurrency = 50 diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go index 5032f6df085f5..5f8a5b1e6d9d5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/compactor_test.go @@ -17,17 +17,17 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log 
"github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head.go index 4c841b3741462..00e4f86b9ad90 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head.go @@ -22,7 +22,7 @@ import ( "github.com/prometheus/prometheus/storage" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) /* diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go index ad285bc32b158..5df88b04071e5 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager.go @@ -21,10 +21,10 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/wal" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/wal" ) /* diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go index c58e556457174..daa2e0bafa588 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_manager_test.go @@ -18,14 +18,14 @@ import ( "github.com/grafana/dskit/flagext" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/validation" ) type noopTSDBManager struct { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go index 203e951a435d5..3a0cf3cdbfc7d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_read.go @@ -21,7 +21,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Index returns an IndexReader against the block. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go index 386067ff5dbb3..d1a3dcf2dc046 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/prometheus/prometheus/tsdb/wlog" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/encoding" ) type WAL interface { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go index 28255a4a7ec88..5e9b5e7d505f8 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/head_wal_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/tsdb/record" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func Test_Encoding_Series(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index.go index 475446b15090f..a60b86b6a6e00 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index.go @@ -6,9 +6,9 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" ) type Series struct { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go index c5f107478c538..8094e19af0c5e 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Meta holds information about a chunk of data. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go index 1eab509d489a4..a1f98f1ab71ae 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/chunk_test.go @@ -8,7 +8,7 @@ import ( tsdb_enc "github.com/prometheus/prometheus/tsdb/encoding" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) // Test all sort variants diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go index 7aa429d367718..8d6f316acfa58 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index.go @@ -38,7 +38,7 @@ import ( tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" "github.com/prometheus/prometheus/tsdb/fileutil" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go index 6ef9ebee01105..2f8576b825649 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index/index_test.go @@ -35,7 +35,7 @@ import ( tsdb_enc "github.com/prometheus/prometheus/tsdb/encoding" "github.com/prometheus/prometheus/util/testutil" - "github.com/grafana/loki/pkg/util/encoding" + "github.com/grafana/loki/v3/pkg/util/encoding" ) func TestMain(m *testing.M) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go index 1a2115b26cdbc..47d33fe632faf 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client.go @@ -6,23 +6,23 @@ import ( "sync" "time" - "github.com/grafana/loki/pkg/logql" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/logql" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util" ) // implements stores.Index diff --git 
a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go index 596e53e62009f..c823a61528286 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_client_test.go @@ -11,10 +11,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" ) type mockIndexShipperIndexIterator struct { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go index dbc94b18c027a..b0d1824936d59 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/index_shipper_querier.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/config" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type indexShipperIterator interface { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go index db7b0e04720ed..ad3fb3b086200 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/lazy_index.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Index adapter for a function which returns an index when queried. 
diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go b/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go index 78ef447169ccd..0fed45d3252ee 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/manager.go @@ -15,10 +15,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // nolint:revive diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go index c0d2080bcbe35..0c6044843026b 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "golang.org/x/sync/errgroup" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type MultiIndex struct { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go index 945402f954f5b..8139c52b39fc0 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multi_file_index_test.go @@ -8,7 +8,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestMultiIndex(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go b/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go index 52de31078673a..403443a805c0d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/multitenant.go @@ -7,8 +7,8 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // TenantLabel is part of the reserved label namespace (__ prefix) diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go b/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go index f5a009d37bc12..fd1ad97587638 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/pool.go @@ -3,7 +3,7 @@ package tsdb import ( "sync" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) var ( diff --git 
a/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go b/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go index b1e3306b14d1f..48de47a70c3b4 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/querier.go @@ -22,7 +22,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // Bitmap used by func isRegexMetaCharacter to check whether a character needs to be escaped. diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go index a0873faeb6a20..a3c5caf5b81c4 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/querier_test.go @@ -10,8 +10,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func mustParseLabels(s string) labels.Labels { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go index 362665a022776..bca81214dc9ce 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/for_series.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) // General purpose iteration over series. 
Makes it easier to build custom functionality on top of indices diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go index 299cc21ea197b..257c198ee2d75 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power.go @@ -5,8 +5,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go index 5134b25c5cde0..940d6a53b554d 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/power_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) func TestGuessShardFactor(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go index 284468a9de315..b7f0e8cd46296 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding.go @@ -5,9 +5,9 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/queue" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/queue" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) var ( diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go index 49a3b12a8ff6e..fc476223848ae 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/sharding/sharding_test.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" ) func TestSizedFPs_Sort(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go index b7bff50e52d2d..7934b952ba88f 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + 
shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ErrAlreadyOnDesiredVersion = errors.New("tsdb file already on desired version") diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go index 9d7b80ce161f6..068630c553a04 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/single_file_index_test.go @@ -12,11 +12,11 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/stores/index/seriesvolume" - "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" + "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestSingleIdx(t *testing.T) { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/store.go b/pkg/storage/stores/shipper/indexshipper/tsdb/store.go index 8f97997c5d401..1ef58c32a1e56 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/store.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/store.go @@ -13,14 +13,14 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/downloads" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/downloads" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type IndexWriter interface { diff --git a/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go b/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go index a5a3651a7a138..2a3a6ca57eeca 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/testutil/objstore.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" "github.com/thanos-io/objstore" - "github.com/grafana/loki/pkg/storage/bucket/filesystem" + "github.com/grafana/loki/v3/pkg/storage/bucket/filesystem" ) func PrepareFilesystemBucket(t testing.TB) (objstore.Bucket, string) { diff --git 
a/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go b/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go index 64827a926e466..10957a3510b84 100644 --- a/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go +++ b/pkg/storage/stores/shipper/indexshipper/tsdb/util_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type LoadableSeries struct { diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go b/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go index 7cbe4b21b4d86..19bf88842b020 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/index_set.go @@ -11,10 +11,10 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type IndexSet interface { diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go b/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go index d6d772734195c..cc9b65e28588b 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/index_set_test.go @@ -10,10 +10,10 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/testutil" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/testutil" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const userID = "user-id" diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table.go b/pkg/storage/stores/shipper/indexshipper/uploads/table.go index 2e092ce3727a2..44698834a344a 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/table.go @@ -7,9 +7,9 @@ import ( "github.com/go-kit/log" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go index 6d9aa9e35d487..9fcdf43206c7e 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go +++ 
b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager.go @@ -9,8 +9,8 @@ import ( "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) type Config struct { diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go index 7013ff389c782..b307ee18d6b9a 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/table_manager_test.go @@ -10,9 +10,9 @@ import ( "github.com/go-kit/log" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" ) const objectsStorageDirName = "objects" diff --git a/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go b/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go index f67b3d3571c28..b58b05fa6ad9f 100644 --- a/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go +++ b/pkg/storage/stores/shipper/indexshipper/uploads/table_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/util/queries.go b/pkg/storage/stores/shipper/indexshipper/util/queries.go index 8da02c02d9794..46291a58a6f5b 100644 --- a/pkg/storage/stores/shipper/indexshipper/util/queries.go +++ b/pkg/storage/stores/shipper/indexshipper/util/queries.go @@ -6,8 +6,8 @@ import ( "github.com/grafana/dskit/concurrency" - "github.com/grafana/loki/pkg/storage/stores/series/index" - util_math "github.com/grafana/loki/pkg/util/math" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + util_math "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/storage/stores/shipper/indexshipper/util/queries_test.go b/pkg/storage/stores/shipper/indexshipper/util/queries_test.go index 1968a2737a758..a33da42c264f0 100644 --- a/pkg/storage/stores/shipper/indexshipper/util/queries_test.go +++ b/pkg/storage/stores/shipper/indexshipper/util/queries_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" ) type mockTableQuerier struct { diff --git a/pkg/storage/stores/shipper/indexshipper/util/util.go b/pkg/storage/stores/shipper/indexshipper/util/util.go index 9150ab34a0cbc..f47cea40d6d7d 100644 --- a/pkg/storage/stores/shipper/indexshipper/util/util.go +++ b/pkg/storage/stores/shipper/indexshipper/util/util.go @@ -9,7 +9,7 @@ import ( "go.etcd.io/bbolt" - "github.com/grafana/loki/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" ) const maxStackSize = 8 * 1024 
diff --git a/pkg/storage/util_test.go b/pkg/storage/util_test.go index 12e8168473af9..7743bce2fb0f9 100644 --- a/pkg/storage/util_test.go +++ b/pkg/storage/util_test.go @@ -11,24 +11,24 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/syntax" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/querier/astmapper" - "github.com/grafana/loki/pkg/querier/plan" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/cache" - chunkclient "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/chunk/fetcher" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores" - index_stats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - loki_util "github.com/grafana/loki/pkg/util" - "github.com/grafana/loki/pkg/util/constants" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/querier/astmapper" + "github.com/grafana/loki/v3/pkg/querier/plan" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + chunkclient "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/chunk/fetcher" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores" + index_stats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + loki_util "github.com/grafana/loki/v3/pkg/util" + "github.com/grafana/loki/v3/pkg/util/constants" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/util/config.go b/pkg/util/config.go index f54d469690c98..89d586b37e13e 100644 --- a/pkg/util/config.go +++ b/pkg/util/config.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/common/version" "gopkg.in/yaml.v2" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // LogConfig takes a pointer to a config object, marshalls it to YAML and prints each line in REVERSE order diff --git a/pkg/util/deletion/deletion.go b/pkg/util/deletion/deletion.go index e90b6a4c2f073..fd97205a6bf45 100644 --- a/pkg/util/deletion/deletion.go +++ b/pkg/util/deletion/deletion.go @@ -1,10 +1,10 @@ package deletion import ( - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/syntax" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logql/syntax" ) func SetupPipeline(req logql.SelectLogParams, p log.Pipeline) (log.Pipeline, error) { diff --git a/pkg/util/dns_watcher.go b/pkg/util/dns_watcher.go index 147af45e19e76..9fa698e872c06 100644 --- a/pkg/util/dns_watcher.go +++ b/pkg/util/dns_watcher.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/dskit/services" 
"github.com/pkg/errors" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // Notifications about address resolution. All notifications are sent on the same goroutine. diff --git a/pkg/util/errors.go b/pkg/util/errors.go index cb994e1b46ffe..f05decc6223cd 100644 --- a/pkg/util/errors.go +++ b/pkg/util/errors.go @@ -10,7 +10,7 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/status" - "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/log" ) // LogError logs any error returned by f; useful when deferring Close etc. diff --git a/pkg/util/extract/extract.go b/pkg/util/extract/extract.go index ad0eab10dcfb4..580eba0e2d927 100644 --- a/pkg/util/extract/extract.go +++ b/pkg/util/extract/extract.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) var errNoMetricNameLabel = fmt.Errorf("No metric name label") diff --git a/pkg/util/flagext/labelset.go b/pkg/util/flagext/labelset.go index 859e7eb07246e..79a72c07739e0 100644 --- a/pkg/util/flagext/labelset.go +++ b/pkg/util/flagext/labelset.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/common/model" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // LabelSet is a labelSet that can be used as a flag. diff --git a/pkg/util/http_test.go b/pkg/util/http_test.go index ba365f777de8f..d032085db5028 100644 --- a/pkg/util/http_test.go +++ b/pkg/util/http_test.go @@ -15,9 +15,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) func TestRenderHTTPResponse(t *testing.T) { diff --git a/pkg/util/httpgrpc/carrier.go b/pkg/util/httpgrpc/carrier.go index ab1753ef6c271..6b160d615e1bc 100644 --- a/pkg/util/httpgrpc/carrier.go +++ b/pkg/util/httpgrpc/carrier.go @@ -4,7 +4,7 @@ import ( weaveworks_httpgrpc "github.com/grafana/dskit/httpgrpc" "github.com/opentracing/opentracing-go" - "github.com/grafana/loki/pkg/querier/queryrange" + "github.com/grafana/loki/v3/pkg/querier/queryrange" ) type Request interface { diff --git a/pkg/util/limiter/combined_limits.go b/pkg/util/limiter/combined_limits.go index ba0bf50e8003e..b1bc467e6cac4 100644 --- a/pkg/util/limiter/combined_limits.go +++ b/pkg/util/limiter/combined_limits.go @@ -1,17 +1,17 @@ package limiter import ( - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - querier_limits "github.com/grafana/loki/pkg/querier/limits" - queryrange_limits "github.com/grafana/loki/pkg/querier/queryrange/limits" - "github.com/grafana/loki/pkg/ruler" - scheduler_limits "github.com/grafana/loki/pkg/scheduler/limits" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + querier_limits 
"github.com/grafana/loki/v3/pkg/querier/limits" + queryrange_limits "github.com/grafana/loki/v3/pkg/querier/queryrange/limits" + "github.com/grafana/loki/v3/pkg/ruler" + scheduler_limits "github.com/grafana/loki/v3/pkg/scheduler/limits" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" ) type CombinedLimits interface { diff --git a/pkg/util/limiter/query_limiter.go b/pkg/util/limiter/query_limiter.go index a827ad82227a7..430eee3ebc8be 100644 --- a/pkg/util/limiter/query_limiter.go +++ b/pkg/util/limiter/query_limiter.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/common/model" "go.uber.org/atomic" - "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/logproto" ) type queryLimiterCtxKey struct{} diff --git a/pkg/util/log/experimental.go b/pkg/util/log/experimental.go index ed26c06af347d..30aa39c33f3a2 100644 --- a/pkg/util/log/experimental.go +++ b/pkg/util/log/experimental.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var experimentalFeaturesInUse = promauto.NewCounter( diff --git a/pkg/util/log/log.go b/pkg/util/log/log.go index 7453b615118a0..93ccb86abf57a 100644 --- a/pkg/util/log/log.go +++ b/pkg/util/log/log.go @@ -16,7 +16,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" ) var ( diff --git a/pkg/util/loser/tree_test.go b/pkg/util/loser/tree_test.go index 956b420f129e6..9c6f5f1c2e99a 100644 --- a/pkg/util/loser/tree_test.go +++ b/pkg/util/loser/tree_test.go @@ -4,7 +4,7 @@ import ( "math" "testing" - "github.com/grafana/loki/pkg/util/loser" + "github.com/grafana/loki/v3/pkg/util/loser" ) type List struct { diff --git a/pkg/util/marshal/labels.go b/pkg/util/marshal/labels.go index 8998f133b921a..016e4bad44bd5 100644 --- a/pkg/util/marshal/labels.go +++ b/pkg/util/marshal/labels.go @@ -3,7 +3,7 @@ package marshal import ( "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) // NewLabelSet constructs a Labelset from a promql metric list as a string diff --git a/pkg/util/marshal/labels_test.go b/pkg/util/marshal/labels_test.go index 8095ec3a1a908..b877e3ba675c0 100644 --- a/pkg/util/marshal/labels_test.go +++ b/pkg/util/marshal/labels_test.go @@ -4,7 +4,7 @@ import ( "reflect" "testing" - "github.com/grafana/loki/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/loghttp" ) func TestNewLabelSet(t *testing.T) { diff --git a/pkg/util/marshal/legacy/marshal.go b/pkg/util/marshal/legacy/marshal.go index 867268f8e59ee..82dd100999bb8 100644 --- a/pkg/util/marshal/legacy/marshal.go +++ b/pkg/util/marshal/legacy/marshal.go @@ -9,9 +9,9 @@ import ( "github.com/gorilla/websocket" json "github.com/json-iterator/go" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // Note that the below methods directly marshal the values passed in. 
This is because these objects currently marshal diff --git a/pkg/util/marshal/legacy/marshal_test.go b/pkg/util/marshal/legacy/marshal_test.go index e146e9a99a5fa..b535a390479f9 100644 --- a/pkg/util/marshal/legacy/marshal_test.go +++ b/pkg/util/marshal/legacy/marshal_test.go @@ -9,9 +9,9 @@ import ( json "github.com/json-iterator/go" "github.com/stretchr/testify/require" - loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" + loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" ) // covers responses from /api/prom/query diff --git a/pkg/util/marshal/marshal.go b/pkg/util/marshal/marshal.go index 08c90a348c2b3..b720bfa557bd1 100644 --- a/pkg/util/marshal/marshal.go +++ b/pkg/util/marshal/marshal.go @@ -11,14 +11,14 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - indexStats "github.com/grafana/loki/pkg/storage/stores/index/stats" - "github.com/grafana/loki/pkg/util/httpreq" - marshal_legacy "github.com/grafana/loki/pkg/util/marshal/legacy" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + indexStats "github.com/grafana/loki/v3/pkg/storage/stores/index/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" + marshal_legacy "github.com/grafana/loki/v3/pkg/util/marshal/legacy" ) func WriteResponseJSON(r *http.Request, v any, w http.ResponseWriter) error { diff --git a/pkg/util/marshal/marshal_test.go b/pkg/util/marshal/marshal_test.go index 7917be41dae76..ba2acf5ee85ef 100644 --- a/pkg/util/marshal/marshal_test.go +++ b/pkg/util/marshal/marshal_test.go @@ -15,12 +15,12 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) const emptyStats = `{ diff --git a/pkg/util/marshal/query.go b/pkg/util/marshal/query.go index 8f41915c720a8..4591d851c553e 100644 --- a/pkg/util/marshal/query.go +++ b/pkg/util/marshal/query.go @@ -12,12 +12,12 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logqlmodel" - "github.com/grafana/loki/pkg/logqlmodel/stats" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + 
"github.com/grafana/loki/v3/pkg/logqlmodel" + "github.com/grafana/loki/v3/pkg/logqlmodel/stats" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) // NewResultValue constructs a ResultValue from a promql.Value diff --git a/pkg/util/marshal/tail.go b/pkg/util/marshal/tail.go index 222b76c046b7d..4dbada1d83816 100644 --- a/pkg/util/marshal/tail.go +++ b/pkg/util/marshal/tail.go @@ -1,8 +1,8 @@ package marshal import ( - "github.com/grafana/loki/pkg/loghttp" - legacy "github.com/grafana/loki/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/loghttp" + legacy "github.com/grafana/loki/v3/pkg/loghttp/legacy" ) // NewDroppedStream constructs a DroppedStream from a legacy.DroppedEntry diff --git a/pkg/util/metrics_helper.go b/pkg/util/metrics_helper.go index 58733c1c1e9ea..e4572b4e4a15c 100644 --- a/pkg/util/metrics_helper.go +++ b/pkg/util/metrics_helper.go @@ -13,7 +13,7 @@ import ( "github.com/prometheus/prometheus/model/labels" tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) var ( diff --git a/pkg/util/querylimits/limiter.go b/pkg/util/querylimits/limiter.go index 051e31270f137..9510a3fdc8250 100644 --- a/pkg/util/querylimits/limiter.go +++ b/pkg/util/querylimits/limiter.go @@ -8,8 +8,8 @@ import ( "github.com/go-kit/log" "github.com/go-kit/log/level" - "github.com/grafana/loki/pkg/util/limiter" - logutil "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/limiter" + logutil "github.com/grafana/loki/v3/pkg/util/log" ) type Limiter struct { diff --git a/pkg/util/querylimits/limiter_test.go b/pkg/util/querylimits/limiter_test.go index ad80fa34ec186..549972d32a2e9 100644 --- a/pkg/util/querylimits/limiter_test.go +++ b/pkg/util/querylimits/limiter_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/validation" ) type mockTenantLimits struct { diff --git a/pkg/util/querylimits/middleware.go b/pkg/util/querylimits/middleware.go index 58a93ad850f6b..a25d53949ba1d 100644 --- a/pkg/util/querylimits/middleware.go +++ b/pkg/util/querylimits/middleware.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" "github.com/grafana/dskit/middleware" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type queryLimitsMiddleware struct { diff --git a/pkg/util/querylimits/propagation.go b/pkg/util/querylimits/propagation.go index f0e5fbc8f6b49..a9cb06e347ca6 100644 --- a/pkg/util/querylimits/propagation.go +++ b/pkg/util/querylimits/propagation.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) // Context key type used to avoid collisions diff --git a/pkg/util/ring/ring.go b/pkg/util/ring/ring.go index bc8b2576984ea..27de64642497e 100644 --- a/pkg/util/ring/ring.go +++ b/pkg/util/ring/ring.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/dskit/ring" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) // TokenFor generates a token used for finding ingesters from ring diff --git a/pkg/util/ring/ring_config.go b/pkg/util/ring/ring_config.go index 779c40f4dad5c..d64bea1759cc9 100644 --- a/pkg/util/ring/ring_config.go +++ b/pkg/util/ring/ring_config.go @@ -15,8 +15,8 @@ import ( "github.com/grafana/dskit/netutil" "github.com/grafana/dskit/ring" - 
util_flagext "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" + util_flagext "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // RingConfig masks the ring lifecycler config which contains diff --git a/pkg/util/server/error.go b/pkg/util/server/error.go index 65cb430bb3f32..c120a79176f85 100644 --- a/pkg/util/server/error.go +++ b/pkg/util/server/error.go @@ -14,9 +14,9 @@ import ( "github.com/gogo/googleapis/google/rpc" "github.com/gogo/status" - "github.com/grafana/loki/pkg/logqlmodel" - storage_errors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + storage_errors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/util" ) // StatusClientClosedRequest is the status code for when a client request cancellation of an http request diff --git a/pkg/util/server/error_test.go b/pkg/util/server/error_test.go index 47b2453f14925..69f2bff163c6c 100644 --- a/pkg/util/server/error_test.go +++ b/pkg/util/server/error_test.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/codes" - "github.com/grafana/loki/pkg/logqlmodel" - storage_errors "github.com/grafana/loki/pkg/storage/errors" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/logqlmodel" + storage_errors "github.com/grafana/loki/v3/pkg/storage/errors" + "github.com/grafana/loki/v3/pkg/util" ) func Test_writeError(t *testing.T) { diff --git a/pkg/util/server/grpc_headers.go b/pkg/util/server/grpc_headers.go index 3b205a73d10d2..27f5bb9a75d62 100644 --- a/pkg/util/server/grpc_headers.go +++ b/pkg/util/server/grpc_headers.go @@ -6,7 +6,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func injectHTTPHeadersIntoGRPCRequest(ctx context.Context) context.Context { diff --git a/pkg/util/server/grpc_headers_test.go b/pkg/util/server/grpc_headers_test.go index db222451f4bde..1c0e728659b83 100644 --- a/pkg/util/server/grpc_headers_test.go +++ b/pkg/util/server/grpc_headers_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func TestInjectHTTPHeaderIntoGRPCRequest(t *testing.T) { diff --git a/pkg/util/server/grpc_query_tags.go b/pkg/util/server/grpc_query_tags.go index d5d6e58d2c938..cfd5e95ff23df 100644 --- a/pkg/util/server/grpc_query_tags.go +++ b/pkg/util/server/grpc_query_tags.go @@ -6,7 +6,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func getQueryTags(ctx context.Context) string { diff --git a/pkg/util/server/grpc_query_tags_test.go b/pkg/util/server/grpc_query_tags_test.go index ae718178caa00..733cd488dce7a 100644 --- a/pkg/util/server/grpc_query_tags_test.go +++ b/pkg/util/server/grpc_query_tags_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/util/httpreq" + "github.com/grafana/loki/v3/pkg/util/httpreq" ) func TestInjectQueryTagsIntoGRPCRequest(t *testing.T) { diff --git a/pkg/util/server/recovery.go b/pkg/util/server/recovery.go index 713d77e44a306..ce3ad109512b7 100644 --- a/pkg/util/server/recovery.go +++ 
b/pkg/util/server/recovery.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/util/constants" ) const maxStacksize = 8 * 1024 diff --git a/pkg/util/server/recovery_test.go b/pkg/util/server/recovery_test.go index 3a98b01b1beb4..a8d1d3f1b6b9d 100644 --- a/pkg/util/server/recovery_test.go +++ b/pkg/util/server/recovery_test.go @@ -10,7 +10,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "github.com/grafana/loki/pkg/querier/queryrange/queryrangebase" + "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" ) func Test_onPanic(t *testing.T) { diff --git a/pkg/util/spanlogger/spanlogger.go b/pkg/util/spanlogger/spanlogger.go index dbbf7679f7c7c..03ba8ab06e2d5 100644 --- a/pkg/util/spanlogger/spanlogger.go +++ b/pkg/util/spanlogger/spanlogger.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/dskit/spanlogger" "github.com/grafana/dskit/tenant" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/pkg/util/time.go b/pkg/util/time.go index b943fea92aad8..9de06f381c88c 100644 --- a/pkg/util/time.go +++ b/pkg/util/time.go @@ -10,7 +10,7 @@ import ( "github.com/grafana/dskit/httpgrpc" "github.com/prometheus/common/model" - utilsMath "github.com/grafana/loki/pkg/util/math" + utilsMath "github.com/grafana/loki/v3/pkg/util/math" ) const ( diff --git a/pkg/util/unmarshal/legacy/unmarshal.go b/pkg/util/unmarshal/legacy/unmarshal.go index 0f62b6df04c5f..5a58ca6bbfed1 100644 --- a/pkg/util/unmarshal/legacy/unmarshal.go +++ b/pkg/util/unmarshal/legacy/unmarshal.go @@ -5,7 +5,7 @@ import ( json "github.com/json-iterator/go" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // DecodePushRequest directly decodes json to a logproto.PushRequest diff --git a/pkg/util/unmarshal/legacy/unmarshal_test.go b/pkg/util/unmarshal/legacy/unmarshal_test.go index 780a5ed268ed7..dc51815890dff 100644 --- a/pkg/util/unmarshal/legacy/unmarshal_test.go +++ b/pkg/util/unmarshal/legacy/unmarshal_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logproto" ) // covers requests to /api/prom/push diff --git a/pkg/util/unmarshal/unmarshal.go b/pkg/util/unmarshal/unmarshal.go index 51e7d1108d9d9..4b048d7089c65 100644 --- a/pkg/util/unmarshal/unmarshal.go +++ b/pkg/util/unmarshal/unmarshal.go @@ -6,8 +6,8 @@ import ( jsoniter "github.com/json-iterator/go" - "github.com/grafana/loki/pkg/loghttp" - "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/v3/pkg/loghttp" + "github.com/grafana/loki/v3/pkg/logproto" ) // DecodePushRequest directly decodes json to a logproto.PushRequest diff --git a/pkg/util/unmarshal/unmarshal_test.go b/pkg/util/unmarshal/unmarshal_test.go index 93372f62ebef1..3ae76e1d9fbb0 100644 --- a/pkg/util/unmarshal/unmarshal_test.go +++ b/pkg/util/unmarshal/unmarshal_test.go @@ -10,10 +10,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/loghttp" - legacy_loghttp "github.com/grafana/loki/pkg/loghttp/legacy" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/util/marshal" + 
"github.com/grafana/loki/v3/pkg/loghttp" + legacy_loghttp "github.com/grafana/loki/v3/pkg/loghttp/legacy" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/util/marshal" ) func Test_DecodePushRequest(t *testing.T) { diff --git a/pkg/util/validation/notifications_limit_flag.go b/pkg/util/validation/notifications_limit_flag.go index 1b8524c1ab5b1..f05e634e87569 100644 --- a/pkg/util/validation/notifications_limit_flag.go +++ b/pkg/util/validation/notifications_limit_flag.go @@ -6,7 +6,7 @@ import ( "github.com/pkg/errors" - "github.com/grafana/loki/pkg/util" + "github.com/grafana/loki/v3/pkg/util" ) var allowedIntegrationNames = []string{ diff --git a/pkg/validation/exporter.go b/pkg/validation/exporter.go index ad9dde8574dd0..78cd13a1dad84 100644 --- a/pkg/validation/exporter.go +++ b/pkg/validation/exporter.go @@ -7,7 +7,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/flagext" ) type ExportedLimits interface { diff --git a/pkg/validation/limits.go b/pkg/validation/limits.go index a004b8eb94f89..ed8f508447683 100644 --- a/pkg/validation/limits.go +++ b/pkg/validation/limits.go @@ -19,18 +19,18 @@ import ( "golang.org/x/time/rate" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/distributor/shardstreams" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logql" - "github.com/grafana/loki/pkg/logql/syntax" - ruler_config "github.com/grafana/loki/pkg/ruler/config" - "github.com/grafana/loki/pkg/ruler/util" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" - "github.com/grafana/loki/pkg/util/flagext" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/distributor/shardstreams" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logql" + "github.com/grafana/loki/v3/pkg/logql/syntax" + ruler_config "github.com/grafana/loki/v3/pkg/ruler/config" + "github.com/grafana/loki/v3/pkg/ruler/util" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/sharding" + "github.com/grafana/loki/v3/pkg/util/flagext" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/util/validation" ) const ( diff --git a/pkg/validation/limits_test.go b/pkg/validation/limits_test.go index d8527bcdaa59c..598a6f9033cde 100644 --- a/pkg/validation/limits_test.go +++ b/pkg/validation/limits_test.go @@ -12,10 +12,10 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/compactor/deletionmode" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/logql" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/compactor/deletionmode" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/logql" ) func TestLimitsTagsYamlMatchJson(t *testing.T) { diff --git a/pkg/validation/validate.go b/pkg/validation/validate.go index 09c444aa64987..4b02505b98e54 100644 --- a/pkg/validation/validate.go +++ b/pkg/validation/validate.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" 
"github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" - "github.com/grafana/loki/pkg/util/flagext" + "github.com/grafana/loki/v3/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/flagext" ) const ( diff --git a/tools/bloom/inspector/main.go b/tools/bloom/inspector/main.go index 36d1523714292..dfcc7c79cd86d 100644 --- a/tools/bloom/inspector/main.go +++ b/tools/bloom/inspector/main.go @@ -4,7 +4,7 @@ import ( "fmt" "os" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" ) func main() { diff --git a/tools/deprecated-config-checker/main.go b/tools/deprecated-config-checker/main.go index 6ce387c1eec88..61683333899ad 100644 --- a/tools/deprecated-config-checker/main.go +++ b/tools/deprecated-config-checker/main.go @@ -7,7 +7,7 @@ import ( "github.com/fatih/color" - "github.com/grafana/loki/tools/deprecated-config-checker/checker" + "github.com/grafana/loki/v3/tools/deprecated-config-checker/checker" ) const upgradeGuideURL = "https://grafana.com/docs/loki/latest/setup/upgrade/" diff --git a/tools/doc-generator/main.go b/tools/doc-generator/main.go index 3d28f91eaa29c..c2748cee925e4 100644 --- a/tools/doc-generator/main.go +++ b/tools/doc-generator/main.go @@ -13,8 +13,8 @@ import ( "strings" "text/template" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/tools/doc-generator/parse" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/tools/doc-generator/parse" ) const ( diff --git a/tools/doc-generator/parse/parser.go b/tools/doc-generator/parse/parser.go index d5896a0666bf5..f565bf2dc9c90 100644 --- a/tools/doc-generator/parse/parser.go +++ b/tools/doc-generator/parse/parser.go @@ -22,10 +22,10 @@ import ( prometheus_config "github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/model/relabel" - "github.com/grafana/loki/pkg/ruler/util" - storage_config "github.com/grafana/loki/pkg/storage/config" - util_validation "github.com/grafana/loki/pkg/util/validation" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/ruler/util" + storage_config "github.com/grafana/loki/v3/pkg/storage/config" + util_validation "github.com/grafana/loki/v3/pkg/util/validation" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/tools/doc-generator/parse/root_blocks.go b/tools/doc-generator/parse/root_blocks.go index 25a4e785ded63..37debb7c41765 100644 --- a/tools/doc-generator/parse/root_blocks.go +++ b/tools/doc-generator/parse/root_blocks.go @@ -13,36 +13,36 @@ import ( "github.com/grafana/dskit/runtimeconfig" "github.com/grafana/dskit/server" - "github.com/grafana/loki/pkg/analytics" - "github.com/grafana/loki/pkg/bloomcompactor" - "github.com/grafana/loki/pkg/bloomgateway" - "github.com/grafana/loki/pkg/compactor" - "github.com/grafana/loki/pkg/distributor" - "github.com/grafana/loki/pkg/ingester" - ingester_client "github.com/grafana/loki/pkg/ingester/client" - "github.com/grafana/loki/pkg/loghttp/push" - "github.com/grafana/loki/pkg/loki/common" - frontend "github.com/grafana/loki/pkg/lokifrontend" - "github.com/grafana/loki/pkg/querier" - "github.com/grafana/loki/pkg/querier/queryrange" - querier_worker "github.com/grafana/loki/pkg/querier/worker" - "github.com/grafana/loki/pkg/ruler" - "github.com/grafana/loki/pkg/scheduler" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/cache" - "github.com/grafana/loki/pkg/storage/chunk/client/alibaba" - 
"github.com/grafana/loki/pkg/storage/chunk/client/aws" - "github.com/grafana/loki/pkg/storage/chunk/client/azure" - "github.com/grafana/loki/pkg/storage/chunk/client/baidubce" - "github.com/grafana/loki/pkg/storage/chunk/client/gcp" - "github.com/grafana/loki/pkg/storage/chunk/client/ibmcloud" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/chunk/client/openstack" - storage_config "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/series/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/indexgateway" - "github.com/grafana/loki/pkg/tracing" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/analytics" + "github.com/grafana/loki/v3/pkg/bloomcompactor" + "github.com/grafana/loki/v3/pkg/bloomgateway" + "github.com/grafana/loki/v3/pkg/compactor" + "github.com/grafana/loki/v3/pkg/distributor" + "github.com/grafana/loki/v3/pkg/ingester" + ingester_client "github.com/grafana/loki/v3/pkg/ingester/client" + "github.com/grafana/loki/v3/pkg/loghttp/push" + "github.com/grafana/loki/v3/pkg/loki/common" + frontend "github.com/grafana/loki/v3/pkg/lokifrontend" + "github.com/grafana/loki/v3/pkg/querier" + "github.com/grafana/loki/v3/pkg/querier/queryrange" + querier_worker "github.com/grafana/loki/v3/pkg/querier/worker" + "github.com/grafana/loki/v3/pkg/ruler" + "github.com/grafana/loki/v3/pkg/scheduler" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/cache" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/alibaba" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/aws" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/azure" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/baidubce" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/gcp" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/ibmcloud" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/openstack" + storage_config "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/series/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" + "github.com/grafana/loki/v3/pkg/tracing" + "github.com/grafana/loki/v3/pkg/validation" ) var ( diff --git a/tools/doc-generator/writer.go b/tools/doc-generator/writer.go index f182d0a8600c5..7a04c891ed2bc 100644 --- a/tools/doc-generator/writer.go +++ b/tools/doc-generator/writer.go @@ -15,7 +15,7 @@ import ( "github.com/mitchellh/go-wordwrap" "gopkg.in/yaml.v3" - "github.com/grafana/loki/tools/doc-generator/parse" + "github.com/grafana/loki/v3/tools/doc-generator/parse" ) type specWriter struct { diff --git a/tools/querytee/response_comparator.go b/tools/querytee/response_comparator.go index a6cb8544fa081..04a28fff85c1d 100644 --- a/tools/querytee/response_comparator.go +++ b/tools/querytee/response_comparator.go @@ -12,8 +12,8 @@ import ( "github.com/pkg/errors" "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/loghttp" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/loghttp" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // SamplesComparatorFunc helps with comparing different types of samples coming from /api/v1/query and /api/v1/query_range routes. 
diff --git a/tools/tsdb/bloom-tester/concurrent.go b/tools/tsdb/bloom-tester/concurrent.go index c42d403809aea..a6a2382a2a4ad 100644 --- a/tools/tsdb/bloom-tester/concurrent.go +++ b/tools/tsdb/bloom-tester/concurrent.go @@ -4,7 +4,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) type pool struct { diff --git a/tools/tsdb/bloom-tester/lib.go b/tools/tsdb/bloom-tester/lib.go index c7608f0046bb2..0d3a505668047 100644 --- a/tools/tsdb/bloom-tester/lib.go +++ b/tools/tsdb/bloom-tester/lib.go @@ -7,8 +7,8 @@ import ( "flag" "fmt" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" "hash/fnv" "math" @@ -23,17 +23,17 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/storage" - bt "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/tsdb/helpers" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/storage" + bt "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/tsdb/helpers" ) const ( diff --git a/tools/tsdb/bloom-tester/main.go b/tools/tsdb/bloom-tester/main.go index 916796b917042..ab5b9dfbcf1c1 100644 --- a/tools/tsdb/bloom-tester/main.go +++ b/tools/tsdb/bloom-tester/main.go @@ -7,7 +7,7 @@ import ( "github.com/go-kit/log/level" - util_log "github.com/grafana/loki/pkg/util/log" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) // go build ./tools/tsdb/bloom-tester && HOSTNAME="bloom-tester-121" NUM_TESTERS="128" BUCKET="19625" DIR=/Users/progers/dev/bloom WRITE_MODE="false" BUCKET_PREFIX="new-experiments" ./tools/tsdb/bloom-tester/bloom-tester --config.file=/Users/progers/dev/bloom/config.yaml diff --git a/tools/tsdb/bloom-tester/metrics.go b/tools/tsdb/bloom-tester/metrics.go index cc0b0b345b7ff..3eea766b95f51 100644 --- a/tools/tsdb/bloom-tester/metrics.go +++ b/tools/tsdb/bloom-tester/metrics.go @@ -4,9 +4,9 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" + 
"github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" ) type Experiment struct { diff --git a/tools/tsdb/bloom-tester/readlib.go b/tools/tsdb/bloom-tester/readlib.go index e2a21754e865c..5886b13e2a9d2 100644 --- a/tools/tsdb/bloom-tester/readlib.go +++ b/tools/tsdb/bloom-tester/readlib.go @@ -7,13 +7,13 @@ import ( "github.com/grafana/dskit/services" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" - bt "github.com/grafana/loki/pkg/storage/bloom/v1" - "github.com/grafana/loki/pkg/storage/bloom/v1/filter" - "github.com/grafana/loki/pkg/storage/chunk" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" + bt "github.com/grafana/loki/v3/pkg/storage/bloom/v1" + "github.com/grafana/loki/v3/pkg/storage/bloom/v1/filter" + "github.com/grafana/loki/v3/pkg/storage/chunk" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" "math" "os" @@ -27,18 +27,18 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" //indexshipper_index "github.com/grafana/loki/pkg/storage/stores/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" //"github.com/grafana/loki/pkg/storage/stores/tsdb" //"github.com/grafana/loki/pkg/storage/stores/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/tsdb/helpers" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/tsdb/helpers" ) var queryExperiments = []QueryExperiment{ diff --git a/tools/tsdb/bloom-tester/tokenizer.go b/tools/tsdb/bloom-tester/tokenizer.go index 3d82c770020ef..c89844240455b 100644 --- a/tools/tsdb/bloom-tester/tokenizer.go +++ b/tools/tsdb/bloom-tester/tokenizer.go @@ -5,22 +5,22 @@ import ( "math" "time" - v1 "github.com/grafana/loki/pkg/storage/bloom/v1" + v1 "github.com/grafana/loki/v3/pkg/storage/bloom/v1" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/grafana/loki/pkg/util/constants" + "github.com/grafana/loki/v3/pkg/util/constants" "github.com/go-kit/log/level" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/logproto" + "github.com/grafana/loki/v3/pkg/logql/log" - "github.com/grafana/loki/pkg/storage/chunk" - "github.com/grafana/loki/pkg/util/encoding" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage/chunk" + "github.com/grafana/loki/v3/pkg/util/encoding" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) type metrics struct { diff --git a/tools/tsdb/helpers/setup.go 
b/tools/tsdb/helpers/setup.go index a0eb490269fc9..807adb4dd9897 100644 --- a/tools/tsdb/helpers/setup.go +++ b/tools/tsdb/helpers/setup.go @@ -12,14 +12,14 @@ import ( "github.com/prometheus/client_golang/prometheus/collectors" "github.com/prometheus/client_golang/prometheus/collectors/version" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/pkg/validation" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/pkg/validation" ) func Setup() (loki.Config, services.Service, string, error) { diff --git a/tools/tsdb/helpers/util.go b/tools/tsdb/helpers/util.go index b30e9e9b1f5cc..8cc0602045b08 100644 --- a/tools/tsdb/helpers/util.go +++ b/tools/tsdb/helpers/util.go @@ -12,8 +12,8 @@ import ( "github.com/prometheus/common/model" - "github.com/grafana/loki/pkg/storage/chunk/client" - "github.com/grafana/loki/pkg/storage/config" + "github.com/grafana/loki/v3/pkg/storage/chunk/client" + "github.com/grafana/loki/v3/pkg/storage/config" ) const ( diff --git a/tools/tsdb/index-analyzer/analytics.go b/tools/tsdb/index-analyzer/analytics.go index d9baeedc69533..de01d47d6ec00 100644 --- a/tools/tsdb/index-analyzer/analytics.go +++ b/tools/tsdb/index-analyzer/analytics.go @@ -7,10 +7,10 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/model/labels" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - tsdb_index "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + tsdb_index "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func analyze(indexShipper indexshipper.IndexShipper, tableName string, tenants []string) error { diff --git a/tools/tsdb/index-analyzer/main.go b/tools/tsdb/index-analyzer/main.go index fd59bd4792fdf..2d19ad9c3c421 100644 --- a/tools/tsdb/index-analyzer/main.go +++ b/tools/tsdb/index-analyzer/main.go @@ -5,11 +5,11 @@ import ( "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - util_log "github.com/grafana/loki/pkg/util/log" - "github.com/grafana/loki/tools/tsdb/helpers" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + util_log "github.com/grafana/loki/v3/pkg/util/log" + "github.com/grafana/loki/v3/tools/tsdb/helpers" ) // go build 
./tools/tsdb/index-analyzer && BUCKET=19453 DIR=/tmp/loki-index-analysis ./index-analyzer --config.file=/tmp/loki-config.yaml diff --git a/tools/tsdb/migrate-versions/main.go b/tools/tsdb/migrate-versions/main.go index b458c80d4c1b8..2c49906a56452 100644 --- a/tools/tsdb/migrate-versions/main.go +++ b/tools/tsdb/migrate-versions/main.go @@ -17,17 +17,17 @@ import ( "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" - "github.com/grafana/loki/pkg/chunkenc" - "github.com/grafana/loki/pkg/loki" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/util" - "github.com/grafana/loki/pkg/storage/config" - shipperindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/index" - shipperstorage "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - tsdbindex "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/util/cfg" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/chunkenc" + "github.com/grafana/loki/v3/pkg/loki" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/util" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/index" + shipperstorage "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + tsdbindex "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/util/cfg" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/tools/tsdb/migrate-versions/main_test.go b/tools/tsdb/migrate-versions/main_test.go index 2f4690fde0a7e..7ac68521545bd 100644 --- a/tools/tsdb/migrate-versions/main_test.go +++ b/tools/tsdb/migrate-versions/main_test.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage" - "github.com/grafana/loki/pkg/storage/chunk/client/local" - "github.com/grafana/loki/pkg/storage/config" - shipperstorage "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/storage" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - util_log "github.com/grafana/loki/pkg/util/log" + "github.com/grafana/loki/v3/pkg/storage" + "github.com/grafana/loki/v3/pkg/storage/chunk/client/local" + "github.com/grafana/loki/v3/pkg/storage/config" + shipperstorage "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/storage" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) const ( diff --git a/tools/tsdb/tsdb-map/main.go b/tools/tsdb/tsdb-map/main.go index 7748b48c800c2..0a72ac98db13d 100644 --- a/tools/tsdb/tsdb-map/main.go +++ b/tools/tsdb/tsdb-map/main.go @@ -11,12 +11,12 @@ import ( "go.etcd.io/bbolt" "gopkg.in/yaml.v2" - "github.com/grafana/loki/pkg/compactor/retention" - "github.com/grafana/loki/pkg/storage/config" - boltdbcompactor "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" - 
"github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/util" + "github.com/grafana/loki/v3/pkg/compactor/retention" + "github.com/grafana/loki/v3/pkg/storage/config" + boltdbcompactor "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/boltdb/compactor" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/util" ) var ( diff --git a/tools/tsdb/tsdb-map/main_test.go b/tools/tsdb/tsdb-map/main_test.go index bf8c802db8456..56fdcdbc3b255 100644 --- a/tools/tsdb/tsdb-map/main_test.go +++ b/tools/tsdb/tsdb-map/main_test.go @@ -11,8 +11,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb" - "github.com/grafana/loki/pkg/storage/stores/shipper/indexshipper/tsdb/index" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb" + "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/tsdb/index" ) func TestExtractChecksum(t *testing.T) { diff --git a/vendor/github.com/grafana/loki/pkg/push/LICENSE b/vendor/github.com/grafana/loki/v3/pkg/push/LICENSE similarity index 100% rename from vendor/github.com/grafana/loki/pkg/push/LICENSE rename to vendor/github.com/grafana/loki/v3/pkg/push/LICENSE diff --git a/vendor/github.com/grafana/loki/pkg/push/push.pb.go b/vendor/github.com/grafana/loki/v3/pkg/push/push.pb.go similarity index 92% rename from vendor/github.com/grafana/loki/pkg/push/push.pb.go rename to vendor/github.com/grafana/loki/v3/pkg/push/push.pb.go index 3b07d850ff162..7979872929611 100644 --- a/vendor/github.com/grafana/loki/pkg/push/push.pb.go +++ b/vendor/github.com/grafana/loki/v3/pkg/push/push.pb.go @@ -296,40 +296,41 @@ func init() { func init() { proto.RegisterFile("pkg/push/push.proto", fileDescriptor_35ec442956852c9e) } var fileDescriptor_35ec442956852c9e = []byte{ - // 527 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x40, - 0x10, 0xf5, 0x26, 0x6e, 0xda, 0x6e, 0x4a, 0xa9, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x47, 0x16, 0x87, - 0x1c, 0xc0, 0x96, 0xc2, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x20, 0x71, - 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0x6b, 0xa4, 0xde, 0xf8, 0x84, 0xf2, 0x17, 0x7c, - 0x01, 0xdf, 0xd0, 0x63, 0x8e, 0x15, 0x07, 0x43, 0x9c, 0x0b, 0xca, 0xa9, 0x9f, 0x80, 0xbc, 0xb6, - 0x49, 0x28, 0x48, 0x5c, 0x36, 0x6f, 0x66, 0x67, 0xde, 0x7b, 0x99, 0x1d, 0xc3, 0x07, 0xd9, 0x45, - 0xe0, 0x66, 0xb9, 0x08, 0xd5, 0xe1, 0x64, 0x9c, 0x49, 0x86, 0xb6, 0x62, 0x16, 0x28, 0x64, 0xee, - 0x07, 0x2c, 0x60, 0x0a, 0xba, 0x15, 0xaa, 0xef, 0x4d, 0x2b, 0x60, 0x2c, 0x88, 0xa9, 0xab, 0xa2, - 0x49, 0x7e, 0xee, 0xca, 0x28, 0xa1, 0x42, 0x92, 0x24, 0xab, 0x0b, 0xec, 0x77, 0xb0, 0x7f, 0x9a, - 0x8b, 0xd0, 0xa7, 0x1f, 0x72, 0x2a, 0x24, 0x3a, 0x86, 0x9b, 0x42, 0x72, 0x4a, 0x12, 0x61, 0x80, - 0x41, 0x77, 0xd8, 0x1f, 0x3d, 0x74, 0x5a, 0x05, 0xe7, 0xb5, 0xba, 0x18, 0x4f, 0x49, 0x26, 0x29, - 0xf7, 0x0e, 0xbe, 0x15, 0x56, 0xaf, 0x4e, 0x2d, 0x0b, 0xab, 0xed, 0xf2, 0x5b, 0x60, 0xef, 0xc2, - 0x9d, 0x9a, 0x58, 0x64, 0x2c, 0x15, 0xd4, 0xfe, 0x0c, 0xe0, 0xbd, 0x3f, 0x18, 0x90, 0x0d, 0x7b, - 0x31, 0x99, 0xd0, 0xb8, 0x92, 
0x02, 0xc3, 0x6d, 0x0f, 0x2e, 0x0b, 0xab, 0xc9, 0xf8, 0xcd, 0x2f, - 0x1a, 0xc3, 0x4d, 0x9a, 0x4a, 0x1e, 0x51, 0x61, 0x74, 0x94, 0x9f, 0xc3, 0x95, 0x9f, 0x97, 0xa9, - 0xe4, 0x97, 0xad, 0x9d, 0xfb, 0xd7, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, 0xe8, 0x11, - 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x3b, 0x00, 0x43, 0xdd, 0xdb, 0x58, 0x16, 0x16, 0x78, 0xea, 0xab, - 0x94, 0xfd, 0x02, 0xee, 0x9d, 0x54, 0x3a, 0xa7, 0x24, 0xe2, 0xad, 0x2b, 0x04, 0xf5, 0x94, 0x24, - 0xb4, 0xf6, 0xe4, 0x2b, 0x8c, 0xf6, 0xe1, 0xc6, 0x47, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, 0x64, 0x1d, - 0xd8, 0x5f, 0x3b, 0x70, 0x67, 0xdd, 0x03, 0x3a, 0x86, 0xdb, 0xbf, 0xc7, 0xab, 0xfa, 0xfb, 0x23, - 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xbb, 0x8d, 0xe5, 0x8e, 0x14, - 0x57, 0xdf, 0x2d, 0xe0, 0xaf, 0x9a, 0xd1, 0x11, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, 0xad, 0x65, - 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, 0xfa, 0x8a, - 0x4a, 0x32, 0x25, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x35, 0x9f, 0xbb, 0x7f, 0xcd, 0x7b, 0xdc, - 0x08, 0x1e, 0xfd, 0xdd, 0xfd, 0x84, 0x25, 0x91, 0xa4, 0x49, 0x26, 0x2f, 0xfd, 0x7f, 0x70, 0xa3, - 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x6a, 0xe8, 0xff, 0x55, 0x31, 0x1a, 0x95, 0xbd, 0xba, 0x63, - 0x8d, 0xb9, 0xe1, 0x18, 0x8d, 0x61, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, 0x08, 0x1d, - 0xac, 0xf8, 0xd6, 0xb6, 0xd1, 0x3c, 0xbc, 0x9b, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0xce, 0xe6, 0x58, - 0xbb, 0x99, 0x63, 0xed, 0x76, 0x8e, 0xc1, 0xa7, 0x12, 0x83, 0x2f, 0x25, 0x06, 0xd7, 0x25, 0x06, - 0xb3, 0x12, 0x83, 0x1f, 0x25, 0x06, 0x3f, 0x4b, 0xac, 0xdd, 0x96, 0x18, 0x5c, 0x2d, 0xb0, 0x36, - 0x5b, 0x60, 0xed, 0x66, 0x81, 0xb5, 0xf7, 0x83, 0x20, 0x92, 0x61, 0x3e, 0x71, 0xce, 0x58, 0xe2, - 0x06, 0x9c, 0x9c, 0x93, 0x94, 0xb8, 0x31, 0xbb, 0x88, 0xdc, 0xf6, 0xd3, 0x9a, 0xf4, 0x94, 0xda, - 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7e, 0xaa, 0x57, 0xd3, 0x6d, 0x03, 0x00, 0x00, + // 532 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0xf6, 0x26, 0x6e, 0xda, 0x6e, 0xfa, 0xf7, 0xaf, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x57, 0x86, + 0x43, 0x0e, 0x60, 0x4b, 0xe9, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x42, + 0x70, 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0xeb, 0x4a, 0xbd, 0xf1, 0x08, 0xe5, 0x2d, + 0x78, 0x02, 0x9e, 0xa1, 0xc7, 0x1c, 0x2b, 0x0e, 0x86, 0x38, 0x17, 0x94, 0x53, 0x1f, 0x01, 0x79, + 0x6d, 0x93, 0x50, 0x90, 0xb8, 0x6c, 0xbe, 0x99, 0x9d, 0xf9, 0xbe, 0x2f, 0xb3, 0x63, 0xf8, 0x20, + 0xbb, 0x08, 0xdc, 0x2c, 0x17, 0xa1, 0x3a, 0x9c, 0x8c, 0x33, 0xc9, 0xd0, 0x46, 0xcc, 0x02, 0x85, + 0xcc, 0xdd, 0x80, 0x05, 0x4c, 0x41, 0xb7, 0x42, 0xf5, 0xbd, 0x69, 0x05, 0x8c, 0x05, 0x31, 0x75, + 0x55, 0x34, 0xce, 0xcf, 0x5d, 0x19, 0x25, 0x54, 0x48, 0x92, 0x64, 0x75, 0x81, 0xfd, 0x16, 0xf6, + 0x4f, 0x73, 0x11, 0xfa, 0xf4, 0x43, 0x4e, 0x85, 0x44, 0xc7, 0x70, 0x5d, 0x48, 0x4e, 0x49, 0x22, + 0x0c, 0x70, 0xd8, 0x1d, 0xf4, 0x87, 0x0f, 0x9d, 0x56, 0xc1, 0x79, 0xad, 0x2e, 0x46, 0x13, 0x92, + 0x49, 0xca, 0xbd, 0xbd, 0xaf, 0x85, 0xd5, 0xab, 0x53, 0x8b, 0xc2, 0x6a, 0xbb, 0xfc, 0x16, 0xd8, + 0xdb, 0x70, 0xab, 0x26, 0x16, 0x19, 0x4b, 0x05, 0xb5, 0x3f, 0x01, 0xf8, 0xdf, 0x6f, 0x0c, 0xc8, + 0x86, 0xbd, 0x98, 0x8c, 0x69, 0x5c, 0x49, 0x81, 0xc1, 0xa6, 0x07, 0x17, 0x85, 0xd5, 0x64, 0xfc, + 0xe6, 0x17, 0x8d, 0xe0, 0x3a, 0x4d, 0x25, 0x8f, 0xa8, 0x30, 0x3a, 0xca, 0xcf, 0xfe, 0xd2, 0xcf, + 0xcb, 0x54, 0xf2, 0xab, 0xd6, 0xce, 0xff, 0x37, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, + 0xe8, 0x11, 
0xd4, 0x43, 0x22, 0x42, 0xa3, 0x7b, 0x08, 0x06, 0xba, 0xb7, 0xb6, 0x28, 0x2c, 0xf0, + 0xcc, 0x57, 0x29, 0xfb, 0x05, 0xdc, 0x39, 0xa9, 0x74, 0x4e, 0x49, 0xc4, 0x5b, 0x57, 0x08, 0xea, + 0x29, 0x49, 0x68, 0xed, 0xc9, 0x57, 0x18, 0xed, 0xc2, 0xb5, 0x4b, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, + 0x64, 0x1d, 0xd8, 0x5f, 0x3a, 0x70, 0x6b, 0xd5, 0x03, 0x3a, 0x86, 0x9b, 0xbf, 0xc6, 0xab, 0xfa, + 0xfb, 0x43, 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xdb, 0x8d, 0xe5, + 0x8e, 0x14, 0xd7, 0xdf, 0x2c, 0xe0, 0x2f, 0x9b, 0xd1, 0x01, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, + 0x8d, 0x45, 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, + 0xf2, 0x8a, 0x4a, 0x32, 0x21, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x39, 0x9f, 0xfb, 0x7f, 0xcd, + 0x7b, 0xd2, 0x08, 0x1e, 0xfc, 0xd9, 0xfd, 0x94, 0x25, 0x91, 0xa4, 0x49, 0x26, 0xaf, 0xfc, 0xbf, + 0x70, 0xa3, 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x62, 0xe8, 0xff, 0x54, 0x31, 0x1a, 0x95, 0x9d, + 0xba, 0x63, 0x85, 0xb9, 0xe1, 0x18, 0x8e, 0x60, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, + 0x08, 0xed, 0x2d, 0xf9, 0x56, 0xb6, 0xd1, 0xdc, 0xbf, 0x9f, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0x4d, + 0x67, 0x58, 0xbb, 0x9d, 0x61, 0xed, 0x6e, 0x86, 0xc1, 0xc7, 0x12, 0x83, 0xcf, 0x25, 0x06, 0x37, + 0x25, 0x06, 0xd3, 0x12, 0x83, 0xef, 0x25, 0x06, 0x3f, 0x4a, 0xac, 0xdd, 0x95, 0x18, 0x5c, 0xcf, + 0xb1, 0x36, 0x9d, 0x63, 0xed, 0x76, 0x8e, 0xb5, 0xf7, 0x8f, 0x83, 0x48, 0x86, 0xf9, 0xd8, 0x39, + 0x63, 0x89, 0x1b, 0x70, 0x72, 0x4e, 0x52, 0xe2, 0xc6, 0xec, 0x22, 0x72, 0x2f, 0x8f, 0xdc, 0xf6, + 0xeb, 0x1a, 0xf7, 0x94, 0xe0, 0xd1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x51, 0xe6, 0x29, 0x58, + 0x70, 0x03, 0x00, 0x00, } func (this *PushRequest) Equal(that interface{}) bool { diff --git a/vendor/github.com/grafana/loki/pkg/push/push.proto b/vendor/github.com/grafana/loki/v3/pkg/push/push.proto similarity index 95% rename from vendor/github.com/grafana/loki/pkg/push/push.proto rename to vendor/github.com/grafana/loki/v3/pkg/push/push.proto index 3bf8ad06a8a83..e538c66903eae 100644 --- a/vendor/github.com/grafana/loki/pkg/push/push.proto +++ b/vendor/github.com/grafana/loki/v3/pkg/push/push.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; -option go_package = "github.com/grafana/loki/pkg/push"; +option go_package = "github.com/grafana/loki/v3/pkg/push"; service Pusher { rpc Push(PushRequest) returns (PushResponse) {} diff --git a/vendor/github.com/grafana/loki/pkg/push/timestamp.go b/vendor/github.com/grafana/loki/v3/pkg/push/timestamp.go similarity index 100% rename from vendor/github.com/grafana/loki/pkg/push/timestamp.go rename to vendor/github.com/grafana/loki/v3/pkg/push/timestamp.go diff --git a/vendor/github.com/grafana/loki/pkg/push/types.go b/vendor/github.com/grafana/loki/v3/pkg/push/types.go similarity index 100% rename from vendor/github.com/grafana/loki/pkg/push/types.go rename to vendor/github.com/grafana/loki/v3/pkg/push/types.go diff --git a/vendor/modules.txt b/vendor/modules.txt index 9bbf3e0af8662..10293601308e8 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -935,9 +935,9 @@ github.com/grafana/gomemcache/memcache # github.com/grafana/jsonparser v0.0.0-20240209175146-098958973a2d ## explicit; go 1.13 github.com/grafana/jsonparser -# github.com/grafana/loki/pkg/push v0.0.0-20231124142027-e52380921608 => ./pkg/push +# github.com/grafana/loki/v3/pkg/push v0.0.0-20231124142027-e52380921608 => ./pkg/push ## explicit; go 1.19 -github.com/grafana/loki/pkg/push 
+github.com/grafana/loki/v3/pkg/push # github.com/grafana/pyroscope-go/godeltaprof v0.1.6 ## explicit; go 1.16 github.com/grafana/pyroscope-go/godeltaprof @@ -2266,4 +2266,4 @@ sigs.k8s.io/yaml # github.com/gocql/gocql => github.com/grafana/gocql v0.0.0-20200605141915-ba5dc39ece85 # github.com/hashicorp/memberlist => github.com/grafana/memberlist v0.3.1-0.20220714140823-09ffed8adbbe # github.com/grafana/regexp => github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd -# github.com/grafana/loki/pkg/push => ./pkg/push +# github.com/grafana/loki/v3/pkg/push => ./pkg/push From a53a0cc7921f910724e3b04193cab09fc759de07 Mon Sep 17 00:00:00 2001 From: Trevor Whitney Date: Tue, 2 Apr 2024 13:57:27 -0600 Subject: [PATCH 53/54] feat: return real data in detected fields endpoint (#12421) parse log lines in the querier to return data about detected fields. this is not sustainable long term, but a short term solution to validate the data we want for the frontend. --- pkg/loghttp/labels.go | 14 - pkg/loghttp/params.go | 23 ++ pkg/loghttp/query.go | 42 +++ pkg/logproto/logproto.pb.go | 516 +++++++++++++++++----------- pkg/logproto/logproto.proto | 7 +- pkg/querier/http.go | 3 + pkg/querier/multi_tenant_querier.go | 28 +- pkg/querier/querier.go | 238 ++++++++++++- pkg/querier/queryrange/codec.go | 23 +- pkg/querier/queryrange/marshal.go | 10 + pkg/querier/queryrange/roundtrip.go | 2 +- 11 files changed, 658 insertions(+), 248 deletions(-) diff --git a/pkg/loghttp/labels.go b/pkg/loghttp/labels.go index b725340880262..b2c5a343637be 100644 --- a/pkg/loghttp/labels.go +++ b/pkg/loghttp/labels.go @@ -99,17 +99,3 @@ func ParseDetectedLabelsQuery(r *http.Request) (*logproto.DetectedLabelsRequest, Query: query(r), }, nil } - -func ParseDetectedFieldsQuery(r *http.Request) (*logproto.DetectedFieldsRequest, error) { - req := &logproto.DetectedFieldsRequest{} - - start, end, err := bounds(r) - if err != nil { - return nil, err - } - req.Start = &start - req.End = &end - - req.Query = query(r) - return req, nil -} diff --git a/pkg/loghttp/params.go b/pkg/loghttp/params.go index 7b9a7cb1f597a..4f34992df592b 100644 --- a/pkg/loghttp/params.go +++ b/pkg/loghttp/params.go @@ -19,6 +19,7 @@ import ( const ( defaultQueryLimit = 100 + defaultFieldLimit = 1000 defaultSince = 1 * time.Hour defaultDirection = logproto.BACKWARD ) @@ -34,6 +35,28 @@ func limit(r *http.Request) (uint32, error) { return uint32(l), nil } +func lineLimit(r *http.Request) (uint32, error) { + l, err := parseInt(r.Form.Get("line_limit"), defaultQueryLimit) + if err != nil { + return 0, err + } + if l <= 0 { + return 0, errors.New("limit must be a positive value") + } + return uint32(l), nil +} + +func fieldLimit(r *http.Request) (uint32, error) { + l, err := parseInt(r.Form.Get("field_limit"), defaultFieldLimit) + if err != nil { + return 0, err + } + if l <= 0 { + return 0, errors.New("limit must be a positive value") + } + return uint32(l), nil +} + func query(r *http.Request) string { return r.Form.Get("query") } diff --git a/pkg/loghttp/query.go b/pkg/loghttp/query.go index dcc07e427f1a9..5d16aefc1e43e 100644 --- a/pkg/loghttp/query.go +++ b/pkg/loghttp/query.go @@ -617,6 +617,48 @@ func ParseVolumeRangeQuery(r *http.Request) (*VolumeRangeQuery, error) { }, nil } +func ParseDetectedFieldsQuery(r *http.Request) (*logproto.DetectedFieldsRequest, error) { + var err error + result := &logproto.DetectedFieldsRequest{} + + result.Query = query(r) + result.Start, result.End, err = bounds(r) + if err != nil { + return nil, err + } + + if 
result.End.Before(result.Start) { + return nil, errEndBeforeStart + } + + result.LineLimit, err = lineLimit(r) + if err != nil { + return nil, err + } + + result.FieldLimit, err = fieldLimit(r) + if err != nil { + return nil, err + } + + step, err := step(r, result.Start, result.End) + result.Step = step.Milliseconds() + if err != nil { + return nil, err + } + + if result.Step <= 0 { + return nil, errZeroOrNegativeStep + } + + // For safety, limit the number of returned points per timeseries. + // This is sufficient for 60s resolution for a week or 1h resolution for a year. + if (result.End.Sub(result.Start) / step) > 11000 { + return nil, errStepTooSmall + } + return result, nil +} + func targetLabels(r *http.Request) []string { lbls := strings.Split(r.Form.Get("targetLabels"), ",") if (len(lbls) == 1 && lbls[0] == "") || len(lbls) == 0 { diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index bf8d45cef9058..5058beebfaf24 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -2513,9 +2513,12 @@ func (m *Volume) GetVolume() uint64 { } type DetectedFieldsRequest struct { - Start *time.Time `protobuf:"bytes,1,opt,name=start,proto3,stdtime" json:"start,omitempty"` - End *time.Time `protobuf:"bytes,2,opt,name=end,proto3,stdtime" json:"end,omitempty"` - Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + Start time.Time `protobuf:"bytes,1,opt,name=start,proto3,stdtime" json:"start"` + End time.Time `protobuf:"bytes,2,opt,name=end,proto3,stdtime" json:"end"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + LineLimit uint32 `protobuf:"varint,4,opt,name=lineLimit,proto3" json:"lineLimit,omitempty"` + FieldLimit uint32 `protobuf:"varint,5,opt,name=fieldLimit,proto3" json:"fieldLimit,omitempty"` + Step int64 `protobuf:"varint,6,opt,name=step,proto3" json:"step,omitempty"` } func (m *DetectedFieldsRequest) Reset() { *m = DetectedFieldsRequest{} } @@ -2550,18 +2553,18 @@ func (m *DetectedFieldsRequest) XXX_DiscardUnknown() { var xxx_messageInfo_DetectedFieldsRequest proto.InternalMessageInfo -func (m *DetectedFieldsRequest) GetStart() *time.Time { +func (m *DetectedFieldsRequest) GetStart() time.Time { if m != nil { return m.Start } - return nil + return time.Time{} } -func (m *DetectedFieldsRequest) GetEnd() *time.Time { +func (m *DetectedFieldsRequest) GetEnd() time.Time { if m != nil { return m.End } - return nil + return time.Time{} } func (m *DetectedFieldsRequest) GetQuery() string { @@ -2571,6 +2574,27 @@ func (m *DetectedFieldsRequest) GetQuery() string { return "" } +func (m *DetectedFieldsRequest) GetLineLimit() uint32 { + if m != nil { + return m.LineLimit + } + return 0 +} + +func (m *DetectedFieldsRequest) GetFieldLimit() uint32 { + if m != nil { + return m.FieldLimit + } + return 0 +} + +func (m *DetectedFieldsRequest) GetStep() int64 { + if m != nil { + return m.Step + } + return 0 +} + type DetectedFieldsResponse struct { Fields []*DetectedField `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` } @@ -2877,160 +2901,162 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2439 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x19, 0x4b, 0x8f, 0x13, 0xc9, - 0xd9, 0x6d, 0xb7, 0x5f, 0x9f, 0x3d, 0xc3, 0x50, 0x63, 0x06, 0xcb, 0xcb, 0xda, 0x43, 0x29, 0x0b, - 0x13, 0xc2, 0xda, 0xcb, 0x10, 0xc8, 0x2e, 
0x84, 0x6c, 0xf0, 0xcc, 0xc2, 0x0e, 0x0c, 0x8f, 0xad, - 0x21, 0x64, 0x13, 0x09, 0xa1, 0xc6, 0xae, 0xf1, 0xb4, 0xb0, 0xbb, 0x4d, 0x77, 0x19, 0xb0, 0x94, - 0x43, 0xfe, 0x40, 0x94, 0x95, 0x72, 0x88, 0x72, 0x89, 0x14, 0x25, 0x52, 0xa2, 0xe4, 0x12, 0xe5, - 0x07, 0x24, 0x97, 0x1c, 0xc8, 0x8d, 0xbd, 0xad, 0x38, 0x38, 0x61, 0xb8, 0x44, 0x73, 0xda, 0xf3, - 0x9e, 0xa2, 0x7a, 0xf5, 0xc3, 0xe3, 0x09, 0xeb, 0x59, 0x56, 0x09, 0x17, 0x77, 0xd5, 0x57, 0x5f, - 0x7d, 0xf5, 0xbd, 0xea, 0x7b, 0x94, 0xe1, 0x8d, 0xfe, 0xfd, 0x4e, 0xa3, 0xeb, 0x76, 0xfa, 0x9e, - 0xcb, 0xdc, 0x60, 0x50, 0x17, 0xbf, 0x28, 0xa7, 0xe7, 0x95, 0x52, 0xc7, 0xed, 0xb8, 0x12, 0x87, - 0x8f, 0xe4, 0x7a, 0xa5, 0xd6, 0x71, 0xdd, 0x4e, 0x97, 0x36, 0xc4, 0xec, 0xde, 0x60, 0xb3, 0xc1, - 0xec, 0x1e, 0xf5, 0x99, 0xd5, 0xeb, 0x2b, 0x84, 0x45, 0x45, 0xfd, 0x41, 0xb7, 0xe7, 0xb6, 0x69, - 0xb7, 0xe1, 0x33, 0x8b, 0xf9, 0xf2, 0x57, 0x61, 0xcc, 0x73, 0x8c, 0xfe, 0xc0, 0xdf, 0x12, 0x3f, - 0x12, 0x88, 0x4b, 0x80, 0x36, 0x98, 0x47, 0xad, 0x1e, 0xb1, 0x18, 0xf5, 0x09, 0x7d, 0x30, 0xa0, - 0x3e, 0xc3, 0xd7, 0x60, 0x3e, 0x06, 0xf5, 0xfb, 0xae, 0xe3, 0x53, 0x74, 0x16, 0x0a, 0x7e, 0x08, - 0x2e, 0x1b, 0x8b, 0xa9, 0xa5, 0xc2, 0x72, 0xa9, 0x1e, 0x88, 0x12, 0xee, 0x21, 0x51, 0x44, 0xfc, - 0x6b, 0x03, 0x20, 0x5c, 0x43, 0x55, 0x00, 0xb9, 0xfa, 0xa1, 0xe5, 0x6f, 0x95, 0x8d, 0x45, 0x63, - 0xc9, 0x24, 0x11, 0x08, 0x3a, 0x09, 0x07, 0xc3, 0xd9, 0x75, 0x77, 0x63, 0xcb, 0xf2, 0xda, 0xe5, - 0xa4, 0x40, 0xdb, 0xbd, 0x80, 0x10, 0x98, 0x9e, 0xc5, 0x68, 0x39, 0xb5, 0x68, 0x2c, 0xa5, 0x88, - 0x18, 0xa3, 0x05, 0xc8, 0x30, 0xea, 0x58, 0x0e, 0x2b, 0x9b, 0x8b, 0xc6, 0x52, 0x9e, 0xa8, 0x19, - 0x87, 0x73, 0xd9, 0xa9, 0x5f, 0x4e, 0x2f, 0x1a, 0x4b, 0x33, 0x44, 0xcd, 0xf0, 0x1f, 0x53, 0x50, - 0xfc, 0x68, 0x40, 0xbd, 0xa1, 0x52, 0x00, 0xaa, 0x42, 0xce, 0xa7, 0x5d, 0xda, 0x62, 0xae, 0x27, - 0x18, 0xcc, 0x37, 0x93, 0x65, 0x83, 0x04, 0x30, 0x54, 0x82, 0x74, 0xd7, 0xee, 0xd9, 0x4c, 0xb0, - 0x35, 0x43, 0xe4, 0x04, 0x9d, 0x83, 0xb4, 0xcf, 0x2c, 0x8f, 0x09, 0x5e, 0x0a, 0xcb, 0x95, 0xba, - 0x34, 0x5a, 0x5d, 0x1b, 0xad, 0x7e, 0x4b, 0x1b, 0xad, 0x99, 0x7b, 0x32, 0xaa, 0x25, 0x3e, 0xf9, - 0x67, 0xcd, 0x20, 0x72, 0x0b, 0x3a, 0x0b, 0x29, 0xea, 0xb4, 0x05, 0xbf, 0x5f, 0x76, 0x27, 0xdf, - 0x80, 0x4e, 0x41, 0xbe, 0x6d, 0x7b, 0xb4, 0xc5, 0x6c, 0xd7, 0x11, 0x52, 0xcd, 0x2e, 0xcf, 0x87, - 0x16, 0x59, 0xd5, 0x4b, 0x24, 0xc4, 0x42, 0x27, 0x21, 0xe3, 0x73, 0xd5, 0xf9, 0xe5, 0xec, 0x62, - 0x6a, 0x29, 0xdf, 0x2c, 0xed, 0x8c, 0x6a, 0x73, 0x12, 0x72, 0xd2, 0xed, 0xd9, 0x8c, 0xf6, 0xfa, - 0x6c, 0x48, 0x14, 0x0e, 0x3a, 0x01, 0xd9, 0x36, 0xed, 0x52, 0x6e, 0xf0, 0x9c, 0x30, 0xf8, 0x5c, - 0x84, 0xbc, 0x58, 0x20, 0x1a, 0x01, 0xdd, 0x01, 0xb3, 0xdf, 0xb5, 0x9c, 0x72, 0x5e, 0x48, 0x31, - 0x1b, 0x22, 0xde, 0xec, 0x5a, 0x4e, 0xf3, 0xbd, 0x67, 0xa3, 0xda, 0x99, 0x8e, 0xcd, 0xb6, 0x06, - 0xf7, 0xea, 0x2d, 0xb7, 0xd7, 0xe8, 0x78, 0xd6, 0xa6, 0xe5, 0x58, 0x8d, 0xae, 0x7b, 0xdf, 0x6e, - 0x3c, 0x3c, 0xdd, 0xe0, 0xfe, 0xf9, 0x60, 0x40, 0x3d, 0x9b, 0x7a, 0x0d, 0x4e, 0xa6, 0x2e, 0x4c, - 0xc2, 0xb7, 0x12, 0x41, 0xf6, 0x8a, 0x99, 0xcb, 0xcc, 0x65, 0xf1, 0xf3, 0x24, 0xa0, 0x0d, 0xab, - 0xd7, 0xef, 0xd2, 0xa9, 0x4c, 0x16, 0x18, 0x27, 0xb9, 0x6f, 0xe3, 0xa4, 0xa6, 0x35, 0x4e, 0xa8, - 0x69, 0x73, 0x3a, 0x4d, 0xa7, 0xbf, 0xac, 0xa6, 0x33, 0x5f, 0x8b, 0xa6, 0x71, 0x19, 0x4c, 0x3e, - 0x43, 0x73, 0x90, 0xf2, 0xac, 0x47, 0x42, 0x9f, 0x45, 0xc2, 0x87, 0x78, 0x1d, 0x32, 0x92, 0x17, - 0x54, 0x19, 0x57, 0x78, 0xfc, 0x7e, 0x84, 0xca, 0x4e, 0x69, 0x35, 0xce, 0x85, 0x6a, 0x4c, 0x09, - 0x05, 0xe1, 0xdf, 0x1a, 0x30, 0xa3, 0xac, 0xa8, 0x62, 0x0c, 0x85, 
0xac, 0xbc, 0xe3, 0x3a, 0xbe, - 0x1c, 0x1e, 0x8f, 0x2f, 0x17, 0xdb, 0x56, 0x9f, 0x51, 0xaf, 0xb9, 0xfc, 0x64, 0x54, 0x33, 0x9e, - 0x8d, 0x6a, 0x27, 0x5e, 0x22, 0xa8, 0x88, 0x76, 0x2a, 0xfc, 0x68, 0xda, 0xe8, 0x5b, 0x82, 0x41, - 0xe6, 0x2b, 0x6f, 0x38, 0x50, 0x97, 0x91, 0x72, 0xcd, 0xe9, 0x50, 0x9f, 0x13, 0x37, 0xb9, 0x21, - 0x89, 0xc4, 0xc1, 0x3f, 0x81, 0xf9, 0x98, 0xc3, 0x29, 0x56, 0xdf, 0x85, 0x8c, 0xcf, 0x75, 0xa8, - 0x39, 0x8d, 0x98, 0x6b, 0x43, 0xc0, 0x9b, 0xb3, 0x8a, 0xc5, 0x8c, 0x9c, 0x13, 0x85, 0x3f, 0xdd, - 0xe9, 0x7f, 0x37, 0xa0, 0xb8, 0x6e, 0xdd, 0xa3, 0x5d, 0xed, 0xe9, 0x08, 0x4c, 0xc7, 0xea, 0x51, - 0xa5, 0x74, 0x31, 0xe6, 0x91, 0xed, 0xa1, 0xd5, 0x1d, 0x50, 0x49, 0x32, 0x47, 0xd4, 0x6c, 0xda, - 0x90, 0x64, 0xec, 0x3b, 0x24, 0x19, 0xa1, 0xd7, 0x97, 0x20, 0xcd, 0x9d, 0x6b, 0x28, 0xc2, 0x51, - 0x9e, 0xc8, 0x09, 0x3e, 0x0e, 0x33, 0x4a, 0x0a, 0xa5, 0xbe, 0x90, 0x65, 0xae, 0xbe, 0xbc, 0x66, - 0x19, 0xf7, 0x20, 0x23, 0xb5, 0x8d, 0xbe, 0x01, 0xf9, 0x20, 0xcd, 0x09, 0x69, 0x53, 0xcd, 0xcc, - 0xce, 0xa8, 0x96, 0x64, 0x3e, 0x09, 0x17, 0x50, 0x0d, 0xd2, 0x62, 0xa7, 0x90, 0xdc, 0x68, 0xe6, - 0x77, 0x46, 0x35, 0x09, 0x20, 0xf2, 0x83, 0x8e, 0x80, 0xb9, 0xc5, 0x33, 0x0d, 0x57, 0x81, 0xd9, - 0xcc, 0xed, 0x8c, 0x6a, 0x62, 0x4e, 0xc4, 0x2f, 0xbe, 0x0c, 0xc5, 0x75, 0xda, 0xb1, 0x5a, 0x43, - 0x75, 0x68, 0x49, 0x93, 0xe3, 0x07, 0x1a, 0x9a, 0xc6, 0x51, 0x28, 0x06, 0x27, 0xde, 0xed, 0xf9, - 0xca, 0xaf, 0x0b, 0x01, 0xec, 0x9a, 0x8f, 0x7f, 0x65, 0x80, 0xb2, 0x33, 0xc2, 0x90, 0xe9, 0x72, - 0x59, 0x7d, 0x15, 0x89, 0x60, 0x67, 0x54, 0x53, 0x10, 0xa2, 0xbe, 0xe8, 0x3c, 0x64, 0x7d, 0x71, - 0x22, 0x27, 0x36, 0xee, 0x3e, 0x62, 0xa1, 0x79, 0x80, 0xbb, 0xc1, 0xce, 0xa8, 0xa6, 0x11, 0x89, - 0x1e, 0xa0, 0x7a, 0x2c, 0x85, 0x4a, 0xc1, 0x66, 0x77, 0x46, 0xb5, 0x08, 0x34, 0x9a, 0x52, 0xf1, - 0x17, 0x06, 0x14, 0x6e, 0x59, 0x76, 0xe0, 0x42, 0x65, 0x6d, 0xa2, 0x30, 0x52, 0x4a, 0x00, 0xbf, - 0xd5, 0x6d, 0xda, 0xb5, 0x86, 0x97, 0x5c, 0x4f, 0xd0, 0x9d, 0x21, 0xc1, 0x3c, 0xcc, 0x7a, 0xe6, - 0xc4, 0xac, 0x97, 0x9e, 0x3e, 0xb0, 0x7e, 0xbd, 0x61, 0xec, 0x8a, 0x99, 0x4b, 0xce, 0xa5, 0xf0, - 0x9f, 0x0d, 0x28, 0x4a, 0xe1, 0x95, 0xe7, 0xdd, 0x85, 0x8c, 0xd4, 0x8d, 0x10, 0xff, 0xbf, 0x84, - 0x98, 0xfa, 0x94, 0xe1, 0x45, 0x91, 0x45, 0xef, 0xc3, 0x6c, 0xdb, 0x73, 0xfb, 0x7d, 0xda, 0xde, - 0x50, 0xb1, 0x2c, 0x39, 0x1e, 0xcb, 0x56, 0xa3, 0xeb, 0x64, 0x0c, 0x1d, 0xff, 0xc3, 0x80, 0x19, - 0x15, 0x33, 0x94, 0xc5, 0x02, 0x2d, 0x1b, 0xfb, 0x4e, 0x5f, 0xc9, 0x69, 0xd3, 0xd7, 0x02, 0x64, - 0x3a, 0x9e, 0x3b, 0xe8, 0xfb, 0xe5, 0x94, 0xbc, 0xa1, 0x72, 0x36, 0x5d, 0x5a, 0xc3, 0x57, 0x60, - 0x56, 0x8b, 0xb2, 0x47, 0xe0, 0xac, 0x8c, 0x07, 0xce, 0xb5, 0x36, 0x75, 0x98, 0xbd, 0x69, 0x07, - 0xa1, 0x50, 0xe1, 0xe3, 0x9f, 0x1b, 0x30, 0x37, 0x8e, 0x82, 0x56, 0x23, 0xb7, 0x8d, 0x93, 0x3b, - 0xb6, 0x37, 0xb9, 0xba, 0x08, 0x41, 0xfe, 0x07, 0x0e, 0xf3, 0x86, 0x9a, 0xb4, 0xdc, 0x5b, 0x39, - 0x03, 0x85, 0xc8, 0x22, 0xcf, 0x55, 0xf7, 0xa9, 0xba, 0x1f, 0x84, 0x0f, 0xc3, 0xc0, 0x90, 0x94, - 0x61, 0x4d, 0x4c, 0xf0, 0x2f, 0x0d, 0x98, 0x89, 0xd9, 0x12, 0xbd, 0x0b, 0xe6, 0xa6, 0xe7, 0xf6, - 0xa6, 0x32, 0x94, 0xd8, 0x81, 0xbe, 0x0d, 0x49, 0xe6, 0x4e, 0x65, 0xa6, 0x24, 0x73, 0xb9, 0x95, - 0x94, 0xf8, 0x29, 0x59, 0xec, 0xca, 0x19, 0x3e, 0x03, 0x79, 0x21, 0xd0, 0x4d, 0xcb, 0xf6, 0x26, - 0xe6, 0x8c, 0xc9, 0x02, 0x9d, 0x87, 0x03, 0x32, 0x1e, 0x4e, 0xde, 0x5c, 0x9c, 0xb4, 0xb9, 0xa8, - 0x37, 0xbf, 0x01, 0xe9, 0x95, 0xad, 0x81, 0x73, 0x9f, 0x6f, 0x69, 0x5b, 0xcc, 0xd2, 0x5b, 0xf8, - 0x18, 0x1f, 0x82, 0x79, 0x7e, 0x0d, 0xa9, 0xe7, 0xaf, 0xb8, 0x03, 0x87, 0xe9, 0x66, 0xe3, 
0x24, - 0x94, 0xe2, 0x60, 0xe5, 0x25, 0x25, 0x48, 0xb7, 0x38, 0x40, 0xd0, 0x98, 0x21, 0x72, 0x82, 0x7f, - 0x67, 0x00, 0xba, 0x4c, 0x99, 0x38, 0x65, 0x6d, 0x35, 0xb8, 0x1e, 0x15, 0xc8, 0xf5, 0x2c, 0xd6, - 0xda, 0xa2, 0x9e, 0xaf, 0x8b, 0x11, 0x3d, 0xff, 0x5f, 0x54, 0x7e, 0xf8, 0x14, 0xcc, 0xc7, 0xb8, - 0x54, 0x32, 0x55, 0x20, 0xd7, 0x52, 0x30, 0x95, 0xf5, 0x82, 0x39, 0xfe, 0x4b, 0x12, 0x72, 0x62, - 0x03, 0xa1, 0x9b, 0xe8, 0x14, 0x14, 0x36, 0x6d, 0xa7, 0x43, 0xbd, 0xbe, 0x67, 0x2b, 0x15, 0x98, - 0xcd, 0x03, 0x3b, 0xa3, 0x5a, 0x14, 0x4c, 0xa2, 0x13, 0xf4, 0x36, 0x64, 0x07, 0x3e, 0xf5, 0xee, - 0xda, 0xf2, 0xa6, 0xe7, 0x9b, 0xa5, 0xed, 0x51, 0x2d, 0xf3, 0x03, 0x9f, 0x7a, 0x6b, 0xab, 0x3c, - 0xff, 0x0c, 0xc4, 0x88, 0xc8, 0x6f, 0x1b, 0x5d, 0x55, 0x6e, 0x2a, 0xaa, 0xb1, 0xe6, 0x77, 0x38, - 0xfb, 0xcf, 0x46, 0xb5, 0xe3, 0x91, 0x68, 0xd7, 0xf7, 0xdc, 0x1e, 0x65, 0x5b, 0x74, 0xe0, 0x37, - 0x5a, 0x6e, 0xaf, 0xe7, 0x3a, 0x0d, 0xd1, 0x5a, 0x0a, 0xa1, 0x79, 0x12, 0xe5, 0xdb, 0x95, 0xe7, - 0xde, 0x82, 0x2c, 0xdb, 0xf2, 0xdc, 0x41, 0x67, 0x4b, 0xe4, 0x86, 0x54, 0xf3, 0xdc, 0xf4, 0xf4, - 0x34, 0x05, 0xa2, 0x07, 0xe8, 0x28, 0xd7, 0x16, 0x6d, 0xdd, 0xf7, 0x07, 0x3d, 0xd9, 0xb0, 0x35, - 0xd3, 0x3b, 0xa3, 0x9a, 0xf1, 0x36, 0x09, 0xc0, 0xf8, 0x67, 0x49, 0xa8, 0x09, 0x47, 0xbd, 0x2d, - 0x8a, 0x87, 0x4b, 0xae, 0x77, 0x8d, 0x32, 0xcf, 0x6e, 0x5d, 0xb7, 0x7a, 0x54, 0xfb, 0x46, 0x0d, - 0x0a, 0x3d, 0x01, 0xbc, 0x1b, 0xb9, 0x02, 0xd0, 0x0b, 0xf0, 0xd0, 0x9b, 0x00, 0xe2, 0xce, 0xc8, - 0x75, 0x79, 0x1b, 0xf2, 0x02, 0x22, 0x96, 0x57, 0x62, 0x9a, 0x6a, 0x4c, 0x29, 0x99, 0xd2, 0xd0, - 0xda, 0xb8, 0x86, 0xa6, 0xa6, 0x13, 0xa8, 0x25, 0xea, 0xeb, 0xe9, 0xb8, 0xaf, 0xe3, 0x4f, 0x0d, - 0xa8, 0xae, 0x6b, 0xce, 0xf7, 0xa9, 0x0e, 0x2d, 0x6f, 0xf2, 0x15, 0xc9, 0x9b, 0xfa, 0x6a, 0xf2, - 0xe2, 0x2a, 0xc0, 0xba, 0xed, 0xd0, 0x4b, 0x76, 0x97, 0x51, 0x6f, 0x42, 0x4b, 0xf2, 0x8b, 0x54, - 0x18, 0x12, 0x08, 0xdd, 0xd4, 0x72, 0xae, 0x44, 0xe2, 0xf0, 0xab, 0x10, 0x23, 0xf9, 0x0a, 0xcd, - 0x96, 0x1a, 0x0b, 0x51, 0x0e, 0x64, 0x37, 0x85, 0x78, 0x32, 0xa5, 0xc6, 0x5e, 0x55, 0x42, 0xd9, - 0x9b, 0xdf, 0x53, 0x87, 0x9f, 0x7d, 0x49, 0x4d, 0x22, 0xde, 0x81, 0x1a, 0xfe, 0xd0, 0x61, 0xd6, - 0xe3, 0xc8, 0x7e, 0xa2, 0x0f, 0x41, 0x96, 0xaa, 0xbb, 0xd2, 0x13, 0xeb, 0xae, 0x0b, 0xea, 0x98, - 0xaf, 0xd4, 0x42, 0x5e, 0x08, 0x23, 0xa0, 0x30, 0x8a, 0x8a, 0x80, 0xc7, 0xc0, 0xf4, 0xe8, 0xa6, - 0x4e, 0xd5, 0x28, 0x3c, 0x39, 0xc0, 0x14, 0xeb, 0xf8, 0xaf, 0x06, 0xcc, 0x5d, 0xa6, 0x2c, 0x5e, - 0x04, 0xbd, 0x46, 0x26, 0xc5, 0x1f, 0xc2, 0xc1, 0x08, 0xff, 0x4a, 0xfa, 0xd3, 0x63, 0x95, 0xcf, - 0xa1, 0x50, 0xfe, 0x35, 0xa7, 0x4d, 0x1f, 0xab, 0xbe, 0x31, 0x5e, 0xf4, 0xdc, 0x84, 0x42, 0x64, - 0x11, 0x5d, 0x1c, 0x2b, 0x77, 0x22, 0xcf, 0x3d, 0x41, 0xca, 0x6e, 0x96, 0x94, 0x4c, 0xb2, 0x73, - 0x54, 0xf5, 0x6c, 0x50, 0x1a, 0x6c, 0x00, 0x12, 0xe6, 0x12, 0x64, 0xa3, 0xc9, 0x49, 0x40, 0xaf, - 0x06, 0x75, 0x4f, 0x30, 0x47, 0x47, 0xc1, 0xf4, 0xdc, 0x47, 0xba, 0x8e, 0x9d, 0x09, 0x8f, 0x24, - 0xee, 0x23, 0x22, 0x96, 0xf0, 0x79, 0x48, 0x11, 0xf7, 0x11, 0xaa, 0x02, 0x78, 0x96, 0xd3, 0xa1, - 0xb7, 0x83, 0x26, 0xaa, 0x48, 0x22, 0x90, 0x3d, 0x0a, 0x87, 0x15, 0x38, 0x18, 0xe5, 0x48, 0x9a, - 0xbb, 0x0e, 0xd9, 0x8f, 0x06, 0x51, 0x75, 0x95, 0xc6, 0xd4, 0x25, 0xfb, 0x71, 0x8d, 0xc4, 0x7d, - 0x06, 0x42, 0x38, 0x3a, 0x02, 0x79, 0x66, 0xdd, 0xeb, 0xd2, 0xeb, 0x61, 0x98, 0x0b, 0x01, 0x7c, - 0x95, 0xf7, 0x7f, 0xb7, 0x23, 0x15, 0x50, 0x08, 0x40, 0x27, 0x60, 0x2e, 0xe4, 0xf9, 0xa6, 0x47, - 0x37, 0xed, 0xc7, 0xc2, 0xc2, 0x45, 0xb2, 0x0b, 0x8e, 0x96, 0xe0, 0x40, 0x08, 0xdb, 0x10, 0x95, - 0x86, 0x29, 0x50, 
0xc7, 0xc1, 0x5c, 0x37, 0x42, 0xdc, 0x0f, 0x1e, 0x0c, 0xac, 0xae, 0xb8, 0x7c, - 0x45, 0x12, 0x81, 0xe0, 0xbf, 0x19, 0x70, 0x50, 0x9a, 0x9a, 0x77, 0xfe, 0xaf, 0xa3, 0xd7, 0xff, - 0xde, 0x00, 0x14, 0x95, 0x40, 0xb9, 0xd6, 0x5b, 0xd1, 0x57, 0x1d, 0x5e, 0xca, 0x14, 0x44, 0x5b, - 0x2b, 0x41, 0xe1, 0xab, 0x0c, 0x86, 0x8c, 0x28, 0x87, 0x64, 0x7f, 0x6d, 0xca, 0xbe, 0x59, 0x42, - 0x88, 0xfa, 0xf2, 0x76, 0xff, 0xde, 0x90, 0x51, 0x5f, 0x75, 0xbd, 0xa2, 0xdd, 0x17, 0x00, 0x22, - 0x3f, 0xfc, 0x2c, 0xea, 0x30, 0xe1, 0x35, 0x66, 0x78, 0x96, 0x02, 0x11, 0x3d, 0xc0, 0x7f, 0x4a, - 0xc2, 0xcc, 0x6d, 0xb7, 0x3b, 0x08, 0x13, 0xe3, 0xeb, 0x94, 0x30, 0x62, 0xad, 0x78, 0x5a, 0xb7, - 0xe2, 0x08, 0x4c, 0x9f, 0xd1, 0xbe, 0xf0, 0xac, 0x14, 0x11, 0x63, 0x84, 0xa1, 0xc8, 0x2c, 0xaf, - 0x43, 0x99, 0xec, 0x6e, 0xca, 0x19, 0x51, 0x76, 0xc6, 0x60, 0x68, 0x11, 0x0a, 0x56, 0xa7, 0xe3, - 0xd1, 0x8e, 0xc5, 0x68, 0x73, 0x58, 0xce, 0x8a, 0xc3, 0xa2, 0x20, 0xfc, 0x31, 0xcc, 0x6a, 0x65, - 0x29, 0x93, 0xbe, 0x03, 0xd9, 0x87, 0x02, 0x32, 0xe1, 0xf9, 0x4b, 0xa2, 0xaa, 0x30, 0xa6, 0xd1, - 0xe2, 0x8f, 0xe6, 0x9a, 0x67, 0x7c, 0x05, 0x32, 0x12, 0x1d, 0x1d, 0x89, 0xf6, 0x28, 0xf2, 0x9d, - 0x86, 0xcf, 0x55, 0xc3, 0x81, 0x21, 0x23, 0x09, 0x29, 0xc3, 0x0b, 0xdf, 0x90, 0x10, 0xa2, 0xbe, - 0xf8, 0x37, 0x06, 0x1c, 0x5a, 0xa5, 0x8c, 0xb6, 0x18, 0x6d, 0x5f, 0xb2, 0x69, 0xb7, 0xbd, 0xdf, - 0xf6, 0xd9, 0xd8, 0x77, 0xfb, 0x3c, 0xe9, 0x1d, 0x2c, 0x15, 0x7d, 0x07, 0x5b, 0x83, 0x85, 0x71, - 0x16, 0x95, 0x46, 0x1b, 0x90, 0xd9, 0x14, 0x90, 0xdd, 0x2f, 0x9f, 0xb1, 0x1d, 0x44, 0xa1, 0x61, - 0x0f, 0x66, 0x62, 0x0b, 0x42, 0xc3, 0xdc, 0xa2, 0x2a, 0xda, 0xc9, 0x09, 0xfa, 0x26, 0x98, 0x6c, - 0xd8, 0x57, 0x41, 0xae, 0x79, 0xe8, 0x8b, 0x51, 0xed, 0x60, 0x6c, 0xdb, 0xad, 0x61, 0x9f, 0x12, - 0x81, 0xc2, 0x1d, 0xa1, 0x65, 0x79, 0x6d, 0xdb, 0xb1, 0xba, 0x36, 0x93, 0x8c, 0x9b, 0x24, 0x0a, - 0x8a, 0xa9, 0x58, 0x7a, 0xcf, 0xff, 0x9f, 0x8a, 0x7f, 0x14, 0xaa, 0x58, 0xb3, 0xa8, 0x54, 0xfc, - 0x3e, 0xcc, 0xb6, 0x63, 0x2b, 0x7b, 0xab, 0x5a, 0x3e, 0x56, 0x8e, 0xa1, 0xe3, 0xb7, 0x42, 0x95, - 0x0b, 0xc8, 0x64, 0x95, 0x9f, 0x38, 0x06, 0xf9, 0xe0, 0xaf, 0x17, 0x54, 0x80, 0xec, 0xa5, 0x1b, - 0xe4, 0x87, 0x17, 0xc9, 0xea, 0x5c, 0x02, 0x15, 0x21, 0xd7, 0xbc, 0xb8, 0x72, 0x55, 0xcc, 0x8c, - 0xe5, 0x4f, 0xd3, 0x3a, 0xc5, 0x79, 0xe8, 0xbb, 0x90, 0x96, 0x79, 0x6b, 0x21, 0x64, 0x26, 0xfa, - 0x0f, 0x47, 0xe5, 0xf0, 0x2e, 0xb8, 0x94, 0x0a, 0x27, 0xde, 0x31, 0xd0, 0x75, 0x28, 0x08, 0xa0, - 0x7a, 0xc5, 0x3c, 0x32, 0xfe, 0x98, 0x18, 0xa3, 0xf4, 0xe6, 0x1e, 0xab, 0x11, 0x7a, 0xe7, 0x20, - 0x2d, 0x05, 0x5c, 0x18, 0x2b, 0x2f, 0x26, 0x70, 0x13, 0x7b, 0xd7, 0xc5, 0x09, 0xf4, 0x1e, 0x98, - 0xbc, 0xa3, 0x47, 0x91, 0xea, 0x26, 0xf2, 0xf8, 0x58, 0x59, 0x18, 0x07, 0x47, 0x8e, 0xbd, 0x10, - 0xbc, 0xa1, 0x1e, 0x1e, 0x7f, 0xc5, 0xd1, 0xdb, 0xcb, 0xbb, 0x17, 0x82, 0x93, 0x6f, 0xc8, 0x97, - 0x3e, 0xfd, 0x96, 0x80, 0xde, 0x8c, 0x1f, 0x35, 0xf6, 0xf4, 0x50, 0xa9, 0xee, 0xb5, 0x1c, 0x10, - 0x5c, 0x87, 0x42, 0xa4, 0x8f, 0x8f, 0xaa, 0x75, 0xf7, 0x23, 0x44, 0x54, 0xad, 0x13, 0x9a, 0x7f, - 0x9c, 0x40, 0x97, 0x21, 0xc7, 0x6b, 0x42, 0x9e, 0x1a, 0xd1, 0x1b, 0xe3, 0xa5, 0x5f, 0x24, 0xe5, - 0x57, 0x8e, 0x4c, 0x5e, 0x0c, 0x08, 0x7d, 0x1f, 0xf2, 0x97, 0x29, 0x53, 0x71, 0xf3, 0xf0, 0x78, - 0xe0, 0x9d, 0xa0, 0xa9, 0x78, 0xf0, 0xc6, 0x09, 0xf4, 0xb1, 0x28, 0x4f, 0xe3, 0x91, 0x08, 0xd5, - 0xf6, 0x88, 0x38, 0x01, 0x5f, 0x8b, 0x7b, 0x23, 0x68, 0xca, 0xcb, 0x77, 0xf4, 0x9f, 0xbd, 0xab, - 0x16, 0xb3, 0xd0, 0x0d, 0x98, 0x15, 0x22, 0x07, 0xff, 0x06, 0xc7, 0x5c, 0x73, 0xd7, 0x5f, 0xcf, - 0x31, 0xd7, 0xdc, 0xfd, 0x17, 0x34, 0x4e, 
0x34, 0xef, 0x3c, 0x7d, 0x5e, 0x4d, 0x7c, 0xf6, 0xbc, - 0x9a, 0xf8, 0xfc, 0x79, 0xd5, 0xf8, 0xe9, 0x76, 0xd5, 0xf8, 0xc3, 0x76, 0xd5, 0x78, 0xb2, 0x5d, - 0x35, 0x9e, 0x6e, 0x57, 0x8d, 0x7f, 0x6d, 0x57, 0x8d, 0x7f, 0x6f, 0x57, 0x13, 0x9f, 0x6f, 0x57, - 0x8d, 0x4f, 0x5e, 0x54, 0x13, 0x4f, 0x5f, 0x54, 0x13, 0x9f, 0xbd, 0xa8, 0x26, 0x7e, 0x7c, 0xfc, - 0xe5, 0x1d, 0x93, 0x8c, 0x36, 0x19, 0xf1, 0x39, 0xfd, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x15, - 0x81, 0x7e, 0x4a, 0xae, 0x1f, 0x00, 0x00, + // 2475 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x39, 0xcb, 0x8f, 0x13, 0xc9, + 0xf9, 0x6e, 0xbb, 0xfd, 0xfa, 0xec, 0x19, 0x86, 0x1a, 0x33, 0x58, 0x5e, 0xd6, 0x1e, 0x4a, 0xbf, + 0x85, 0xf9, 0x11, 0xd6, 0x5e, 0x86, 0x40, 0x76, 0x21, 0x64, 0x83, 0x67, 0x16, 0x76, 0x60, 0x78, + 0x6c, 0x0d, 0x21, 0x9b, 0x48, 0x08, 0x35, 0x76, 0x8d, 0xa7, 0x45, 0xbb, 0xdb, 0x74, 0x97, 0x01, + 0x4b, 0x39, 0xe4, 0x1f, 0x88, 0xb2, 0x52, 0x0e, 0x51, 0x2e, 0x91, 0xa2, 0x44, 0x4a, 0x94, 0x5c, + 0xa2, 0xfc, 0x01, 0xc9, 0x25, 0x07, 0x72, 0x63, 0x6f, 0x2b, 0x0e, 0x4e, 0x18, 0x2e, 0xd1, 0x9c, + 0x56, 0xca, 0x6d, 0x4f, 0x51, 0x3d, 0xfa, 0x39, 0x9e, 0xb0, 0x9e, 0x65, 0xb5, 0xe2, 0x62, 0x57, + 0x7d, 0xf5, 0xd5, 0x57, 0xf5, 0x3d, 0xea, 0x7b, 0x35, 0xbc, 0x31, 0xb8, 0xdf, 0x6b, 0x59, 0x4e, + 0x6f, 0xe0, 0x3a, 0xcc, 0x09, 0x06, 0x4d, 0xf1, 0x8b, 0x0a, 0xfe, 0xbc, 0x56, 0xe9, 0x39, 0x3d, + 0x47, 0xe2, 0xf0, 0x91, 0x5c, 0xaf, 0x35, 0x7a, 0x8e, 0xd3, 0xb3, 0x68, 0x4b, 0xcc, 0xee, 0x0d, + 0x37, 0x5b, 0xcc, 0xec, 0x53, 0x8f, 0x19, 0xfd, 0x81, 0x42, 0x58, 0x54, 0xd4, 0x1f, 0x58, 0x7d, + 0xa7, 0x4b, 0xad, 0x96, 0xc7, 0x0c, 0xe6, 0xc9, 0x5f, 0x85, 0x31, 0xcf, 0x31, 0x06, 0x43, 0x6f, + 0x4b, 0xfc, 0x48, 0x20, 0xae, 0x00, 0xda, 0x60, 0x2e, 0x35, 0xfa, 0xc4, 0x60, 0xd4, 0x23, 0xf4, + 0xc1, 0x90, 0x7a, 0x0c, 0x5f, 0x83, 0xf9, 0x18, 0xd4, 0x1b, 0x38, 0xb6, 0x47, 0xd1, 0x59, 0x28, + 0x79, 0x21, 0xb8, 0xaa, 0x2d, 0x66, 0x96, 0x4a, 0xcb, 0x95, 0x66, 0xc0, 0x4a, 0xb8, 0x87, 0x44, + 0x11, 0xf1, 0xaf, 0x35, 0x80, 0x70, 0x0d, 0xd5, 0x01, 0xe4, 0xea, 0x87, 0x86, 0xb7, 0x55, 0xd5, + 0x16, 0xb5, 0x25, 0x9d, 0x44, 0x20, 0xe8, 0x24, 0x1c, 0x0c, 0x67, 0xd7, 0x9d, 0x8d, 0x2d, 0xc3, + 0xed, 0x56, 0xd3, 0x02, 0x6d, 0xf7, 0x02, 0x42, 0xa0, 0xbb, 0x06, 0xa3, 0xd5, 0xcc, 0xa2, 0xb6, + 0x94, 0x21, 0x62, 0x8c, 0x16, 0x20, 0xc7, 0xa8, 0x6d, 0xd8, 0xac, 0xaa, 0x2f, 0x6a, 0x4b, 0x45, + 0xa2, 0x66, 0x1c, 0xce, 0x79, 0xa7, 0x5e, 0x35, 0xbb, 0xa8, 0x2d, 0xcd, 0x10, 0x35, 0xc3, 0x7f, + 0xcc, 0x40, 0xf9, 0xa3, 0x21, 0x75, 0x47, 0x4a, 0x00, 0xa8, 0x0e, 0x05, 0x8f, 0x5a, 0xb4, 0xc3, + 0x1c, 0x57, 0x5c, 0xb0, 0xd8, 0x4e, 0x57, 0x35, 0x12, 0xc0, 0x50, 0x05, 0xb2, 0x96, 0xd9, 0x37, + 0x99, 0xb8, 0xd6, 0x0c, 0x91, 0x13, 0x74, 0x0e, 0xb2, 0x1e, 0x33, 0x5c, 0x26, 0xee, 0x52, 0x5a, + 0xae, 0x35, 0xa5, 0xd2, 0x9a, 0xbe, 0xd2, 0x9a, 0xb7, 0x7c, 0xa5, 0xb5, 0x0b, 0x4f, 0xc6, 0x8d, + 0xd4, 0x27, 0xff, 0x6c, 0x68, 0x44, 0x6e, 0x41, 0x67, 0x21, 0x43, 0xed, 0xae, 0xb8, 0xef, 0x97, + 0xdd, 0xc9, 0x37, 0xa0, 0x53, 0x50, 0xec, 0x9a, 0x2e, 0xed, 0x30, 0xd3, 0xb1, 0x05, 0x57, 0xb3, + 0xcb, 0xf3, 0xa1, 0x46, 0x56, 0xfd, 0x25, 0x12, 0x62, 0xa1, 0x93, 0x90, 0xf3, 0xb8, 0xe8, 0xbc, + 0x6a, 0x7e, 0x31, 0xb3, 0x54, 0x6c, 0x57, 0x76, 0xc6, 0x8d, 0x39, 0x09, 0x39, 0xe9, 0xf4, 0x4d, + 0x46, 0xfb, 0x03, 0x36, 0x22, 0x0a, 0x07, 0x9d, 0x80, 0x7c, 0x97, 0x5a, 0x94, 0x2b, 0xbc, 0x20, + 0x14, 0x3e, 0x17, 0x21, 0x2f, 0x16, 0x88, 0x8f, 0x80, 0xee, 0x80, 0x3e, 0xb0, 0x0c, 0xbb, 0x5a, + 0x14, 0x5c, 0xcc, 0x86, 0x88, 0x37, 0x2d, 0xc3, 0x6e, 0xbf, 0xf7, 0x6c, 
0xdc, 0x38, 0xd3, 0x33, + 0xd9, 0xd6, 0xf0, 0x5e, 0xb3, 0xe3, 0xf4, 0x5b, 0x3d, 0xd7, 0xd8, 0x34, 0x6c, 0xa3, 0x65, 0x39, + 0xf7, 0xcd, 0xd6, 0xc3, 0xd3, 0x2d, 0x6e, 0x9f, 0x0f, 0x86, 0xd4, 0x35, 0xa9, 0xdb, 0xe2, 0x64, + 0x9a, 0x42, 0x25, 0x7c, 0x2b, 0x11, 0x64, 0xaf, 0xe8, 0x85, 0xdc, 0x5c, 0x1e, 0x3f, 0x4f, 0x03, + 0xda, 0x30, 0xfa, 0x03, 0x8b, 0x4e, 0xa5, 0xb2, 0x40, 0x39, 0xe9, 0x7d, 0x2b, 0x27, 0x33, 0xad, + 0x72, 0x42, 0x49, 0xeb, 0xd3, 0x49, 0x3a, 0xfb, 0x65, 0x25, 0x9d, 0xfb, 0x5a, 0x24, 0x8d, 0xab, + 0xa0, 0xf3, 0x19, 0x9a, 0x83, 0x8c, 0x6b, 0x3c, 0x12, 0xf2, 0x2c, 0x13, 0x3e, 0xc4, 0xeb, 0x90, + 0x93, 0x77, 0x41, 0xb5, 0xa4, 0xc0, 0xe3, 0xef, 0x23, 0x14, 0x76, 0xc6, 0x17, 0xe3, 0x5c, 0x28, + 0xc6, 0x8c, 0x10, 0x10, 0xfe, 0xad, 0x06, 0x33, 0x4a, 0x8b, 0xca, 0xc7, 0x50, 0xc8, 0xcb, 0x37, + 0xee, 0xfb, 0x97, 0xc3, 0x49, 0xff, 0x72, 0xb1, 0x6b, 0x0c, 0x18, 0x75, 0xdb, 0xcb, 0x4f, 0xc6, + 0x0d, 0xed, 0xd9, 0xb8, 0x71, 0xe2, 0x25, 0x8c, 0x0a, 0x6f, 0xa7, 0xdc, 0x8f, 0x4f, 0x1b, 0x7d, + 0x4b, 0x5c, 0x90, 0x79, 0xca, 0x1a, 0x0e, 0x34, 0xa5, 0xa7, 0x5c, 0xb3, 0x7b, 0xd4, 0xe3, 0xc4, + 0x75, 0xae, 0x48, 0x22, 0x71, 0xf0, 0x4f, 0x60, 0x3e, 0x66, 0x70, 0xea, 0xaa, 0xef, 0x42, 0xce, + 0xe3, 0x32, 0xf4, 0x6f, 0x1a, 0x51, 0xd7, 0x86, 0x80, 0xb7, 0x67, 0xd5, 0x15, 0x73, 0x72, 0x4e, + 0x14, 0xfe, 0x74, 0xa7, 0xff, 0x5d, 0x83, 0xf2, 0xba, 0x71, 0x8f, 0x5a, 0xbe, 0xa5, 0x23, 0xd0, + 0x6d, 0xa3, 0x4f, 0x95, 0xd0, 0xc5, 0x98, 0x7b, 0xb6, 0x87, 0x86, 0x35, 0xa4, 0x92, 0x64, 0x81, + 0xa8, 0xd9, 0xb4, 0x2e, 0x49, 0xdb, 0xb7, 0x4b, 0xd2, 0x42, 0xab, 0xaf, 0x40, 0x96, 0x1b, 0xd7, + 0x48, 0xb8, 0xa3, 0x22, 0x91, 0x13, 0x7c, 0x1c, 0x66, 0x14, 0x17, 0x4a, 0x7c, 0xe1, 0x95, 0xb9, + 0xf8, 0x8a, 0xfe, 0x95, 0x71, 0x1f, 0x72, 0x52, 0xda, 0xe8, 0xff, 0xa0, 0x18, 0x84, 0x39, 0xc1, + 0x6d, 0xa6, 0x9d, 0xdb, 0x19, 0x37, 0xd2, 0xcc, 0x23, 0xe1, 0x02, 0x6a, 0x40, 0x56, 0xec, 0x14, + 0x9c, 0x6b, 0xed, 0xe2, 0xce, 0xb8, 0x21, 0x01, 0x44, 0xfe, 0xa1, 0x23, 0xa0, 0x6f, 0xf1, 0x48, + 0xc3, 0x45, 0xa0, 0xb7, 0x0b, 0x3b, 0xe3, 0x86, 0x98, 0x13, 0xf1, 0x8b, 0x2f, 0x43, 0x79, 0x9d, + 0xf6, 0x8c, 0xce, 0x48, 0x1d, 0x5a, 0xf1, 0xc9, 0xf1, 0x03, 0x35, 0x9f, 0xc6, 0x51, 0x28, 0x07, + 0x27, 0xde, 0xed, 0x7b, 0xca, 0xae, 0x4b, 0x01, 0xec, 0x9a, 0x87, 0x7f, 0xa5, 0x81, 0xd2, 0x33, + 0xc2, 0x90, 0xb3, 0x38, 0xaf, 0x9e, 0xf2, 0x44, 0xb0, 0x33, 0x6e, 0x28, 0x08, 0x51, 0xff, 0xe8, + 0x3c, 0xe4, 0x3d, 0x71, 0x22, 0x27, 0x96, 0x34, 0x1f, 0xb1, 0xd0, 0x3e, 0xc0, 0xcd, 0x60, 0x67, + 0xdc, 0xf0, 0x11, 0x89, 0x3f, 0x40, 0xcd, 0x58, 0x08, 0x95, 0x8c, 0xcd, 0xee, 0x8c, 0x1b, 0x11, + 0x68, 0x34, 0xa4, 0xe2, 0x2f, 0x34, 0x28, 0xdd, 0x32, 0xcc, 0xc0, 0x84, 0xaa, 0xbe, 0x8a, 0x42, + 0x4f, 0x29, 0x01, 0xfc, 0x55, 0x77, 0xa9, 0x65, 0x8c, 0x2e, 0x39, 0xae, 0xa0, 0x3b, 0x43, 0x82, + 0x79, 0x18, 0xf5, 0xf4, 0x89, 0x51, 0x2f, 0x3b, 0xbd, 0x63, 0xfd, 0x7a, 0xdd, 0xd8, 0x15, 0xbd, + 0x90, 0x9e, 0xcb, 0xe0, 0x3f, 0x6b, 0x50, 0x96, 0xcc, 0x2b, 0xcb, 0xbb, 0x0b, 0x39, 0x29, 0x1b, + 0xc1, 0xfe, 0xff, 0x70, 0x31, 0xcd, 0x29, 0xdd, 0x8b, 0x22, 0x8b, 0xde, 0x87, 0xd9, 0xae, 0xeb, + 0x0c, 0x06, 0xb4, 0xbb, 0xa1, 0x7c, 0x59, 0x3a, 0xe9, 0xcb, 0x56, 0xa3, 0xeb, 0x24, 0x81, 0x8e, + 0xff, 0xa1, 0xc1, 0x8c, 0xf2, 0x19, 0x4a, 0x63, 0x81, 0x94, 0xb5, 0x7d, 0x87, 0xaf, 0xf4, 0xb4, + 0xe1, 0x6b, 0x01, 0x72, 0x3d, 0xd7, 0x19, 0x0e, 0xbc, 0x6a, 0x46, 0xbe, 0x50, 0x39, 0x9b, 0x2e, + 0xac, 0xe1, 0x2b, 0x30, 0xeb, 0xb3, 0xb2, 0x87, 0xe3, 0xac, 0x25, 0x1d, 0xe7, 0x5a, 0x97, 0xda, + 0xcc, 0xdc, 0x34, 0x03, 0x57, 0xa8, 0xf0, 0xf1, 0xcf, 0x35, 0x98, 0x4b, 0xa2, 0xa0, 0xd5, 0xc8, + 
0x6b, 0xe3, 0xe4, 0x8e, 0xed, 0x4d, 0xae, 0x29, 0x5c, 0x90, 0xf7, 0x81, 0xcd, 0xdc, 0x91, 0x4f, + 0x5a, 0xee, 0xad, 0x9d, 0x81, 0x52, 0x64, 0x91, 0xc7, 0xaa, 0xfb, 0x54, 0xbd, 0x0f, 0xc2, 0x87, + 0xa1, 0x63, 0x48, 0x4b, 0xb7, 0x26, 0x26, 0xf8, 0x97, 0x1a, 0xcc, 0xc4, 0x74, 0x89, 0xde, 0x05, + 0x7d, 0xd3, 0x75, 0xfa, 0x53, 0x29, 0x4a, 0xec, 0x40, 0xdf, 0x86, 0x34, 0x73, 0xa6, 0x52, 0x53, + 0x9a, 0x39, 0x5c, 0x4b, 0x8a, 0xfd, 0x8c, 0x4c, 0x76, 0xe5, 0x0c, 0x9f, 0x81, 0xa2, 0x60, 0xe8, + 0xa6, 0x61, 0xba, 0x13, 0x63, 0xc6, 0x64, 0x86, 0xce, 0xc3, 0x01, 0xe9, 0x0f, 0x27, 0x6f, 0x2e, + 0x4f, 0xda, 0x5c, 0xf6, 0x37, 0xbf, 0x01, 0xd9, 0x95, 0xad, 0xa1, 0x7d, 0x9f, 0x6f, 0xe9, 0x1a, + 0xcc, 0xf0, 0xb7, 0xf0, 0x31, 0x3e, 0x04, 0xf3, 0xfc, 0x19, 0x52, 0xd7, 0x5b, 0x71, 0x86, 0x36, + 0xf3, 0x8b, 0x8d, 0x93, 0x50, 0x89, 0x83, 0x95, 0x95, 0x54, 0x20, 0xdb, 0xe1, 0x00, 0x41, 0x63, + 0x86, 0xc8, 0x09, 0xfe, 0x9d, 0x06, 0xe8, 0x32, 0x65, 0xe2, 0x94, 0xb5, 0xd5, 0xe0, 0x79, 0xd4, + 0xa0, 0xd0, 0x37, 0x58, 0x67, 0x8b, 0xba, 0x9e, 0x9f, 0x8c, 0xf8, 0xf3, 0x6f, 0x22, 0xf3, 0xc3, + 0xa7, 0x60, 0x3e, 0x76, 0x4b, 0xc5, 0x53, 0x0d, 0x0a, 0x1d, 0x05, 0x53, 0x51, 0x2f, 0x98, 0xe3, + 0xbf, 0xa4, 0xa1, 0x20, 0x36, 0x10, 0xba, 0x89, 0x4e, 0x41, 0x69, 0xd3, 0xb4, 0x7b, 0xd4, 0x1d, + 0xb8, 0xa6, 0x12, 0x81, 0xde, 0x3e, 0xb0, 0x33, 0x6e, 0x44, 0xc1, 0x24, 0x3a, 0x41, 0x6f, 0x43, + 0x7e, 0xe8, 0x51, 0xf7, 0xae, 0x29, 0x5f, 0x7a, 0xb1, 0x5d, 0xd9, 0x1e, 0x37, 0x72, 0x3f, 0xf0, + 0xa8, 0xbb, 0xb6, 0xca, 0xe3, 0xcf, 0x50, 0x8c, 0x88, 0xfc, 0xef, 0xa2, 0xab, 0xca, 0x4c, 0x45, + 0x36, 0xd6, 0xfe, 0x0e, 0xbf, 0xfe, 0xb3, 0x71, 0xe3, 0x78, 0xc4, 0xdb, 0x0d, 0x5c, 0xa7, 0x4f, + 0xd9, 0x16, 0x1d, 0x7a, 0xad, 0x8e, 0xd3, 0xef, 0x3b, 0x76, 0x4b, 0x94, 0x96, 0x82, 0x69, 0x1e, + 0x44, 0xf9, 0x76, 0x65, 0xb9, 0xb7, 0x20, 0xcf, 0xb6, 0x5c, 0x67, 0xd8, 0xdb, 0x12, 0xb1, 0x21, + 0xd3, 0x3e, 0x37, 0x3d, 0x3d, 0x9f, 0x02, 0xf1, 0x07, 0xe8, 0x28, 0x97, 0x16, 0xed, 0xdc, 0xf7, + 0x86, 0x7d, 0x59, 0xb0, 0xb5, 0xb3, 0x3b, 0xe3, 0x86, 0xf6, 0x36, 0x09, 0xc0, 0xf8, 0x67, 0x69, + 0x68, 0x08, 0x43, 0xbd, 0x2d, 0x92, 0x87, 0x4b, 0x8e, 0x7b, 0x8d, 0x32, 0xd7, 0xec, 0x5c, 0x37, + 0xfa, 0xd4, 0xb7, 0x8d, 0x06, 0x94, 0xfa, 0x02, 0x78, 0x37, 0xf2, 0x04, 0xa0, 0x1f, 0xe0, 0xa1, + 0x37, 0x01, 0xc4, 0x9b, 0x91, 0xeb, 0xf2, 0x35, 0x14, 0x05, 0x44, 0x2c, 0xaf, 0xc4, 0x24, 0xd5, + 0x9a, 0x92, 0x33, 0x25, 0xa1, 0xb5, 0xa4, 0x84, 0xa6, 0xa6, 0x13, 0x88, 0x25, 0x6a, 0xeb, 0xd9, + 0xb8, 0xad, 0xe3, 0x4f, 0x35, 0xa8, 0xaf, 0xfb, 0x37, 0xdf, 0xa7, 0x38, 0x7c, 0x7e, 0xd3, 0xaf, + 0x88, 0xdf, 0xcc, 0x57, 0xe3, 0x17, 0xd7, 0x01, 0xd6, 0x4d, 0x9b, 0x5e, 0x32, 0x2d, 0x46, 0xdd, + 0x09, 0x25, 0xc9, 0x2f, 0x32, 0xa1, 0x4b, 0x20, 0x74, 0xd3, 0xe7, 0x73, 0x25, 0xe2, 0x87, 0x5f, + 0x05, 0x1b, 0xe9, 0x57, 0xa8, 0xb6, 0x4c, 0xc2, 0x45, 0xd9, 0x90, 0xdf, 0x14, 0xec, 0xc9, 0x90, + 0x1a, 0xeb, 0xaa, 0x84, 0xbc, 0xb7, 0xbf, 0xa7, 0x0e, 0x3f, 0xfb, 0x92, 0x9c, 0x44, 0xf4, 0x81, + 0x5a, 0xde, 0xc8, 0x66, 0xc6, 0xe3, 0xc8, 0x7e, 0xe2, 0x1f, 0x82, 0x0c, 0x95, 0x77, 0x65, 0x27, + 0xe6, 0x5d, 0x17, 0xd4, 0x31, 0x5f, 0xa9, 0x84, 0xbc, 0x10, 0x7a, 0x40, 0xa1, 0x14, 0xe5, 0x01, + 0x8f, 0x81, 0xee, 0xd2, 0x4d, 0x3f, 0x54, 0xa3, 0xf0, 0xe4, 0x00, 0x53, 0xac, 0xe3, 0xbf, 0x6a, + 0x30, 0x77, 0x99, 0xb2, 0x78, 0x12, 0xf4, 0x1a, 0xa9, 0x14, 0x7f, 0x08, 0x07, 0x23, 0xf7, 0x57, + 0xdc, 0x9f, 0x4e, 0x64, 0x3e, 0x87, 0x42, 0xfe, 0xd7, 0xec, 0x2e, 0x7d, 0xac, 0xea, 0xc6, 0x78, + 0xd2, 0x73, 0x13, 0x4a, 0x91, 0x45, 0x74, 0x31, 0x91, 0xee, 0x44, 0xda, 0x3d, 0x41, 0xc8, 0x6e, + 0x57, 0x14, 0x4f, 0xb2, 
0x72, 0x54, 0xf9, 0x6c, 0x90, 0x1a, 0x6c, 0x00, 0x12, 0xea, 0x12, 0x64, + 0xa3, 0xc1, 0x49, 0x40, 0xaf, 0x06, 0x79, 0x4f, 0x30, 0x47, 0x47, 0x41, 0x77, 0x9d, 0x47, 0x7e, + 0x1e, 0x3b, 0x13, 0x1e, 0x49, 0x9c, 0x47, 0x44, 0x2c, 0xe1, 0xf3, 0x90, 0x21, 0xce, 0x23, 0x54, + 0x07, 0x70, 0x0d, 0xbb, 0x47, 0x6f, 0x07, 0x45, 0x54, 0x99, 0x44, 0x20, 0x7b, 0x24, 0x0e, 0x2b, + 0x70, 0x30, 0x7a, 0x23, 0xa9, 0xee, 0x26, 0xe4, 0x3f, 0x1a, 0x46, 0xc5, 0x55, 0x49, 0x88, 0x4b, + 0xd6, 0xe3, 0x3e, 0x12, 0xb7, 0x19, 0x08, 0xe1, 0xe8, 0x08, 0x14, 0x99, 0x71, 0xcf, 0xa2, 0xd7, + 0x43, 0x37, 0x17, 0x02, 0xf8, 0x2a, 0xaf, 0xff, 0x6e, 0x47, 0x32, 0xa0, 0x10, 0x80, 0x4e, 0xc0, + 0x5c, 0x78, 0xe7, 0x9b, 0x2e, 0xdd, 0x34, 0x1f, 0x0b, 0x0d, 0x97, 0xc9, 0x2e, 0x38, 0x5a, 0x82, + 0x03, 0x21, 0x6c, 0x43, 0x64, 0x1a, 0xba, 0x40, 0x4d, 0x82, 0xb9, 0x6c, 0x04, 0xbb, 0x1f, 0x3c, + 0x18, 0x1a, 0x96, 0x78, 0x7c, 0x65, 0x12, 0x81, 0xe0, 0xbf, 0x69, 0x70, 0x50, 0xaa, 0x9a, 0x57, + 0xfe, 0xaf, 0xa3, 0xd5, 0xff, 0x5e, 0x03, 0x14, 0xe5, 0x40, 0x99, 0xd6, 0x5b, 0xd1, 0xae, 0x0e, + 0x4f, 0x65, 0x4a, 0xa2, 0xac, 0x95, 0xa0, 0xb0, 0x2b, 0x83, 0x21, 0x27, 0xd2, 0x21, 0x59, 0x5f, + 0xeb, 0xb2, 0x6e, 0x96, 0x10, 0xa2, 0xfe, 0x79, 0xb9, 0x7f, 0x6f, 0xc4, 0xa8, 0xa7, 0xaa, 0x5e, + 0x51, 0xee, 0x0b, 0x00, 0x91, 0x7f, 0xfc, 0x2c, 0x6a, 0x33, 0x61, 0x35, 0x7a, 0x78, 0x96, 0x02, + 0x11, 0x7f, 0x80, 0xff, 0x94, 0x86, 0x99, 0xdb, 0x8e, 0x35, 0x0c, 0x03, 0xe3, 0xeb, 0x14, 0x30, + 0x62, 0xa5, 0x78, 0xd6, 0x2f, 0xc5, 0x11, 0xe8, 0x1e, 0xa3, 0x03, 0x61, 0x59, 0x19, 0x22, 0xc6, + 0x08, 0x43, 0x99, 0x19, 0x6e, 0x8f, 0x32, 0x59, 0xdd, 0x54, 0x73, 0x22, 0xed, 0x8c, 0xc1, 0xd0, + 0x22, 0x94, 0x8c, 0x5e, 0xcf, 0xa5, 0x3d, 0x83, 0xd1, 0xf6, 0xa8, 0x9a, 0x17, 0x87, 0x45, 0x41, + 0xf8, 0x63, 0x98, 0xf5, 0x85, 0xa5, 0x54, 0xfa, 0x0e, 0xe4, 0x1f, 0x0a, 0xc8, 0x84, 0xf6, 0x97, + 0x44, 0x55, 0x6e, 0xcc, 0x47, 0x8b, 0x37, 0xcd, 0xfd, 0x3b, 0xe3, 0x2b, 0x90, 0x93, 0xe8, 0xe8, + 0x48, 0xb4, 0x46, 0x91, 0x7d, 0x1a, 0x3e, 0x57, 0x05, 0x07, 0x86, 0x9c, 0x24, 0xa4, 0x14, 0x2f, + 0x6c, 0x43, 0x42, 0x88, 0xfa, 0xc7, 0xff, 0xd1, 0xe0, 0xd0, 0x2a, 0x65, 0xb4, 0xc3, 0x68, 0xf7, + 0x92, 0x49, 0xad, 0xee, 0x37, 0x5a, 0x3e, 0x07, 0x7d, 0xb0, 0x4c, 0xa4, 0x0f, 0xc6, 0xfd, 0x8e, + 0x65, 0xda, 0x74, 0x3d, 0xd2, 0x48, 0x09, 0x01, 0xdc, 0x43, 0x6c, 0xf2, 0x8b, 0xcb, 0x65, 0xf9, + 0x95, 0x22, 0x02, 0x09, 0x34, 0x9c, 0x0b, 0x35, 0x8c, 0xd7, 0x60, 0x21, 0xc9, 0xb4, 0xd2, 0x51, + 0x0b, 0x72, 0x62, 0xef, 0x84, 0x5e, 0x6a, 0x6c, 0x07, 0x51, 0x68, 0xd8, 0x85, 0x99, 0xd8, 0x82, + 0xd0, 0x19, 0xb7, 0x11, 0xe5, 0x3f, 0xe5, 0x04, 0xfd, 0x3f, 0xe8, 0x6c, 0x34, 0x50, 0x6e, 0xb3, + 0x7d, 0xe8, 0x8b, 0x71, 0xe3, 0x60, 0x6c, 0xdb, 0xad, 0xd1, 0x80, 0x12, 0x81, 0xc2, 0x4d, 0xab, + 0x63, 0xb8, 0x5d, 0xd3, 0x36, 0x2c, 0x93, 0x49, 0x51, 0xe8, 0x24, 0x0a, 0xc2, 0xbf, 0x89, 0x28, + 0x4d, 0xda, 0xe3, 0x3e, 0x95, 0xa6, 0xed, 0x5b, 0x69, 0xda, 0x4b, 0x94, 0x86, 0x7f, 0x14, 0x8a, + 0xd8, 0xbf, 0xa2, 0x12, 0xf1, 0xfb, 0x30, 0xdb, 0x8d, 0xad, 0xec, 0x2d, 0x6a, 0xd9, 0xfe, 0x4c, + 0xa0, 0xe3, 0xb7, 0x42, 0x91, 0x0b, 0xc8, 0x64, 0x91, 0x9f, 0x38, 0x06, 0xc5, 0xe0, 0x63, 0x0e, + 0x2a, 0x41, 0xfe, 0xd2, 0x0d, 0xf2, 0xc3, 0x8b, 0x64, 0x75, 0x2e, 0x85, 0xca, 0x50, 0x68, 0x5f, + 0x5c, 0xb9, 0x2a, 0x66, 0xda, 0xf2, 0xa7, 0x59, 0x3f, 0x68, 0xba, 0xe8, 0xbb, 0x90, 0x95, 0x91, + 0x70, 0x21, 0xbc, 0x4c, 0xf4, 0x9b, 0x49, 0xed, 0xf0, 0x2e, 0xb8, 0xe4, 0x0a, 0xa7, 0xde, 0xd1, + 0xd0, 0x75, 0x28, 0x09, 0xa0, 0xea, 0x8b, 0x1e, 0x49, 0xb6, 0x27, 0x63, 0x94, 0xde, 0xdc, 0x63, + 0x35, 0x42, 0xef, 0x1c, 0x64, 0x25, 0x83, 0x0b, 
0x89, 0x84, 0x65, 0xc2, 0x6d, 0x62, 0x9d, 0x62, + 0x9c, 0x42, 0xef, 0x81, 0x7e, 0xcb, 0x30, 0x2d, 0x14, 0xc9, 0x97, 0x22, 0xed, 0xcc, 0xda, 0x42, + 0x12, 0x1c, 0x39, 0xf6, 0x42, 0xd0, 0x95, 0x3d, 0x9c, 0xec, 0x0b, 0xf9, 0xdb, 0xab, 0xbb, 0x17, + 0x82, 0x93, 0x6f, 0xc8, 0xde, 0xa1, 0xdf, 0x9d, 0x40, 0x6f, 0xc6, 0x8f, 0x4a, 0x34, 0x33, 0x6a, + 0xf5, 0xbd, 0x96, 0x03, 0x82, 0xeb, 0x50, 0x8a, 0x74, 0x06, 0xa2, 0x62, 0xdd, 0xdd, 0xd6, 0x88, + 0x8a, 0x75, 0x42, 0x3b, 0x01, 0xa7, 0xd0, 0x65, 0x28, 0xf0, 0x2c, 0x93, 0x07, 0x5b, 0xf4, 0x46, + 0x32, 0x99, 0x8c, 0x24, 0x11, 0xb5, 0x23, 0x93, 0x17, 0x03, 0x42, 0xdf, 0x87, 0xe2, 0x65, 0xca, + 0x94, 0x27, 0x3e, 0x9c, 0x74, 0xe5, 0x13, 0x24, 0x15, 0x0f, 0x07, 0x38, 0x85, 0x3e, 0x16, 0x09, + 0x6f, 0xdc, 0x13, 0xa1, 0xc6, 0x1e, 0x1e, 0x27, 0xb8, 0xd7, 0xe2, 0xde, 0x08, 0x3e, 0xe5, 0xe5, + 0x3b, 0xfe, 0xe7, 0xe3, 0x55, 0x83, 0x19, 0xe8, 0x06, 0xcc, 0x0a, 0x96, 0x83, 0xef, 0xcb, 0x31, + 0xd3, 0xdc, 0xf5, 0x31, 0x3b, 0x66, 0x9a, 0xbb, 0x3f, 0x6a, 0xe3, 0x54, 0xfb, 0xce, 0xd3, 0xe7, + 0xf5, 0xd4, 0x67, 0xcf, 0xeb, 0xa9, 0xcf, 0x9f, 0xd7, 0xb5, 0x9f, 0x6e, 0xd7, 0xb5, 0x3f, 0x6c, + 0xd7, 0xb5, 0x27, 0xdb, 0x75, 0xed, 0xe9, 0x76, 0x5d, 0xfb, 0xd7, 0x76, 0x5d, 0xfb, 0xf7, 0x76, + 0x3d, 0xf5, 0xf9, 0x76, 0x5d, 0xfb, 0xe4, 0x45, 0x3d, 0xf5, 0xf4, 0x45, 0x3d, 0xf5, 0xd9, 0x8b, + 0x7a, 0xea, 0xc7, 0xc7, 0x5f, 0x5e, 0x83, 0x49, 0x6f, 0x93, 0x13, 0x7f, 0xa7, 0xff, 0x1b, 0x00, + 0x00, 0xff, 0xff, 0x55, 0x29, 0x90, 0x4c, 0x00, 0x20, 0x00, 0x00, } func (x Direction) String() string { @@ -4533,23 +4559,24 @@ func (this *DetectedFieldsRequest) Equal(that interface{}) bool { } else if this == nil { return false } - if that1.Start == nil { - if this.Start != nil { - return false - } - } else if !this.Start.Equal(*that1.Start) { + if !this.Start.Equal(that1.Start) { return false } - if that1.End == nil { - if this.End != nil { - return false - } - } else if !this.End.Equal(*that1.End) { + if !this.End.Equal(that1.End) { return false } if this.Query != that1.Query { return false } + if this.LineLimit != that1.LineLimit { + return false + } + if this.FieldLimit != that1.FieldLimit { + return false + } + if this.Step != that1.Step { + return false + } return true } func (this *DetectedFieldsResponse) Equal(that interface{}) bool { @@ -5287,11 +5314,14 @@ func (this *DetectedFieldsRequest) GoString() string { if this == nil { return "nil" } - s := make([]string, 0, 7) + s := make([]string, 0, 10) s = append(s, "&logproto.DetectedFieldsRequest{") s = append(s, "Start: "+fmt.Sprintf("%#v", this.Start)+",\n") s = append(s, "End: "+fmt.Sprintf("%#v", this.End)+",\n") s = append(s, "Query: "+fmt.Sprintf("%#v", this.Query)+",\n") + s = append(s, "LineLimit: "+fmt.Sprintf("%#v", this.LineLimit)+",\n") + s = append(s, "FieldLimit: "+fmt.Sprintf("%#v", this.FieldLimit)+",\n") + s = append(s, "Step: "+fmt.Sprintf("%#v", this.Step)+",\n") s = append(s, "}") return strings.Join(s, "") } @@ -7962,6 +7992,21 @@ func (m *DetectedFieldsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Step != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.Step)) + i-- + dAtA[i] = 0x30 + } + if m.FieldLimit != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.FieldLimit)) + i-- + dAtA[i] = 0x28 + } + if m.LineLimit != 0 { + i = encodeVarintLogproto(dAtA, i, uint64(m.LineLimit)) + i-- + dAtA[i] = 0x20 + } if len(m.Query) > 0 { i -= len(m.Query) copy(dAtA[i:], m.Query) @@ -7969,26 +8014,22 @@ func (m *DetectedFieldsRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- 
dAtA[i] = 0x1a } - if m.End != nil { - n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.End, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.End):]) - if err21 != nil { - return 0, err21 - } - i -= n21 - i = encodeVarintLogproto(dAtA, i, uint64(n21)) - i-- - dAtA[i] = 0x12 + n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.End, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.End):]) + if err21 != nil { + return 0, err21 } - if m.Start != nil { - n22, err22 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Start, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start):]) - if err22 != nil { - return 0, err22 - } - i -= n22 - i = encodeVarintLogproto(dAtA, i, uint64(n22)) - i-- - dAtA[i] = 0xa + i -= n21 + i = encodeVarintLogproto(dAtA, i, uint64(n21)) + i-- + dAtA[i] = 0x12 + n22, err22 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Start, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.Start):]) + if err22 != nil { + return 0, err22 } + i -= n22 + i = encodeVarintLogproto(dAtA, i, uint64(n22)) + i-- + dAtA[i] = 0xa return len(dAtA) - i, nil } @@ -9087,18 +9128,23 @@ func (m *DetectedFieldsRequest) Size() (n int) { } var l int _ = l - if m.Start != nil { - l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.Start) - n += 1 + l + sovLogproto(uint64(l)) - } - if m.End != nil { - l = github_com_gogo_protobuf_types.SizeOfStdTime(*m.End) - n += 1 + l + sovLogproto(uint64(l)) - } + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.Start) + n += 1 + l + sovLogproto(uint64(l)) + l = github_com_gogo_protobuf_types.SizeOfStdTime(m.End) + n += 1 + l + sovLogproto(uint64(l)) l = len(m.Query) if l > 0 { n += 1 + l + sovLogproto(uint64(l)) } + if m.LineLimit != 0 { + n += 1 + sovLogproto(uint64(m.LineLimit)) + } + if m.FieldLimit != 0 { + n += 1 + sovLogproto(uint64(m.FieldLimit)) + } + if m.Step != 0 { + n += 1 + sovLogproto(uint64(m.Step)) + } return n } @@ -9794,9 +9840,12 @@ func (this *DetectedFieldsRequest) String() string { return "nil" } s := strings.Join([]string{`&DetectedFieldsRequest{`, - `Start:` + strings.Replace(fmt.Sprintf("%v", this.Start), "Timestamp", "types.Timestamp", 1) + `,`, - `End:` + strings.Replace(fmt.Sprintf("%v", this.End), "Timestamp", "types.Timestamp", 1) + `,`, + `Start:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Start), "Timestamp", "types.Timestamp", 1), `&`, ``, 1) + `,`, + `End:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.End), "Timestamp", "types.Timestamp", 1), `&`, ``, 1) + `,`, `Query:` + fmt.Sprintf("%v", this.Query) + `,`, + `LineLimit:` + fmt.Sprintf("%v", this.LineLimit) + `,`, + `FieldLimit:` + fmt.Sprintf("%v", this.FieldLimit) + `,`, + `Step:` + fmt.Sprintf("%v", this.Step) + `,`, `}`, }, "") return s @@ -15834,10 +15883,7 @@ func (m *DetectedFieldsRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.Start == nil { - m.Start = new(time.Time) - } - if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.Start, dAtA[iNdEx:postIndex]); err != nil { + if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.Start, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -15870,10 +15916,7 @@ func (m *DetectedFieldsRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.End == nil { - m.End = new(time.Time) - } - if err := github_com_gogo_protobuf_types.StdTimeUnmarshal(m.End, dAtA[iNdEx:postIndex]); err != nil { + if err := 
github_com_gogo_protobuf_types.StdTimeUnmarshal(&m.End, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex @@ -15909,6 +15952,63 @@ func (m *DetectedFieldsRequest) Unmarshal(dAtA []byte) error { } m.Query = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LineLimit", wireType) + } + m.LineLimit = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.LineLimit |= uint32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field FieldLimit", wireType) + } + m.FieldLimit = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.FieldLimit |= uint32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 6: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Step", wireType) + } + m.Step = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowLogproto + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Step |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipLogproto(dAtA[iNdEx:]) diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index 2bd45890dd447..39d64c30d8185 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -426,13 +426,16 @@ message Volume { message DetectedFieldsRequest { google.protobuf.Timestamp start = 1 [ (gogoproto.stdtime) = true, - (gogoproto.nullable) = true + (gogoproto.nullable) = false ]; google.protobuf.Timestamp end = 2 [ (gogoproto.stdtime) = true, - (gogoproto.nullable) = true + (gogoproto.nullable) = false ]; string query = 3; // Naming this query instead of match because this should be with queryrangebase.Request interface + uint32 lineLimit = 4; + uint32 fieldLimit = 5; + int64 step = 6; } message DetectedFieldsResponse { diff --git a/pkg/querier/http.go b/pkg/querier/http.go index e85fa2045ae9e..614fc5e46104d 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -382,6 +382,9 @@ func (q *QuerierAPI) DetectedFieldsHandler(ctx context.Context, req *logproto.De return nil, err } if resp == nil { // Some stores don't implement this + level.Debug(spanlogger.FromContext(ctx)).Log( + "msg", "queried store for detected fields that does not support it, no response from querier.DetectedFields", + ) return &logproto.DetectedFieldsResponse{ Fields: []*logproto.DetectedField{}, }, nil diff --git a/pkg/querier/multi_tenant_querier.go b/pkg/querier/multi_tenant_querier.go index 76c387c2f64bb..fb90cb0ad4472 100644 --- a/pkg/querier/multi_tenant_querier.go +++ b/pkg/querier/multi_tenant_querier.go @@ -3,11 +3,13 @@ package querier import ( "context" "fmt" + "strings" "github.com/grafana/loki/v3/pkg/querier/plan" "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/go-kit/log" + "github.com/go-kit/log/level" "github.com/grafana/dskit/user" "github.com/prometheus/prometheus/model/labels" @@ -29,12 +31,14 @@ const ( // MultiTenantQuerier is able to query across different tenants. type MultiTenantQuerier struct { Querier + logger log.Logger } // NewMultiTenantQuerier returns a new querier able to query across different tenants. 
-func NewMultiTenantQuerier(querier Querier, _ log.Logger) *MultiTenantQuerier { +func NewMultiTenantQuerier(querier Querier, logger log.Logger) *MultiTenantQuerier { return &MultiTenantQuerier{ Querier: querier, + logger: logger, } } @@ -258,6 +262,26 @@ func (q *MultiTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeReq return merged, nil } +func (q *MultiTenantQuerier) DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + tenantIDs, err := tenant.TenantIDs(ctx) + if err != nil { + return nil, err + } + + if len(tenantIDs) == 1 { + return q.Querier.DetectedFields(ctx, req) + } + + level.Debug(q.logger).Log( + "msg", "detected fields requested for multiple tenants, but not yet supported", + "tenantIDs", strings.Join(tenantIDs, ","), + ) + + return &logproto.DetectedFieldsResponse{ + Fields: []*logproto.DetectedField{}, + }, nil +} + func (q *MultiTenantQuerier) DetectedLabels(ctx context.Context, req *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { // TODO(shantanu) tenantIDs, err := tenant.TenantID(ctx) @@ -308,7 +332,7 @@ func replaceMatchers(expr syntax.Expr, matchers []*labels.Matcher) syntax.Expr { } // See https://github.com/grafana/mimir/blob/114ab88b50638a2047e2ca2a60640f6ca6fe8c17/pkg/querier/tenantfederation/tenant_federation.go#L29-L69 -// filterValuesByMatchers applies matchers to inputed `idLabelName` and +// filterValuesByMatchers applies matchers to inputted `idLabelName` and // `ids`. A set of matched IDs is returned and also all label matchers not // targeting the `idLabelName` label. // diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index 11a153e5a61a2..b0109ae8986ca 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -3,12 +3,19 @@ package querier import ( "context" "flag" + "fmt" "net/http" + "sort" + "strconv" "time" + "github.com/axiomhq/hyperloglog" + "github.com/dustin/go-humanize" "github.com/go-kit/log" "github.com/opentracing/opentracing-go" + logql_log "github.com/grafana/loki/v3/pkg/logql/log" + "github.com/grafana/loki/v3/pkg/logqlmodel" "github.com/grafana/loki/v3/pkg/storage/stores/index" "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" "github.com/grafana/loki/v3/pkg/storage/stores/shipper/indexshipper/indexgateway" @@ -900,18 +907,6 @@ func (q *SingleTenantQuerier) Volume(ctx context.Context, req *logproto.VolumeRe return seriesvolume.Merge(responses, req.Limit), nil } -func (q *SingleTenantQuerier) DetectedFields(_ context.Context, _ *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { - return &logproto.DetectedFieldsResponse{ - Fields: []*logproto.DetectedField{ - { - Label: "foo", - Type: logproto.DetectedFieldString, - Cardinality: 1, - }, - }, - }, nil -} - func (q *SingleTenantQuerier) DetectedLabels(_ context.Context, _ *logproto.DetectedLabelsRequest) (*logproto.DetectedLabelsResponse, error) { return &logproto.DetectedLabelsResponse{ DetectedLabels: []*logproto.DetectedLabel{ @@ -922,3 +917,222 @@ func (q *SingleTenantQuerier) DetectedLabels(_ context.Context, _ *logproto.Dete }, }, nil } + +func (q *SingleTenantQuerier) DetectedFields(ctx context.Context, req *logproto.DetectedFieldsRequest) (*logproto.DetectedFieldsResponse, error) { + expr, err := syntax.ParseLogSelector(req.Query, true) + if err != nil { + return nil, err + } + params := logql.SelectLogParams{ + QueryRequest: &logproto.QueryRequest{ + Start: req.Start, + End: req.End, + Limit: req.LineLimit, + 
Direction: logproto.BACKWARD, + Selector: expr.String(), + Plan: &plan.QueryPlan{ + AST: expr, + }, + }, + } + + iters, err := q.SelectLogs(ctx, params) + if err != nil { + return nil, err + } + + //TODO(twhitney): converting from a step to a duration should be abstracted and reused, + // doing this in a few places now. + streams, err := streamsForFieldDetection(iters, req.LineLimit, time.Duration(req.Step*1e6)) + if err != nil { + return nil, err + } + + detectedFields := parseDetectedFields(ctx, req.FieldLimit, streams) + + fields := make([]*logproto.DetectedField, len(detectedFields)) + fieldCount := 0 + for k, v := range detectedFields { + fields[fieldCount] = &logproto.DetectedField{ + Label: k, + Type: v.fieldType, + Cardinality: v.Estimate(), + } + + fieldCount++ + } + + return &logproto.DetectedFieldsResponse{ + Fields: fields, + }, nil +} + +type parsedFields struct { + sketch *hyperloglog.Sketch + isTypeDetected bool + fieldType logproto.DetectedFieldType +} + +func newParsedFields() *parsedFields { + return &parsedFields{ + sketch: hyperloglog.New(), + isTypeDetected: false, + fieldType: logproto.DetectedFieldString, + } +} + +func (p *parsedFields) Insert(value string) { + p.sketch.Insert([]byte(value)) +} + +func (p *parsedFields) Estimate() uint64 { + return p.sketch.Estimate() +} + +func (p *parsedFields) DetermineType(value string) { + p.fieldType = determineType(value) + p.isTypeDetected = true +} + +func determineType(value string) logproto.DetectedFieldType { + if _, err := strconv.ParseInt(value, 10, 64); err == nil { + return logproto.DetectedFieldInt + } + + if _, err := strconv.ParseFloat(value, 64); err == nil { + return logproto.DetectedFieldFloat + } + + if _, err := strconv.ParseBool(value); err == nil { + return logproto.DetectedFieldBoolean + } + + if _, err := time.ParseDuration(value); err == nil { + return logproto.DetectedFieldDuration + } + + if _, err := humanize.ParseBytes(value); err == nil { + return logproto.DetectedFieldBytes + } + + return logproto.DetectedFieldString +} + +func parseDetectedFields(ctx context.Context, limit uint32, streams logqlmodel.Streams) map[string]*parsedFields { + detectedFields := make(map[string]*parsedFields, limit) + fieldCount := uint32(0) + + for _, stream := range streams { + + level.Debug(spanlogger.FromContext(ctx)).Log( + "detected_fields", "true", + "msg", fmt.Sprintf("looking for detected fields in stream %d with %d lines", stream.Hash, len(stream.Entries))) + + for _, entry := range stream.Entries { + detected := parseLine(entry.Line) + for k, vals := range detected { + if fieldCount >= limit { + return detectedFields + } + + if _, ok := detectedFields[k]; !ok { + detectedFields[k] = newParsedFields() + } + + for _, v := range vals { + parsedFields := detectedFields[k] + if !parsedFields.isTypeDetected { + parsedFields.DetermineType(v) + } + + parsedFields.Insert(v) + } + + level.Debug(spanlogger.FromContext(ctx)).Log( + "detected_fields", "true", + "msg", fmt.Sprintf("detected field %s with %d values", k, len(vals))) + + fieldCount++ + } + } + } + + return detectedFields +} + +func parseLine(line string) map[string][]string { + logFmtParser := logql_log.NewLogfmtParser(true, false) + jsonParser := logql_log.NewJSONParser() + + lbls := logql_log.NewBaseLabelsBuilder().ForLabels(labels.EmptyLabels(), 0) + _, logfmtSuccess := logFmtParser.Process(0, []byte(line), lbls) + if !logfmtSuccess || lbls.HasErr() { + lbls.Reset() + _, jsonSuccess := jsonParser.Process(0, []byte(line), lbls) + if !jsonSuccess || 
lbls.HasErr() { + return map[string][]string{} + } + } + + parsedLabels := map[string]map[string]struct{}{} + for _, lbl := range lbls.LabelsResult().Labels() { + if values, ok := parsedLabels[lbl.Name]; ok { + values[lbl.Value] = struct{}{} + } else { + parsedLabels[lbl.Name] = map[string]struct{}{lbl.Value: {}} + } + } + + result := make(map[string][]string, len(parsedLabels)) + for lbl, values := range parsedLabels { + vals := make([]string, 0, len(values)) + for v := range values { + vals = append(vals, v) + } + result[lbl] = vals + } + + return result +} + +// readStreams reads the streams from the iterator and returns them sorted. +// If categorizeLabels is true, the stream labels contains just the stream labels and entries inside each stream have their +// structuredMetadata and parsed fields populated with structured metadata labels plus the parsed labels respectively. +// Otherwise, the stream labels are the whole series labels including the stream labels, structured metadata labels and parsed labels. +func streamsForFieldDetection(i iter.EntryIterator, size uint32, interval time.Duration) (logqlmodel.Streams, error) { + streams := map[string]*logproto.Stream{} + respSize := uint32(0) + // lastEntry should be a really old time so that the first comparison is always true, we use a negative + // value here because many unit tests start at time.Unix(0,0) + lastEntry := time.Unix(-100, 0) + for respSize < size && i.Next() { + streamLabels, entry := i.Labels(), i.Entry() + + // Always going backward + shouldOutput := entry.Timestamp.Equal(lastEntry.Add(-interval)) || + entry.Timestamp.Before(lastEntry.Add(-interval)) + + // If step == 0 output every line. + // If lastEntry.Unix < 0 this is the first pass through the loop and we should output the line. 
+ // Then check to see if the entry is equal to, or past a forward step + if interval == 0 || lastEntry.Unix() < 0 || shouldOutput { + stream, ok := streams[streamLabels] + if !ok { + stream = &logproto.Stream{ + Labels: streamLabels, + } + streams[streamLabels] = stream + } + stream.Entries = append(stream.Entries, entry) + lastEntry = i.Entry().Timestamp + respSize++ + } + } + + result := make(logqlmodel.Streams, 0, len(streams)) + for _, stream := range streams { + result = append(result, *stream) + } + sort.Sort(result) + return result, i.Error() +} diff --git a/pkg/querier/queryrange/codec.go b/pkg/querier/queryrange/codec.go index c657730b2f483..b9fe39c3a7145 100644 --- a/pkg/querier/queryrange/codec.go +++ b/pkg/querier/queryrange/codec.go @@ -469,6 +469,11 @@ func (Codec) DecodeRequest(_ context.Context, r *http.Request, _ []string) (quer return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) } + _, err = syntax.ParseExpr(req.Query) + if err != nil { + return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) + } + return &DetectedFieldsRequest{ DetectedFieldsRequest: *req, path: r.URL.Path, @@ -520,7 +525,7 @@ func (Codec) DecodeHTTPGrpcRequest(ctx context.Context, r *httpgrpc.HTTPRequest) ctx = httpreq.InjectQueryTags(ctx, queryTags) } - // Add disable pipleine wrappers + // Add disable pipeline wrappers if disableWrappers := httpReq.Header.Get(httpreq.LokiDisablePipelineWrappersHeader); disableWrappers != "" { httpreq.InjectHeader(ctx, httpreq.LokiDisablePipelineWrappersHeader, disableWrappers) } @@ -1992,8 +1997,8 @@ type DetectedFieldsRequest struct { func NewDetectedFieldsRequest(start, end time.Time, query, path string) *DetectedFieldsRequest { return &DetectedFieldsRequest{ DetectedFieldsRequest: logproto.DetectedFieldsRequest{ - Start: &start, - End: &end, + Start: start, + End: end, Query: query, }, path: path, @@ -2005,19 +2010,19 @@ func (r *DetectedFieldsRequest) AsProto() *logproto.DetectedFieldsRequest { } func (r *DetectedFieldsRequest) GetEnd() time.Time { - return *r.End + return r.End } func (r *DetectedFieldsRequest) GetEndTs() time.Time { - return *r.End + return r.End } func (r *DetectedFieldsRequest) GetStart() time.Time { - return *r.Start + return r.Start } func (r *DetectedFieldsRequest) GetStartTs() time.Time { - return *r.Start + return r.Start } func (r *DetectedFieldsRequest) GetStep() int64 { @@ -2030,8 +2035,8 @@ func (r *DetectedFieldsRequest) Path() string { func (r *DetectedFieldsRequest) WithStartEnd(s, e time.Time) queryrangebase.Request { clone := *r - clone.Start = &s - clone.End = &e + clone.Start = s + clone.End = e return &clone } diff --git a/pkg/querier/queryrange/marshal.go b/pkg/querier/queryrange/marshal.go index a47b51607c26a..cc8602d1ce87a 100644 --- a/pkg/querier/queryrange/marshal.go +++ b/pkg/querier/queryrange/marshal.go @@ -214,6 +214,8 @@ func QueryResponseUnwrap(res *QueryResponse) (queryrangebase.Response, error) { return concrete.QuantileSketches, nil case *QueryResponse_DetectedLabels: return concrete.DetectedLabels, nil + case *QueryResponse_DetectedFields: + return concrete.DetectedFields, nil default: return nil, fmt.Errorf("unsupported QueryResponse response type, got (%T)", res.Response) } @@ -251,6 +253,8 @@ func QueryResponseWrap(res queryrangebase.Response) (*QueryResponse, error) { p.Response = &QueryResponse_ShardsResponse{response} case *DetectedLabelsResponse: p.Response = &QueryResponse_DetectedLabels{response} + case *DetectedFieldsResponse: + p.Response = &QueryResponse_DetectedFields{response} 
default: return nil, fmt.Errorf("invalid response format, got (%T)", res) } @@ -343,6 +347,10 @@ func (Codec) QueryRequestUnwrap(ctx context.Context, req *QueryRequest) (queryra return &DetectedLabelsRequest{ DetectedLabelsRequest: *concrete.DetectedLabels, }, ctx, nil + case *QueryRequest_DetectedFields: + return &DetectedFieldsRequest{ + DetectedFieldsRequest: *concrete.DetectedFields, + }, ctx, nil default: return nil, ctx, fmt.Errorf("unsupported request type while unwrapping, got (%T)", req.Request) } @@ -371,6 +379,8 @@ func (Codec) QueryRequestWrap(ctx context.Context, r queryrangebase.Request) (*Q result.Request = &QueryRequest_ShardsRequest{ShardsRequest: req} case *DetectedLabelsRequest: result.Request = &QueryRequest_DetectedLabels{DetectedLabels: &req.DetectedLabelsRequest} + case *DetectedFieldsRequest: + result.Request = &QueryRequest_DetectedFields{DetectedFields: &req.DetectedFieldsRequest} default: return nil, fmt.Errorf("unsupported request type while wrapping, got (%T)", r) } diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 83657d8a5964c..3d64c50231d02 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -374,7 +374,7 @@ func (r roundTripper) Do(ctx context.Context, req base.Request) (base.Response, "msg", "executing query", "type", "detected fields", "query", op.Query, - "length", op.End.Sub(*op.Start), + "length", op.End.Sub(op.Start), "start", op.Start, "end", op.End, ) From 824f5aa20aaaeaf99539dae8b10754ed812185e5 Mon Sep 17 00:00:00 2001 From: Ed Welch Date: Tue, 2 Apr 2024 16:42:50 -0400 Subject: [PATCH 54/54] chore: fix submodule for v3 (#12438) Signed-off-by: Edward Welch --- .../stages/structuredmetadata_test.go | 3 +- clients/pkg/promtail/client/client_test.go | 3 +- clients/pkg/promtail/promtail_wal_test.go | 3 +- go.mod | 4 +- .../loki_micro_services_delete_test.go | 3 +- pkg/chunkenc/memchunk_test.go | 3 +- pkg/distributor/distributor_test.go | 3 +- pkg/distributor/tee_test.go | 2 +- pkg/distributor/validator_test.go | 3 +- pkg/loghttp/push/otlp.go | 2 +- pkg/loghttp/push/otlp_test.go | 3 +- pkg/loghttp/push/push.go | 2 +- pkg/logproto/alias.go | 2 +- pkg/logproto/logproto.pb.go | 328 +++++++++--------- pkg/logproto/logproto.proto | 4 +- pkg/logqlmodel/logqlmodel.go | 2 +- pkg/push/go.mod | 2 +- pkg/push/push.pb.go | 69 ++-- pkg/push/push.proto | 2 +- pkg/querier/queryrange/queryrange.pb.go | 228 ++++++------ pkg/querier/queryrange/queryrange.proto | 2 +- pkg/querier/queryrange/volume_test.go | 3 +- pkg/storage/bloom/v1/bloom_tokenizer_test.go | 3 +- pkg/storage/store_test.go | 3 +- .../grafana/loki/{v3 => }/pkg/push/LICENSE | 0 .../grafana/loki/{v3 => }/pkg/push/push.pb.go | 69 ++-- .../grafana/loki/{v3 => }/pkg/push/push.proto | 2 +- .../loki/{v3 => }/pkg/push/timestamp.go | 0 .../grafana/loki/{v3 => }/pkg/push/types.go | 0 vendor/modules.txt | 6 +- 30 files changed, 384 insertions(+), 375 deletions(-) rename vendor/github.com/grafana/loki/{v3 => }/pkg/push/LICENSE (100%) rename vendor/github.com/grafana/loki/{v3 => }/pkg/push/push.pb.go (92%) rename vendor/github.com/grafana/loki/{v3 => }/pkg/push/push.proto (95%) rename vendor/github.com/grafana/loki/{v3 => }/pkg/push/timestamp.go (100%) rename vendor/github.com/grafana/loki/{v3 => }/pkg/push/types.go (100%) diff --git a/clients/pkg/logentry/stages/structuredmetadata_test.go b/clients/pkg/logentry/stages/structuredmetadata_test.go index 6bcb7a1ee9d25..2b48c641ef81d 100644 --- 
a/clients/pkg/logentry/stages/structuredmetadata_test.go +++ b/clients/pkg/logentry/stages/structuredmetadata_test.go @@ -8,7 +8,8 @@ import ( "github.com/prometheus/common/model" "github.com/stretchr/testify/require" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) diff --git a/clients/pkg/promtail/client/client_test.go b/clients/pkg/promtail/client/client_test.go index b775bdb8eb8a8..ea3039879605b 100644 --- a/clients/pkg/promtail/client/client_test.go +++ b/clients/pkg/promtail/client/client_test.go @@ -22,8 +22,9 @@ import ( "github.com/grafana/loki/v3/clients/pkg/promtail/api" "github.com/grafana/loki/v3/clients/pkg/promtail/utils" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/logproto" - "github.com/grafana/loki/v3/pkg/push" lokiflag "github.com/grafana/loki/v3/pkg/util/flagext" ) diff --git a/clients/pkg/promtail/promtail_wal_test.go b/clients/pkg/promtail/promtail_wal_test.go index f2013ca93e7b8..dfc7ce7273453 100644 --- a/clients/pkg/promtail/promtail_wal_test.go +++ b/clients/pkg/promtail/promtail_wal_test.go @@ -27,7 +27,8 @@ import ( "github.com/grafana/loki/v3/clients/pkg/promtail/utils" "github.com/grafana/loki/v3/clients/pkg/promtail/wal" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" + util_log "github.com/grafana/loki/v3/pkg/util/log" ) diff --git a/go.mod b/go.mod index 379a29274ff4f..0d0659a92220b 100644 --- a/go.mod +++ b/go.mod @@ -125,7 +125,7 @@ require ( github.com/fsnotify/fsnotify v1.7.0 github.com/gogo/googleapis v1.4.0 github.com/grafana/jsonparser v0.0.0-20240209175146-098958973a2d - github.com/grafana/loki/v3/pkg/push v0.0.0-20231124142027-e52380921608 + github.com/grafana/loki/pkg/push v0.0.0-20231124142027-e52380921608 github.com/heroku/x v0.0.61 github.com/influxdata/tdigest v0.0.2-0.20210216194612-fc98d27c9e8b github.com/prometheus/alertmanager v0.27.0 @@ -358,4 +358,4 @@ replace github.com/hashicorp/memberlist => github.com/grafana/memberlist v0.3.1- // Insist on the optimised version of grafana/regexp replace github.com/grafana/regexp => github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd -replace github.com/grafana/loki/v3/pkg/push => ./pkg/push +replace github.com/grafana/loki/pkg/push => ./pkg/push diff --git a/integration/loki_micro_services_delete_test.go b/integration/loki_micro_services_delete_test.go index 0ddce4aa11cbd..ce83cdb4d9f5a 100644 --- a/integration/loki_micro_services_delete_test.go +++ b/integration/loki_micro_services_delete_test.go @@ -16,9 +16,10 @@ import ( "github.com/grafana/loki/v3/integration/client" "github.com/grafana/loki/v3/integration/cluster" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/loki/v3/pkg/logql/syntax" - "github.com/grafana/loki/v3/pkg/push" "github.com/grafana/loki/v3/pkg/storage" ) diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index 06c137823ac46..8fc3eaab5ab34 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -18,13 +18,14 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/chunkenc/testdata" "github.com/grafana/loki/v3/pkg/iter" "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/loki/v3/pkg/logql/log" "github.com/grafana/loki/v3/pkg/logql/syntax" "github.com/grafana/loki/v3/pkg/logqlmodel/stats" - 
"github.com/grafana/loki/v3/pkg/push" "github.com/grafana/loki/v3/pkg/storage/chunk" "github.com/grafana/loki/v3/pkg/util/filter" ) diff --git a/pkg/distributor/distributor_test.go b/pkg/distributor/distributor_test.go index 09ee068c2d53d..b51c545166f25 100644 --- a/pkg/distributor/distributor_test.go +++ b/pkg/distributor/distributor_test.go @@ -30,12 +30,13 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/health/grpc_health_v1" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/ingester" "github.com/grafana/loki/v3/pkg/ingester/client" loghttp_push "github.com/grafana/loki/v3/pkg/loghttp/push" "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/loki/v3/pkg/logql/syntax" - "github.com/grafana/loki/v3/pkg/push" "github.com/grafana/loki/v3/pkg/runtime" "github.com/grafana/loki/v3/pkg/util/constants" fe "github.com/grafana/loki/v3/pkg/util/flagext" diff --git a/pkg/distributor/tee_test.go b/pkg/distributor/tee_test.go index ece0e97c1ce69..f953e09b75111 100644 --- a/pkg/distributor/tee_test.go +++ b/pkg/distributor/tee_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/mock" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" ) type mockedTee struct { diff --git a/pkg/distributor/validator_test.go b/pkg/distributor/validator_test.go index 75fee909000cc..9e51099dfad38 100644 --- a/pkg/distributor/validator_test.go +++ b/pkg/distributor/validator_test.go @@ -11,9 +11,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/assert" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/loki/v3/pkg/logql/syntax" - "github.com/grafana/loki/v3/pkg/push" "github.com/grafana/loki/v3/pkg/validation" ) diff --git a/pkg/loghttp/push/otlp.go b/pkg/loghttp/push/otlp.go index 2fa645ff4581d..a361bbbf196de 100644 --- a/pkg/loghttp/push/otlp.go +++ b/pkg/loghttp/push/otlp.go @@ -18,7 +18,7 @@ import ( "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/plog/plogotlp" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" "github.com/grafana/loki/v3/pkg/logproto" loki_util "github.com/grafana/loki/v3/pkg/util" diff --git a/pkg/loghttp/push/otlp_test.go b/pkg/loghttp/push/otlp_test.go index d9147e061db3a..bcdeb18d17069 100644 --- a/pkg/loghttp/push/otlp_test.go +++ b/pkg/loghttp/push/otlp_test.go @@ -13,8 +13,9 @@ import ( "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/logproto" - "github.com/grafana/loki/v3/pkg/push" ) func TestOTLPToLokiPushRequest(t *testing.T) { diff --git a/pkg/loghttp/push/push.go b/pkg/loghttp/push/push.go index 74c8a20198f2c..c63b32c6111bb 100644 --- a/pkg/loghttp/push/push.go +++ b/pkg/loghttp/push/push.go @@ -12,7 +12,7 @@ import ( "github.com/go-kit/log/level" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" "github.com/dustin/go-humanize" "github.com/go-kit/log" diff --git a/pkg/logproto/alias.go b/pkg/logproto/alias.go index 294b19e85dcad..ab378fcbd0e81 100644 --- a/pkg/logproto/alias.go +++ b/pkg/logproto/alias.go @@ -3,7 +3,7 @@ package logproto import ( "google.golang.org/grpc" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" ) // Aliases to avoid renaming all the imports of logproto diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 5058beebfaf24..3459fa836529b 100644 --- 
a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -12,10 +12,10 @@ import ( proto "github.com/gogo/protobuf/proto" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" + _ "github.com/grafana/loki/pkg/push" + github_com_grafana_loki_pkg_push "github.com/grafana/loki/pkg/push" github_com_grafana_loki_v3_pkg_logql_syntax "github.com/grafana/loki/v3/pkg/logql/syntax" stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" - _ "github.com/grafana/loki/v3/pkg/push" - github_com_grafana_loki_v3_pkg_push "github.com/grafana/loki/v3/pkg/push" github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" github_com_prometheus_common_model "github.com/prometheus/common/model" grpc "google.golang.org/grpc" @@ -490,8 +490,8 @@ func (m *Delete) GetEnd() int64 { } type QueryResponse struct { - Streams []github_com_grafana_loki_v3_pkg_push.Stream `protobuf:"bytes,1,rep,name=streams,proto3,customtype=github.com/grafana/loki/v3/pkg/push.Stream" json:"streams,omitempty"` - Stats stats.Ingester `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats"` + Streams []github_com_grafana_loki_pkg_push.Stream `protobuf:"bytes,1,rep,name=streams,proto3,customtype=github.com/grafana/loki/pkg/push.Stream" json:"streams,omitempty"` + Stats stats.Ingester `protobuf:"bytes,2,opt,name=stats,proto3" json:"stats"` } func (m *QueryResponse) Reset() { *m = QueryResponse{} } @@ -935,8 +935,8 @@ func (m *TailRequest) GetStart() time.Time { } type TailResponse struct { - Stream *github_com_grafana_loki_v3_pkg_push.Stream `protobuf:"bytes,1,opt,name=stream,proto3,customtype=github.com/grafana/loki/v3/pkg/push.Stream" json:"stream,omitempty"` - DroppedStreams []*DroppedStream `protobuf:"bytes,2,rep,name=droppedStreams,proto3" json:"droppedStreams,omitempty"` + Stream *github_com_grafana_loki_pkg_push.Stream `protobuf:"bytes,1,opt,name=stream,proto3,customtype=github.com/grafana/loki/pkg/push.Stream" json:"stream,omitempty"` + DroppedStreams []*DroppedStream `protobuf:"bytes,2,rep,name=droppedStreams,proto3" json:"droppedStreams,omitempty"` } func (m *TailResponse) Reset() { *m = TailResponse{} } @@ -2901,162 +2901,162 @@ func init() { func init() { proto.RegisterFile("pkg/logproto/logproto.proto", fileDescriptor_c28a5f14f1f4c79a) } var fileDescriptor_c28a5f14f1f4c79a = []byte{ - // 2475 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x39, 0xcb, 0x8f, 0x13, 0xc9, - 0xf9, 0x6e, 0xbb, 0xfd, 0xfa, 0xec, 0x19, 0x86, 0x1a, 0x33, 0x58, 0x5e, 0xd6, 0x1e, 0x4a, 0xbf, - 0x85, 0xf9, 0x11, 0xd6, 0x5e, 0x86, 0x40, 0x76, 0x21, 0x64, 0x83, 0x67, 0x16, 0x76, 0x60, 0x78, - 0x6c, 0x0d, 0x21, 0x9b, 0x48, 0x08, 0x35, 0x76, 0x8d, 0xa7, 0x45, 0xbb, 0xdb, 0x74, 0x97, 0x01, - 0x4b, 0x39, 0xe4, 0x1f, 0x88, 0xb2, 0x52, 0x0e, 0x51, 0x2e, 0x91, 0xa2, 0x44, 0x4a, 0x94, 0x5c, - 0xa2, 0xfc, 0x01, 0xc9, 0x25, 0x07, 0x72, 0x63, 0x6f, 0x2b, 0x0e, 0x4e, 0x18, 0x2e, 0xd1, 0x9c, - 0x56, 0xca, 0x6d, 0x4f, 0x51, 0x3d, 0xfa, 0x39, 0x9e, 0xb0, 0x9e, 0x65, 0xb5, 0xe2, 0x62, 0x57, - 0x7d, 0xf5, 0xd5, 0x57, 0xf5, 0x3d, 0xea, 0x7b, 0x35, 0xbc, 0x31, 0xb8, 0xdf, 0x6b, 0x59, 0x4e, - 0x6f, 0xe0, 0x3a, 0xcc, 0x09, 0x06, 0x4d, 0xf1, 0x8b, 0x0a, 0xfe, 0xbc, 0x56, 0xe9, 0x39, 0x3d, - 0x47, 0xe2, 0xf0, 0x91, 0x5c, 0xaf, 0x35, 0x7a, 0x8e, 0xd3, 0xb3, 0x68, 0x4b, 0xcc, 0xee, 0x0d, - 0x37, 0x5b, 0xcc, 0xec, 0x53, 0x8f, 0x19, 0xfd, 0x81, 0x42, 0x58, 0x54, 0xd4, 0x1f, 0x58, 0x7d, - 0xa7, 0x4b, 0xad, 0x96, 0xc7, 0x0c, 0xe6, 0xc9, 0x5f, 0x85, 
0x31, 0xcf, 0x31, 0x06, 0x43, 0x6f, - 0x4b, 0xfc, 0x48, 0x20, 0xae, 0x00, 0xda, 0x60, 0x2e, 0x35, 0xfa, 0xc4, 0x60, 0xd4, 0x23, 0xf4, - 0xc1, 0x90, 0x7a, 0x0c, 0x5f, 0x83, 0xf9, 0x18, 0xd4, 0x1b, 0x38, 0xb6, 0x47, 0xd1, 0x59, 0x28, - 0x79, 0x21, 0xb8, 0xaa, 0x2d, 0x66, 0x96, 0x4a, 0xcb, 0x95, 0x66, 0xc0, 0x4a, 0xb8, 0x87, 0x44, - 0x11, 0xf1, 0xaf, 0x35, 0x80, 0x70, 0x0d, 0xd5, 0x01, 0xe4, 0xea, 0x87, 0x86, 0xb7, 0x55, 0xd5, - 0x16, 0xb5, 0x25, 0x9d, 0x44, 0x20, 0xe8, 0x24, 0x1c, 0x0c, 0x67, 0xd7, 0x9d, 0x8d, 0x2d, 0xc3, - 0xed, 0x56, 0xd3, 0x02, 0x6d, 0xf7, 0x02, 0x42, 0xa0, 0xbb, 0x06, 0xa3, 0xd5, 0xcc, 0xa2, 0xb6, - 0x94, 0x21, 0x62, 0x8c, 0x16, 0x20, 0xc7, 0xa8, 0x6d, 0xd8, 0xac, 0xaa, 0x2f, 0x6a, 0x4b, 0x45, - 0xa2, 0x66, 0x1c, 0xce, 0x79, 0xa7, 0x5e, 0x35, 0xbb, 0xa8, 0x2d, 0xcd, 0x10, 0x35, 0xc3, 0x7f, - 0xcc, 0x40, 0xf9, 0xa3, 0x21, 0x75, 0x47, 0x4a, 0x00, 0xa8, 0x0e, 0x05, 0x8f, 0x5a, 0xb4, 0xc3, - 0x1c, 0x57, 0x5c, 0xb0, 0xd8, 0x4e, 0x57, 0x35, 0x12, 0xc0, 0x50, 0x05, 0xb2, 0x96, 0xd9, 0x37, - 0x99, 0xb8, 0xd6, 0x0c, 0x91, 0x13, 0x74, 0x0e, 0xb2, 0x1e, 0x33, 0x5c, 0x26, 0xee, 0x52, 0x5a, - 0xae, 0x35, 0xa5, 0xd2, 0x9a, 0xbe, 0xd2, 0x9a, 0xb7, 0x7c, 0xa5, 0xb5, 0x0b, 0x4f, 0xc6, 0x8d, - 0xd4, 0x27, 0xff, 0x6c, 0x68, 0x44, 0x6e, 0x41, 0x67, 0x21, 0x43, 0xed, 0xae, 0xb8, 0xef, 0x97, - 0xdd, 0xc9, 0x37, 0xa0, 0x53, 0x50, 0xec, 0x9a, 0x2e, 0xed, 0x30, 0xd3, 0xb1, 0x05, 0x57, 0xb3, - 0xcb, 0xf3, 0xa1, 0x46, 0x56, 0xfd, 0x25, 0x12, 0x62, 0xa1, 0x93, 0x90, 0xf3, 0xb8, 0xe8, 0xbc, - 0x6a, 0x7e, 0x31, 0xb3, 0x54, 0x6c, 0x57, 0x76, 0xc6, 0x8d, 0x39, 0x09, 0x39, 0xe9, 0xf4, 0x4d, - 0x46, 0xfb, 0x03, 0x36, 0x22, 0x0a, 0x07, 0x9d, 0x80, 0x7c, 0x97, 0x5a, 0x94, 0x2b, 0xbc, 0x20, - 0x14, 0x3e, 0x17, 0x21, 0x2f, 0x16, 0x88, 0x8f, 0x80, 0xee, 0x80, 0x3e, 0xb0, 0x0c, 0xbb, 0x5a, - 0x14, 0x5c, 0xcc, 0x86, 0x88, 0x37, 0x2d, 0xc3, 0x6e, 0xbf, 0xf7, 0x6c, 0xdc, 0x38, 0xd3, 0x33, - 0xd9, 0xd6, 0xf0, 0x5e, 0xb3, 0xe3, 0xf4, 0x5b, 0x3d, 0xd7, 0xd8, 0x34, 0x6c, 0xa3, 0x65, 0x39, - 0xf7, 0xcd, 0xd6, 0xc3, 0xd3, 0x2d, 0x6e, 0x9f, 0x0f, 0x86, 0xd4, 0x35, 0xa9, 0xdb, 0xe2, 0x64, - 0x9a, 0x42, 0x25, 0x7c, 0x2b, 0x11, 0x64, 0xaf, 0xe8, 0x85, 0xdc, 0x5c, 0x1e, 0x3f, 0x4f, 0x03, - 0xda, 0x30, 0xfa, 0x03, 0x8b, 0x4e, 0xa5, 0xb2, 0x40, 0x39, 0xe9, 0x7d, 0x2b, 0x27, 0x33, 0xad, - 0x72, 0x42, 0x49, 0xeb, 0xd3, 0x49, 0x3a, 0xfb, 0x65, 0x25, 0x9d, 0xfb, 0x5a, 0x24, 0x8d, 0xab, - 0xa0, 0xf3, 0x19, 0x9a, 0x83, 0x8c, 0x6b, 0x3c, 0x12, 0xf2, 0x2c, 0x13, 0x3e, 0xc4, 0xeb, 0x90, - 0x93, 0x77, 0x41, 0xb5, 0xa4, 0xc0, 0xe3, 0xef, 0x23, 0x14, 0x76, 0xc6, 0x17, 0xe3, 0x5c, 0x28, - 0xc6, 0x8c, 0x10, 0x10, 0xfe, 0xad, 0x06, 0x33, 0x4a, 0x8b, 0xca, 0xc7, 0x50, 0xc8, 0xcb, 0x37, - 0xee, 0xfb, 0x97, 0xc3, 0x49, 0xff, 0x72, 0xb1, 0x6b, 0x0c, 0x18, 0x75, 0xdb, 0xcb, 0x4f, 0xc6, - 0x0d, 0xed, 0xd9, 0xb8, 0x71, 0xe2, 0x25, 0x8c, 0x0a, 0x6f, 0xa7, 0xdc, 0x8f, 0x4f, 0x1b, 0x7d, - 0x4b, 0x5c, 0x90, 0x79, 0xca, 0x1a, 0x0e, 0x34, 0xa5, 0xa7, 0x5c, 0xb3, 0x7b, 0xd4, 0xe3, 0xc4, - 0x75, 0xae, 0x48, 0x22, 0x71, 0xf0, 0x4f, 0x60, 0x3e, 0x66, 0x70, 0xea, 0xaa, 0xef, 0x42, 0xce, - 0xe3, 0x32, 0xf4, 0x6f, 0x1a, 0x51, 0xd7, 0x86, 0x80, 0xb7, 0x67, 0xd5, 0x15, 0x73, 0x72, 0x4e, - 0x14, 0xfe, 0x74, 0xa7, 0xff, 0x5d, 0x83, 0xf2, 0xba, 0x71, 0x8f, 0x5a, 0xbe, 0xa5, 0x23, 0xd0, - 0x6d, 0xa3, 0x4f, 0x95, 0xd0, 0xc5, 0x98, 0x7b, 0xb6, 0x87, 0x86, 0x35, 0xa4, 0x92, 0x64, 0x81, - 0xa8, 0xd9, 0xb4, 0x2e, 0x49, 0xdb, 0xb7, 0x4b, 0xd2, 0x42, 0xab, 0xaf, 0x40, 0x96, 0x1b, 0xd7, - 0x48, 0xb8, 0xa3, 0x22, 0x91, 0x13, 0x7c, 0x1c, 0x66, 0x14, 0x17, 0x4a, 0x7c, 0xe1, 
0x95, 0xb9, - 0xf8, 0x8a, 0xfe, 0x95, 0x71, 0x1f, 0x72, 0x52, 0xda, 0xe8, 0xff, 0xa0, 0x18, 0x84, 0x39, 0xc1, - 0x6d, 0xa6, 0x9d, 0xdb, 0x19, 0x37, 0xd2, 0xcc, 0x23, 0xe1, 0x02, 0x6a, 0x40, 0x56, 0xec, 0x14, - 0x9c, 0x6b, 0xed, 0xe2, 0xce, 0xb8, 0x21, 0x01, 0x44, 0xfe, 0xa1, 0x23, 0xa0, 0x6f, 0xf1, 0x48, - 0xc3, 0x45, 0xa0, 0xb7, 0x0b, 0x3b, 0xe3, 0x86, 0x98, 0x13, 0xf1, 0x8b, 0x2f, 0x43, 0x79, 0x9d, - 0xf6, 0x8c, 0xce, 0x48, 0x1d, 0x5a, 0xf1, 0xc9, 0xf1, 0x03, 0x35, 0x9f, 0xc6, 0x51, 0x28, 0x07, - 0x27, 0xde, 0xed, 0x7b, 0xca, 0xae, 0x4b, 0x01, 0xec, 0x9a, 0x87, 0x7f, 0xa5, 0x81, 0xd2, 0x33, - 0xc2, 0x90, 0xb3, 0x38, 0xaf, 0x9e, 0xf2, 0x44, 0xb0, 0x33, 0x6e, 0x28, 0x08, 0x51, 0xff, 0xe8, - 0x3c, 0xe4, 0x3d, 0x71, 0x22, 0x27, 0x96, 0x34, 0x1f, 0xb1, 0xd0, 0x3e, 0xc0, 0xcd, 0x60, 0x67, - 0xdc, 0xf0, 0x11, 0x89, 0x3f, 0x40, 0xcd, 0x58, 0x08, 0x95, 0x8c, 0xcd, 0xee, 0x8c, 0x1b, 0x11, - 0x68, 0x34, 0xa4, 0xe2, 0x2f, 0x34, 0x28, 0xdd, 0x32, 0xcc, 0xc0, 0x84, 0xaa, 0xbe, 0x8a, 0x42, - 0x4f, 0x29, 0x01, 0xfc, 0x55, 0x77, 0xa9, 0x65, 0x8c, 0x2e, 0x39, 0xae, 0xa0, 0x3b, 0x43, 0x82, - 0x79, 0x18, 0xf5, 0xf4, 0x89, 0x51, 0x2f, 0x3b, 0xbd, 0x63, 0xfd, 0x7a, 0xdd, 0xd8, 0x15, 0xbd, - 0x90, 0x9e, 0xcb, 0xe0, 0x3f, 0x6b, 0x50, 0x96, 0xcc, 0x2b, 0xcb, 0xbb, 0x0b, 0x39, 0x29, 0x1b, - 0xc1, 0xfe, 0xff, 0x70, 0x31, 0xcd, 0x29, 0xdd, 0x8b, 0x22, 0x8b, 0xde, 0x87, 0xd9, 0xae, 0xeb, - 0x0c, 0x06, 0xb4, 0xbb, 0xa1, 0x7c, 0x59, 0x3a, 0xe9, 0xcb, 0x56, 0xa3, 0xeb, 0x24, 0x81, 0x8e, - 0xff, 0xa1, 0xc1, 0x8c, 0xf2, 0x19, 0x4a, 0x63, 0x81, 0x94, 0xb5, 0x7d, 0x87, 0xaf, 0xf4, 0xb4, - 0xe1, 0x6b, 0x01, 0x72, 0x3d, 0xd7, 0x19, 0x0e, 0xbc, 0x6a, 0x46, 0xbe, 0x50, 0x39, 0x9b, 0x2e, - 0xac, 0xe1, 0x2b, 0x30, 0xeb, 0xb3, 0xb2, 0x87, 0xe3, 0xac, 0x25, 0x1d, 0xe7, 0x5a, 0x97, 0xda, - 0xcc, 0xdc, 0x34, 0x03, 0x57, 0xa8, 0xf0, 0xf1, 0xcf, 0x35, 0x98, 0x4b, 0xa2, 0xa0, 0xd5, 0xc8, - 0x6b, 0xe3, 0xe4, 0x8e, 0xed, 0x4d, 0xae, 0x29, 0x5c, 0x90, 0xf7, 0x81, 0xcd, 0xdc, 0x91, 0x4f, - 0x5a, 0xee, 0xad, 0x9d, 0x81, 0x52, 0x64, 0x91, 0xc7, 0xaa, 0xfb, 0x54, 0xbd, 0x0f, 0xc2, 0x87, - 0xa1, 0x63, 0x48, 0x4b, 0xb7, 0x26, 0x26, 0xf8, 0x97, 0x1a, 0xcc, 0xc4, 0x74, 0x89, 0xde, 0x05, - 0x7d, 0xd3, 0x75, 0xfa, 0x53, 0x29, 0x4a, 0xec, 0x40, 0xdf, 0x86, 0x34, 0x73, 0xa6, 0x52, 0x53, - 0x9a, 0x39, 0x5c, 0x4b, 0x8a, 0xfd, 0x8c, 0x4c, 0x76, 0xe5, 0x0c, 0x9f, 0x81, 0xa2, 0x60, 0xe8, - 0xa6, 0x61, 0xba, 0x13, 0x63, 0xc6, 0x64, 0x86, 0xce, 0xc3, 0x01, 0xe9, 0x0f, 0x27, 0x6f, 0x2e, - 0x4f, 0xda, 0x5c, 0xf6, 0x37, 0xbf, 0x01, 0xd9, 0x95, 0xad, 0xa1, 0x7d, 0x9f, 0x6f, 0xe9, 0x1a, - 0xcc, 0xf0, 0xb7, 0xf0, 0x31, 0x3e, 0x04, 0xf3, 0xfc, 0x19, 0x52, 0xd7, 0x5b, 0x71, 0x86, 0x36, - 0xf3, 0x8b, 0x8d, 0x93, 0x50, 0x89, 0x83, 0x95, 0x95, 0x54, 0x20, 0xdb, 0xe1, 0x00, 0x41, 0x63, - 0x86, 0xc8, 0x09, 0xfe, 0x9d, 0x06, 0xe8, 0x32, 0x65, 0xe2, 0x94, 0xb5, 0xd5, 0xe0, 0x79, 0xd4, - 0xa0, 0xd0, 0x37, 0x58, 0x67, 0x8b, 0xba, 0x9e, 0x9f, 0x8c, 0xf8, 0xf3, 0x6f, 0x22, 0xf3, 0xc3, - 0xa7, 0x60, 0x3e, 0x76, 0x4b, 0xc5, 0x53, 0x0d, 0x0a, 0x1d, 0x05, 0x53, 0x51, 0x2f, 0x98, 0xe3, - 0xbf, 0xa4, 0xa1, 0x20, 0x36, 0x10, 0xba, 0x89, 0x4e, 0x41, 0x69, 0xd3, 0xb4, 0x7b, 0xd4, 0x1d, - 0xb8, 0xa6, 0x12, 0x81, 0xde, 0x3e, 0xb0, 0x33, 0x6e, 0x44, 0xc1, 0x24, 0x3a, 0x41, 0x6f, 0x43, - 0x7e, 0xe8, 0x51, 0xf7, 0xae, 0x29, 0x5f, 0x7a, 0xb1, 0x5d, 0xd9, 0x1e, 0x37, 0x72, 0x3f, 0xf0, - 0xa8, 0xbb, 0xb6, 0xca, 0xe3, 0xcf, 0x50, 0x8c, 0x88, 0xfc, 0xef, 0xa2, 0xab, 0xca, 0x4c, 0x45, - 0x36, 0xd6, 0xfe, 0x0e, 0xbf, 0xfe, 0xb3, 0x71, 0xe3, 0x78, 0xc4, 0xdb, 0x0d, 0x5c, 0xa7, 0x4f, - 0xd9, 0x16, 
0x1d, 0x7a, 0xad, 0x8e, 0xd3, 0xef, 0x3b, 0x76, 0x4b, 0x94, 0x96, 0x82, 0x69, 0x1e, - 0x44, 0xf9, 0x76, 0x65, 0xb9, 0xb7, 0x20, 0xcf, 0xb6, 0x5c, 0x67, 0xd8, 0xdb, 0x12, 0xb1, 0x21, - 0xd3, 0x3e, 0x37, 0x3d, 0x3d, 0x9f, 0x02, 0xf1, 0x07, 0xe8, 0x28, 0x97, 0x16, 0xed, 0xdc, 0xf7, - 0x86, 0x7d, 0x59, 0xb0, 0xb5, 0xb3, 0x3b, 0xe3, 0x86, 0xf6, 0x36, 0x09, 0xc0, 0xf8, 0x67, 0x69, - 0x68, 0x08, 0x43, 0xbd, 0x2d, 0x92, 0x87, 0x4b, 0x8e, 0x7b, 0x8d, 0x32, 0xd7, 0xec, 0x5c, 0x37, - 0xfa, 0xd4, 0xb7, 0x8d, 0x06, 0x94, 0xfa, 0x02, 0x78, 0x37, 0xf2, 0x04, 0xa0, 0x1f, 0xe0, 0xa1, - 0x37, 0x01, 0xc4, 0x9b, 0x91, 0xeb, 0xf2, 0x35, 0x14, 0x05, 0x44, 0x2c, 0xaf, 0xc4, 0x24, 0xd5, - 0x9a, 0x92, 0x33, 0x25, 0xa1, 0xb5, 0xa4, 0x84, 0xa6, 0xa6, 0x13, 0x88, 0x25, 0x6a, 0xeb, 0xd9, - 0xb8, 0xad, 0xe3, 0x4f, 0x35, 0xa8, 0xaf, 0xfb, 0x37, 0xdf, 0xa7, 0x38, 0x7c, 0x7e, 0xd3, 0xaf, - 0x88, 0xdf, 0xcc, 0x57, 0xe3, 0x17, 0xd7, 0x01, 0xd6, 0x4d, 0x9b, 0x5e, 0x32, 0x2d, 0x46, 0xdd, - 0x09, 0x25, 0xc9, 0x2f, 0x32, 0xa1, 0x4b, 0x20, 0x74, 0xd3, 0xe7, 0x73, 0x25, 0xe2, 0x87, 0x5f, - 0x05, 0x1b, 0xe9, 0x57, 0xa8, 0xb6, 0x4c, 0xc2, 0x45, 0xd9, 0x90, 0xdf, 0x14, 0xec, 0xc9, 0x90, - 0x1a, 0xeb, 0xaa, 0x84, 0xbc, 0xb7, 0xbf, 0xa7, 0x0e, 0x3f, 0xfb, 0x92, 0x9c, 0x44, 0xf4, 0x81, - 0x5a, 0xde, 0xc8, 0x66, 0xc6, 0xe3, 0xc8, 0x7e, 0xe2, 0x1f, 0x82, 0x0c, 0x95, 0x77, 0x65, 0x27, - 0xe6, 0x5d, 0x17, 0xd4, 0x31, 0x5f, 0xa9, 0x84, 0xbc, 0x10, 0x7a, 0x40, 0xa1, 0x14, 0xe5, 0x01, - 0x8f, 0x81, 0xee, 0xd2, 0x4d, 0x3f, 0x54, 0xa3, 0xf0, 0xe4, 0x00, 0x53, 0xac, 0xe3, 0xbf, 0x6a, - 0x30, 0x77, 0x99, 0xb2, 0x78, 0x12, 0xf4, 0x1a, 0xa9, 0x14, 0x7f, 0x08, 0x07, 0x23, 0xf7, 0x57, - 0xdc, 0x9f, 0x4e, 0x64, 0x3e, 0x87, 0x42, 0xfe, 0xd7, 0xec, 0x2e, 0x7d, 0xac, 0xea, 0xc6, 0x78, - 0xd2, 0x73, 0x13, 0x4a, 0x91, 0x45, 0x74, 0x31, 0x91, 0xee, 0x44, 0xda, 0x3d, 0x41, 0xc8, 0x6e, - 0x57, 0x14, 0x4f, 0xb2, 0x72, 0x54, 0xf9, 0x6c, 0x90, 0x1a, 0x6c, 0x00, 0x12, 0xea, 0x12, 0x64, - 0xa3, 0xc1, 0x49, 0x40, 0xaf, 0x06, 0x79, 0x4f, 0x30, 0x47, 0x47, 0x41, 0x77, 0x9d, 0x47, 0x7e, - 0x1e, 0x3b, 0x13, 0x1e, 0x49, 0x9c, 0x47, 0x44, 0x2c, 0xe1, 0xf3, 0x90, 0x21, 0xce, 0x23, 0x54, - 0x07, 0x70, 0x0d, 0xbb, 0x47, 0x6f, 0x07, 0x45, 0x54, 0x99, 0x44, 0x20, 0x7b, 0x24, 0x0e, 0x2b, - 0x70, 0x30, 0x7a, 0x23, 0xa9, 0xee, 0x26, 0xe4, 0x3f, 0x1a, 0x46, 0xc5, 0x55, 0x49, 0x88, 0x4b, - 0xd6, 0xe3, 0x3e, 0x12, 0xb7, 0x19, 0x08, 0xe1, 0xe8, 0x08, 0x14, 0x99, 0x71, 0xcf, 0xa2, 0xd7, - 0x43, 0x37, 0x17, 0x02, 0xf8, 0x2a, 0xaf, 0xff, 0x6e, 0x47, 0x32, 0xa0, 0x10, 0x80, 0x4e, 0xc0, - 0x5c, 0x78, 0xe7, 0x9b, 0x2e, 0xdd, 0x34, 0x1f, 0x0b, 0x0d, 0x97, 0xc9, 0x2e, 0x38, 0x5a, 0x82, - 0x03, 0x21, 0x6c, 0x43, 0x64, 0x1a, 0xba, 0x40, 0x4d, 0x82, 0xb9, 0x6c, 0x04, 0xbb, 0x1f, 0x3c, - 0x18, 0x1a, 0x96, 0x78, 0x7c, 0x65, 0x12, 0x81, 0xe0, 0xbf, 0x69, 0x70, 0x50, 0xaa, 0x9a, 0x57, - 0xfe, 0xaf, 0xa3, 0xd5, 0xff, 0x5e, 0x03, 0x14, 0xe5, 0x40, 0x99, 0xd6, 0x5b, 0xd1, 0xae, 0x0e, - 0x4f, 0x65, 0x4a, 0xa2, 0xac, 0x95, 0xa0, 0xb0, 0x2b, 0x83, 0x21, 0x27, 0xd2, 0x21, 0x59, 0x5f, - 0xeb, 0xb2, 0x6e, 0x96, 0x10, 0xa2, 0xfe, 0x79, 0xb9, 0x7f, 0x6f, 0xc4, 0xa8, 0xa7, 0xaa, 0x5e, - 0x51, 0xee, 0x0b, 0x00, 0x91, 0x7f, 0xfc, 0x2c, 0x6a, 0x33, 0x61, 0x35, 0x7a, 0x78, 0x96, 0x02, - 0x11, 0x7f, 0x80, 0xff, 0x94, 0x86, 0x99, 0xdb, 0x8e, 0x35, 0x0c, 0x03, 0xe3, 0xeb, 0x14, 0x30, - 0x62, 0xa5, 0x78, 0xd6, 0x2f, 0xc5, 0x11, 0xe8, 0x1e, 0xa3, 0x03, 0x61, 0x59, 0x19, 0x22, 0xc6, - 0x08, 0x43, 0x99, 0x19, 0x6e, 0x8f, 0x32, 0x59, 0xdd, 0x54, 0x73, 0x22, 0xed, 0x8c, 0xc1, 0xd0, - 0x22, 0x94, 0x8c, 0x5e, 0xcf, 0xa5, 
0x3d, 0x83, 0xd1, 0xf6, 0xa8, 0x9a, 0x17, 0x87, 0x45, 0x41, - 0xf8, 0x63, 0x98, 0xf5, 0x85, 0xa5, 0x54, 0xfa, 0x0e, 0xe4, 0x1f, 0x0a, 0xc8, 0x84, 0xf6, 0x97, - 0x44, 0x55, 0x6e, 0xcc, 0x47, 0x8b, 0x37, 0xcd, 0xfd, 0x3b, 0xe3, 0x2b, 0x90, 0x93, 0xe8, 0xe8, - 0x48, 0xb4, 0x46, 0x91, 0x7d, 0x1a, 0x3e, 0x57, 0x05, 0x07, 0x86, 0x9c, 0x24, 0xa4, 0x14, 0x2f, - 0x6c, 0x43, 0x42, 0x88, 0xfa, 0xc7, 0xff, 0xd1, 0xe0, 0xd0, 0x2a, 0x65, 0xb4, 0xc3, 0x68, 0xf7, - 0x92, 0x49, 0xad, 0xee, 0x37, 0x5a, 0x3e, 0x07, 0x7d, 0xb0, 0x4c, 0xa4, 0x0f, 0xc6, 0xfd, 0x8e, - 0x65, 0xda, 0x74, 0x3d, 0xd2, 0x48, 0x09, 0x01, 0xdc, 0x43, 0x6c, 0xf2, 0x8b, 0xcb, 0x65, 0xf9, - 0x95, 0x22, 0x02, 0x09, 0x34, 0x9c, 0x0b, 0x35, 0x8c, 0xd7, 0x60, 0x21, 0xc9, 0xb4, 0xd2, 0x51, - 0x0b, 0x72, 0x62, 0xef, 0x84, 0x5e, 0x6a, 0x6c, 0x07, 0x51, 0x68, 0xd8, 0x85, 0x99, 0xd8, 0x82, - 0xd0, 0x19, 0xb7, 0x11, 0xe5, 0x3f, 0xe5, 0x04, 0xfd, 0x3f, 0xe8, 0x6c, 0x34, 0x50, 0x6e, 0xb3, - 0x7d, 0xe8, 0x8b, 0x71, 0xe3, 0x60, 0x6c, 0xdb, 0xad, 0xd1, 0x80, 0x12, 0x81, 0xc2, 0x4d, 0xab, - 0x63, 0xb8, 0x5d, 0xd3, 0x36, 0x2c, 0x93, 0x49, 0x51, 0xe8, 0x24, 0x0a, 0xc2, 0xbf, 0x89, 0x28, - 0x4d, 0xda, 0xe3, 0x3e, 0x95, 0xa6, 0xed, 0x5b, 0x69, 0xda, 0x4b, 0x94, 0x86, 0x7f, 0x14, 0x8a, - 0xd8, 0xbf, 0xa2, 0x12, 0xf1, 0xfb, 0x30, 0xdb, 0x8d, 0xad, 0xec, 0x2d, 0x6a, 0xd9, 0xfe, 0x4c, - 0xa0, 0xe3, 0xb7, 0x42, 0x91, 0x0b, 0xc8, 0x64, 0x91, 0x9f, 0x38, 0x06, 0xc5, 0xe0, 0x63, 0x0e, - 0x2a, 0x41, 0xfe, 0xd2, 0x0d, 0xf2, 0xc3, 0x8b, 0x64, 0x75, 0x2e, 0x85, 0xca, 0x50, 0x68, 0x5f, - 0x5c, 0xb9, 0x2a, 0x66, 0xda, 0xf2, 0xa7, 0x59, 0x3f, 0x68, 0xba, 0xe8, 0xbb, 0x90, 0x95, 0x91, - 0x70, 0x21, 0xbc, 0x4c, 0xf4, 0x9b, 0x49, 0xed, 0xf0, 0x2e, 0xb8, 0xe4, 0x0a, 0xa7, 0xde, 0xd1, - 0xd0, 0x75, 0x28, 0x09, 0xa0, 0xea, 0x8b, 0x1e, 0x49, 0xb6, 0x27, 0x63, 0x94, 0xde, 0xdc, 0x63, - 0x35, 0x42, 0xef, 0x1c, 0x64, 0x25, 0x83, 0x0b, 0x89, 0x84, 0x65, 0xc2, 0x6d, 0x62, 0x9d, 0x62, - 0x9c, 0x42, 0xef, 0x81, 0x7e, 0xcb, 0x30, 0x2d, 0x14, 0xc9, 0x97, 0x22, 0xed, 0xcc, 0xda, 0x42, - 0x12, 0x1c, 0x39, 0xf6, 0x42, 0xd0, 0x95, 0x3d, 0x9c, 0xec, 0x0b, 0xf9, 0xdb, 0xab, 0xbb, 0x17, - 0x82, 0x93, 0x6f, 0xc8, 0xde, 0xa1, 0xdf, 0x9d, 0x40, 0x6f, 0xc6, 0x8f, 0x4a, 0x34, 0x33, 0x6a, - 0xf5, 0xbd, 0x96, 0x03, 0x82, 0xeb, 0x50, 0x8a, 0x74, 0x06, 0xa2, 0x62, 0xdd, 0xdd, 0xd6, 0x88, - 0x8a, 0x75, 0x42, 0x3b, 0x01, 0xa7, 0xd0, 0x65, 0x28, 0xf0, 0x2c, 0x93, 0x07, 0x5b, 0xf4, 0x46, - 0x32, 0x99, 0x8c, 0x24, 0x11, 0xb5, 0x23, 0x93, 0x17, 0x03, 0x42, 0xdf, 0x87, 0xe2, 0x65, 0xca, - 0x94, 0x27, 0x3e, 0x9c, 0x74, 0xe5, 0x13, 0x24, 0x15, 0x0f, 0x07, 0x38, 0x85, 0x3e, 0x16, 0x09, - 0x6f, 0xdc, 0x13, 0xa1, 0xc6, 0x1e, 0x1e, 0x27, 0xb8, 0xd7, 0xe2, 0xde, 0x08, 0x3e, 0xe5, 0xe5, - 0x3b, 0xfe, 0xe7, 0xe3, 0x55, 0x83, 0x19, 0xe8, 0x06, 0xcc, 0x0a, 0x96, 0x83, 0xef, 0xcb, 0x31, - 0xd3, 0xdc, 0xf5, 0x31, 0x3b, 0x66, 0x9a, 0xbb, 0x3f, 0x6a, 0xe3, 0x54, 0xfb, 0xce, 0xd3, 0xe7, - 0xf5, 0xd4, 0x67, 0xcf, 0xeb, 0xa9, 0xcf, 0x9f, 0xd7, 0xb5, 0x9f, 0x6e, 0xd7, 0xb5, 0x3f, 0x6c, - 0xd7, 0xb5, 0x27, 0xdb, 0x75, 0xed, 0xe9, 0x76, 0x5d, 0xfb, 0xd7, 0x76, 0x5d, 0xfb, 0xf7, 0x76, - 0x3d, 0xf5, 0xf9, 0x76, 0x5d, 0xfb, 0xe4, 0x45, 0x3d, 0xf5, 0xf4, 0x45, 0x3d, 0xf5, 0xd9, 0x8b, - 0x7a, 0xea, 0xc7, 0xc7, 0x5f, 0x5e, 0x83, 0x49, 0x6f, 0x93, 0x13, 0x7f, 0xa7, 0xff, 0x1b, 0x00, - 0x00, 0xff, 0xff, 0x55, 0x29, 0x90, 0x4c, 0x00, 0x20, 0x00, 0x00, + // 2472 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x39, 0xcb, 0x6f, 0x1b, 0xc7, + 0xf9, 0x5c, 0x72, 0xf9, 0xfa, 0x48, 0xc9, 
0xf2, 0x88, 0x96, 0x09, 0xc6, 0x21, 0xe5, 0xc1, 0x2f, + 0xb6, 0x7e, 0x8e, 0x23, 0xc6, 0x76, 0xed, 0x26, 0x76, 0xdd, 0xd4, 0x94, 0x62, 0x47, 0xb6, 0xfc, + 0xc8, 0xc8, 0x75, 0xd3, 0xa2, 0x86, 0xb1, 0x22, 0x47, 0xd4, 0xc2, 0xe4, 0x2e, 0xbd, 0x3b, 0xb4, + 0x2d, 0xa0, 0x87, 0xfe, 0x03, 0x45, 0x03, 0xf4, 0x50, 0xf4, 0x52, 0xb4, 0x40, 0x81, 0x16, 0x2d, + 0x7a, 0xe9, 0x1f, 0xd0, 0x5e, 0x7a, 0x70, 0x6f, 0xce, 0x2d, 0xc8, 0x81, 0xad, 0xe5, 0x4b, 0xa1, + 0x53, 0x80, 0xde, 0x72, 0x2a, 0xe6, 0xb5, 0x2f, 0x51, 0x75, 0xa8, 0x38, 0x08, 0x7c, 0x21, 0x67, + 0xbe, 0xf9, 0xe6, 0x9b, 0xf9, 0x1e, 0xf3, 0xbd, 0x16, 0x5e, 0x1b, 0xdc, 0xef, 0x36, 0x7b, 0x6e, + 0x77, 0xe0, 0xb9, 0xcc, 0x0d, 0x06, 0x8b, 0xe2, 0x17, 0x15, 0xf4, 0xbc, 0x56, 0xe9, 0xba, 0x5d, + 0x57, 0xe2, 0xf0, 0x91, 0x5c, 0xaf, 0x35, 0xba, 0xae, 0xdb, 0xed, 0xd1, 0xa6, 0x98, 0xad, 0x0f, + 0x37, 0x9a, 0xcc, 0xee, 0x53, 0x9f, 0x59, 0xfd, 0x81, 0x42, 0x98, 0x57, 0xd4, 0x1f, 0xf4, 0xfa, + 0x6e, 0x87, 0xf6, 0x9a, 0x3e, 0xb3, 0x98, 0x2f, 0x7f, 0x15, 0xc6, 0x2c, 0xc7, 0x18, 0x0c, 0xfd, + 0x4d, 0xf1, 0x23, 0x81, 0xb8, 0x02, 0x68, 0x8d, 0x79, 0xd4, 0xea, 0x13, 0x8b, 0x51, 0x9f, 0xd0, + 0x07, 0x43, 0xea, 0x33, 0x7c, 0x1d, 0x66, 0x63, 0x50, 0x7f, 0xe0, 0x3a, 0x3e, 0x45, 0xe7, 0xa0, + 0xe4, 0x87, 0xe0, 0xaa, 0x31, 0x9f, 0x59, 0x28, 0x9d, 0xae, 0x2c, 0x06, 0xac, 0x84, 0x7b, 0x48, + 0x14, 0x11, 0xff, 0xda, 0x00, 0x08, 0xd7, 0x50, 0x1d, 0x40, 0xae, 0x7e, 0x60, 0xf9, 0x9b, 0x55, + 0x63, 0xde, 0x58, 0x30, 0x49, 0x04, 0x82, 0x4e, 0xc2, 0xc1, 0x70, 0x76, 0xc3, 0x5d, 0xdb, 0xb4, + 0xbc, 0x4e, 0x35, 0x2d, 0xd0, 0x76, 0x2f, 0x20, 0x04, 0xa6, 0x67, 0x31, 0x5a, 0xcd, 0xcc, 0x1b, + 0x0b, 0x19, 0x22, 0xc6, 0x68, 0x0e, 0x72, 0x8c, 0x3a, 0x96, 0xc3, 0xaa, 0xe6, 0xbc, 0xb1, 0x50, + 0x24, 0x6a, 0xc6, 0xe1, 0x9c, 0x77, 0xea, 0x57, 0xb3, 0xf3, 0xc6, 0xc2, 0x14, 0x51, 0x33, 0xfc, + 0xc7, 0x0c, 0x94, 0x3f, 0x1c, 0x52, 0x6f, 0x4b, 0x09, 0x00, 0xd5, 0xa1, 0xe0, 0xd3, 0x1e, 0x6d, + 0x33, 0xd7, 0x13, 0x17, 0x2c, 0xb6, 0xd2, 0x55, 0x83, 0x04, 0x30, 0x54, 0x81, 0x6c, 0xcf, 0xee, + 0xdb, 0x4c, 0x5c, 0x6b, 0x8a, 0xc8, 0x09, 0x3a, 0x0f, 0x59, 0x9f, 0x59, 0x1e, 0x13, 0x77, 0x29, + 0x9d, 0xae, 0x2d, 0x4a, 0xa5, 0x2d, 0x6a, 0xa5, 0x2d, 0xde, 0xd6, 0x4a, 0x6b, 0x15, 0x9e, 0x8c, + 0x1a, 0xa9, 0x8f, 0xff, 0xd9, 0x30, 0x88, 0xdc, 0x82, 0xce, 0x41, 0x86, 0x3a, 0x1d, 0x71, 0xdf, + 0x2f, 0xbb, 0x93, 0x6f, 0x40, 0xa7, 0xa0, 0xd8, 0xb1, 0x3d, 0xda, 0x66, 0xb6, 0xeb, 0x08, 0xae, + 0xa6, 0x4f, 0xcf, 0x86, 0x1a, 0x59, 0xd6, 0x4b, 0x24, 0xc4, 0x42, 0x27, 0x21, 0xe7, 0x73, 0xd1, + 0xf9, 0xd5, 0xfc, 0x7c, 0x66, 0xa1, 0xd8, 0xaa, 0xec, 0x8c, 0x1a, 0x33, 0x12, 0x72, 0xd2, 0xed, + 0xdb, 0x8c, 0xf6, 0x07, 0x6c, 0x8b, 0x28, 0x1c, 0x74, 0x02, 0xf2, 0x1d, 0xda, 0xa3, 0x5c, 0xe1, + 0x05, 0xa1, 0xf0, 0x99, 0x08, 0x79, 0xb1, 0x40, 0x34, 0x02, 0xba, 0x0b, 0xe6, 0xa0, 0x67, 0x39, + 0xd5, 0xa2, 0xe0, 0x62, 0x3a, 0x44, 0xbc, 0xd5, 0xb3, 0x9c, 0xd6, 0xbb, 0x9f, 0x8d, 0x1a, 0x67, + 0xbb, 0x36, 0xdb, 0x1c, 0xae, 0x2f, 0xb6, 0xdd, 0x7e, 0xb3, 0xeb, 0x59, 0x1b, 0x96, 0x63, 0x35, + 0x7b, 0xee, 0x7d, 0xbb, 0xf9, 0xf0, 0x4c, 0x93, 0xdb, 0xe7, 0x83, 0x21, 0xf5, 0x6c, 0xea, 0x35, + 0x39, 0x99, 0x45, 0xa1, 0x12, 0xbe, 0x95, 0x08, 0xb2, 0x57, 0xcd, 0x42, 0x6e, 0x26, 0x8f, 0x9f, + 0xa5, 0x01, 0xad, 0x59, 0xfd, 0x41, 0x8f, 0x4e, 0xa4, 0xb2, 0x40, 0x39, 0xe9, 0x7d, 0x2b, 0x27, + 0x33, 0xa9, 0x72, 0x42, 0x49, 0x9b, 0x93, 0x49, 0x3a, 0xfb, 0x65, 0x25, 0x9d, 0xfb, 0x5a, 0x24, + 0x8d, 0xab, 0x60, 0xf2, 0x19, 0x9a, 0x81, 0x8c, 0x67, 0x3d, 0x12, 0xf2, 0x2c, 0x13, 0x3e, 0xc4, + 0xab, 0x90, 0x93, 0x77, 0x41, 0xb5, 0xa4, 0xc0, 0xe3, 0xef, 0x23, 
0x14, 0x76, 0x46, 0x8b, 0x71, + 0x26, 0x14, 0x63, 0x46, 0x08, 0x08, 0xff, 0xc6, 0x80, 0x29, 0xa5, 0x45, 0xe5, 0x63, 0xd6, 0x21, + 0x2f, 0xdf, 0xb8, 0xf6, 0x2f, 0x87, 0x93, 0xfe, 0xe5, 0x52, 0xc7, 0x1a, 0x30, 0xea, 0xb5, 0x9a, + 0x4f, 0x46, 0x0d, 0xe3, 0xb3, 0x51, 0xe3, 0xf8, 0x5e, 0x8c, 0x6a, 0x7f, 0xa7, 0xfd, 0x92, 0x26, + 0x8c, 0xde, 0x14, 0xb7, 0x63, 0xbe, 0x32, 0x85, 0x03, 0x8b, 0xd2, 0x4d, 0xae, 0x38, 0x5d, 0xea, + 0x73, 0xca, 0x26, 0xd7, 0x22, 0x91, 0x38, 0xf8, 0x27, 0x30, 0x1b, 0xb3, 0x36, 0x75, 0xcf, 0x77, + 0x20, 0xe7, 0x73, 0x01, 0xea, 0x6b, 0x46, 0x74, 0xb5, 0x26, 0xe0, 0xad, 0x69, 0x75, 0xbf, 0x9c, + 0x9c, 0x13, 0x85, 0x3f, 0xd9, 0xe9, 0x7f, 0x37, 0xa0, 0xbc, 0x6a, 0xad, 0xd3, 0x9e, 0x36, 0x73, + 0x04, 0xa6, 0x63, 0xf5, 0xa9, 0x92, 0xb8, 0x18, 0x73, 0xb7, 0xf6, 0xd0, 0xea, 0x0d, 0xa9, 0x24, + 0x59, 0x20, 0x6a, 0x36, 0xa9, 0x3f, 0x32, 0xf6, 0xed, 0x8f, 0x8c, 0xd0, 0xe4, 0x2b, 0x90, 0xe5, + 0x96, 0xb5, 0x25, 0x7c, 0x51, 0x91, 0xc8, 0x09, 0x3e, 0x0e, 0x53, 0x8a, 0x0b, 0x25, 0xbe, 0xf0, + 0xca, 0x5c, 0x7c, 0x45, 0x7d, 0x65, 0xdc, 0x87, 0x9c, 0x94, 0x36, 0xfa, 0x3f, 0x28, 0x06, 0x31, + 0x4e, 0x70, 0x9b, 0x69, 0xe5, 0x76, 0x46, 0x8d, 0x34, 0xf3, 0x49, 0xb8, 0x80, 0x1a, 0x90, 0x15, + 0x3b, 0x05, 0xe7, 0x46, 0xab, 0xb8, 0x33, 0x6a, 0x48, 0x00, 0x91, 0x7f, 0xe8, 0x08, 0x98, 0x9b, + 0x3c, 0xcc, 0x70, 0x11, 0x98, 0xad, 0xc2, 0xce, 0xa8, 0x21, 0xe6, 0x44, 0xfc, 0xe2, 0x2b, 0x50, + 0x5e, 0xa5, 0x5d, 0xab, 0xbd, 0xa5, 0x0e, 0xad, 0x68, 0x72, 0xfc, 0x40, 0x43, 0xd3, 0x38, 0x0a, + 0xe5, 0xe0, 0xc4, 0x7b, 0x7d, 0x5f, 0x19, 0x75, 0x29, 0x80, 0x5d, 0xf7, 0xf1, 0xaf, 0x0c, 0x50, + 0x7a, 0x46, 0x18, 0x72, 0x3d, 0xce, 0xab, 0xaf, 0xdc, 0x10, 0xec, 0x8c, 0x1a, 0x0a, 0x42, 0xd4, + 0x3f, 0xba, 0x00, 0x79, 0x5f, 0x9c, 0xc8, 0x89, 0x25, 0xcd, 0x47, 0x2c, 0xb4, 0x0e, 0x70, 0x33, + 0xd8, 0x19, 0x35, 0x34, 0x22, 0xd1, 0x03, 0xb4, 0x18, 0x8b, 0x9f, 0x92, 0xb1, 0xe9, 0x9d, 0x51, + 0x23, 0x02, 0x8d, 0xc6, 0x53, 0xfc, 0x85, 0x01, 0xa5, 0xdb, 0x96, 0x1d, 0x98, 0x50, 0x55, 0xab, + 0x28, 0x74, 0x93, 0x12, 0xc0, 0x9f, 0x74, 0x87, 0xf6, 0xac, 0xad, 0xcb, 0xae, 0x27, 0xe8, 0x4e, + 0x91, 0x60, 0x1e, 0x86, 0x3c, 0x73, 0x6c, 0xc8, 0xcb, 0x4e, 0xee, 0x55, 0xbf, 0x5e, 0x1f, 0x76, + 0xd5, 0x2c, 0xa4, 0x67, 0x32, 0xf8, 0xcf, 0x06, 0x94, 0x25, 0xf3, 0xca, 0xf2, 0x7e, 0x0c, 0x39, + 0x29, 0x1b, 0xc1, 0xfe, 0xff, 0xf0, 0x2f, 0x6f, 0x4e, 0xe2, 0x5b, 0x14, 0x4d, 0xf4, 0x1e, 0x4c, + 0x77, 0x3c, 0x77, 0x30, 0xa0, 0x9d, 0x35, 0xe5, 0xc5, 0xd2, 0x49, 0x2f, 0xb6, 0x1c, 0x5d, 0x27, + 0x09, 0x74, 0xfc, 0x0f, 0x03, 0xa6, 0x94, 0xc3, 0x50, 0xea, 0x0a, 0x44, 0x6c, 0xec, 0x3b, 0x70, + 0xa5, 0x27, 0x0d, 0x5c, 0x73, 0x90, 0xeb, 0x7a, 0xee, 0x70, 0xe0, 0x57, 0x33, 0xf2, 0x79, 0xca, + 0xd9, 0x64, 0x01, 0x0d, 0x5f, 0x85, 0x69, 0xcd, 0xca, 0x1e, 0x5e, 0xb3, 0x96, 0xf4, 0x9a, 0x2b, + 0x1d, 0xea, 0x30, 0x7b, 0xc3, 0x0e, 0xfc, 0xa0, 0xc2, 0xc7, 0x3f, 0x37, 0x60, 0x26, 0x89, 0x82, + 0x96, 0x23, 0x4f, 0x8d, 0x93, 0x3b, 0xb6, 0x37, 0xb9, 0x45, 0xe1, 0x7f, 0xfc, 0xf7, 0x1d, 0xe6, + 0x6d, 0x69, 0xd2, 0x72, 0x6f, 0xed, 0x2c, 0x94, 0x22, 0x8b, 0x3c, 0x4a, 0xdd, 0xa7, 0xea, 0x71, + 0x10, 0x3e, 0x0c, 0xbd, 0x42, 0x5a, 0xfa, 0x34, 0x31, 0xc1, 0xbf, 0x34, 0x60, 0x2a, 0xa6, 0x4b, + 0xf4, 0x0e, 0x98, 0x1b, 0x9e, 0xdb, 0x9f, 0x48, 0x51, 0x62, 0x07, 0xfa, 0x16, 0xa4, 0x99, 0x3b, + 0x91, 0x9a, 0xd2, 0xcc, 0xe5, 0x5a, 0x52, 0xec, 0x67, 0x64, 0x9a, 0x2b, 0x67, 0xf8, 0x2c, 0x14, + 0x05, 0x43, 0xb7, 0x2c, 0xdb, 0x1b, 0x1b, 0x30, 0xc6, 0x33, 0x74, 0x01, 0x0e, 0x48, 0x67, 0x38, + 0x7e, 0x73, 0x79, 0xdc, 0xe6, 0xb2, 0xde, 0xfc, 0x1a, 0x64, 0x97, 0x36, 0x87, 0xce, 0x7d, 
0xbe, + 0xa5, 0x63, 0x31, 0x4b, 0x6f, 0xe1, 0x63, 0x7c, 0x08, 0x66, 0xf9, 0x1b, 0xa4, 0x9e, 0xbf, 0xe4, + 0x0e, 0x1d, 0xa6, 0xcb, 0x8c, 0x93, 0x50, 0x89, 0x83, 0x95, 0x95, 0x54, 0x20, 0xdb, 0xe6, 0x00, + 0x41, 0x63, 0x8a, 0xc8, 0x09, 0xfe, 0x9d, 0x01, 0xe8, 0x0a, 0x65, 0xe2, 0x94, 0x95, 0xe5, 0xe0, + 0x79, 0xd4, 0xa0, 0xd0, 0xb7, 0x58, 0x7b, 0x93, 0x7a, 0xbe, 0x4e, 0x43, 0xf4, 0xfc, 0x9b, 0xc8, + 0xf9, 0xf0, 0x29, 0x98, 0x8d, 0xdd, 0x52, 0xf1, 0x54, 0x83, 0x42, 0x5b, 0xc1, 0x54, 0xc8, 0x0b, + 0xe6, 0xf8, 0x2f, 0x69, 0x28, 0x88, 0x0d, 0x84, 0x6e, 0xa0, 0x53, 0x50, 0xda, 0xb0, 0x9d, 0x2e, + 0xf5, 0x06, 0x9e, 0xad, 0x44, 0x60, 0xb6, 0x0e, 0xec, 0x8c, 0x1a, 0x51, 0x30, 0x89, 0x4e, 0xd0, + 0x5b, 0x90, 0x1f, 0xfa, 0xd4, 0xbb, 0x67, 0xcb, 0x97, 0x5e, 0x6c, 0x55, 0xb6, 0x47, 0x8d, 0xdc, + 0xf7, 0x7d, 0xea, 0xad, 0x2c, 0xf3, 0xe0, 0x33, 0x14, 0x23, 0x22, 0xff, 0x3b, 0xe8, 0x9a, 0x32, + 0x53, 0x91, 0x87, 0xb5, 0xbe, 0xcd, 0xaf, 0x9f, 0x70, 0x75, 0x03, 0xcf, 0xed, 0x53, 0xb6, 0x49, + 0x87, 0x7e, 0xb3, 0xed, 0xf6, 0xfb, 0xae, 0xd3, 0x14, 0x45, 0xa5, 0x60, 0x9a, 0x47, 0x50, 0xbe, + 0x5d, 0x59, 0xee, 0x6d, 0xc8, 0xb3, 0x4d, 0xcf, 0x1d, 0x76, 0x37, 0x45, 0x60, 0xc8, 0xb4, 0xce, + 0x4f, 0x4e, 0x4f, 0x53, 0x20, 0x7a, 0x80, 0x8e, 0x72, 0x69, 0xd1, 0xf6, 0x7d, 0x7f, 0xd8, 0x97, + 0xa5, 0x5a, 0x2b, 0xbb, 0x33, 0x6a, 0x18, 0x6f, 0x91, 0x00, 0x8c, 0x7f, 0x96, 0x86, 0x86, 0x30, + 0xd4, 0x3b, 0x22, 0x73, 0xb8, 0xec, 0x7a, 0xd7, 0x29, 0xf3, 0xec, 0xf6, 0x0d, 0xab, 0x4f, 0xb5, + 0x6d, 0x34, 0xa0, 0xd4, 0x17, 0xc0, 0x7b, 0x91, 0x27, 0x00, 0xfd, 0x00, 0x0f, 0xbd, 0x0e, 0x20, + 0xde, 0x8c, 0x5c, 0x97, 0xaf, 0xa1, 0x28, 0x20, 0x62, 0x79, 0x29, 0x26, 0xa9, 0xe6, 0x84, 0x9c, + 0x29, 0x09, 0xad, 0x24, 0x25, 0x34, 0x31, 0x9d, 0x40, 0x2c, 0x51, 0x5b, 0xcf, 0xc6, 0x6d, 0x1d, + 0x7f, 0x62, 0x40, 0x7d, 0x55, 0xdf, 0x7c, 0x9f, 0xe2, 0xd0, 0xfc, 0xa6, 0x5f, 0x12, 0xbf, 0x99, + 0xaf, 0xc6, 0x2f, 0xae, 0x03, 0xac, 0xda, 0x0e, 0xbd, 0x6c, 0xf7, 0x18, 0xf5, 0xc6, 0x14, 0x23, + 0xbf, 0xc8, 0x84, 0x2e, 0x81, 0xd0, 0x0d, 0xcd, 0xe7, 0x52, 0xc4, 0x0f, 0xbf, 0x0c, 0x36, 0xd2, + 0x2f, 0x51, 0x6d, 0x99, 0x84, 0x8b, 0x72, 0x20, 0xbf, 0x21, 0xd8, 0x93, 0x21, 0x35, 0xd6, 0x4f, + 0x09, 0x79, 0x6f, 0x7d, 0x57, 0x1d, 0x7e, 0xee, 0x05, 0x19, 0x91, 0xe8, 0x00, 0x35, 0xfd, 0x2d, + 0x87, 0x59, 0x8f, 0x23, 0xfb, 0x89, 0x3e, 0x04, 0x59, 0x2a, 0xe9, 0xca, 0x8e, 0x4d, 0xba, 0x2e, + 0xaa, 0x63, 0xbe, 0x52, 0xf1, 0x78, 0x31, 0xf4, 0x80, 0x42, 0x29, 0xca, 0x03, 0x1e, 0x03, 0xd3, + 0xa3, 0x1b, 0x3a, 0x54, 0xa3, 0xf0, 0xe4, 0x00, 0x53, 0xac, 0xe3, 0xbf, 0x1a, 0x30, 0x73, 0x85, + 0xb2, 0x78, 0x12, 0xf4, 0x0a, 0xa9, 0x14, 0x7f, 0x00, 0x07, 0x23, 0xf7, 0x57, 0xdc, 0x9f, 0x49, + 0x64, 0x3e, 0x87, 0x42, 0xfe, 0x57, 0x9c, 0x0e, 0x7d, 0xac, 0x8a, 0xc6, 0x78, 0xd2, 0x73, 0x0b, + 0x4a, 0x91, 0x45, 0x74, 0x29, 0x91, 0xee, 0x44, 0x1a, 0x3d, 0x41, 0xc8, 0x6e, 0x55, 0x14, 0x4f, + 0xb2, 0x6c, 0x54, 0xc9, 0x6c, 0x90, 0x1a, 0xac, 0x01, 0x12, 0xea, 0x12, 0x64, 0xa3, 0xc1, 0x49, + 0x40, 0xaf, 0x05, 0x79, 0x4f, 0x30, 0x47, 0x47, 0xc1, 0xf4, 0xdc, 0x47, 0x3a, 0x8f, 0x9d, 0x0a, + 0x8f, 0x24, 0xee, 0x23, 0x22, 0x96, 0xf0, 0x05, 0xc8, 0x10, 0xf7, 0x11, 0xaa, 0x03, 0x78, 0x96, + 0xd3, 0xa5, 0x77, 0x82, 0x0a, 0xaa, 0x4c, 0x22, 0x90, 0x3d, 0x12, 0x87, 0x25, 0x38, 0x18, 0xbd, + 0x91, 0x54, 0xf7, 0x22, 0xe4, 0x3f, 0x1c, 0x46, 0xc5, 0x55, 0x49, 0x88, 0x4b, 0x16, 0xe3, 0x1a, + 0x89, 0xdb, 0x0c, 0x84, 0x70, 0x74, 0x04, 0x8a, 0xcc, 0x5a, 0xef, 0xd1, 0x1b, 0xa1, 0x9b, 0x0b, + 0x01, 0x7c, 0x95, 0x17, 0x7f, 0x77, 0x22, 0x19, 0x50, 0x08, 0x40, 0x27, 0x60, 0x26, 0xbc, 0xf3, + 0x2d, 0x8f, 0x6e, 
0xd8, 0x8f, 0x85, 0x86, 0xcb, 0x64, 0x17, 0x1c, 0x2d, 0xc0, 0x81, 0x10, 0xb6, + 0x26, 0x32, 0x0d, 0x53, 0xa0, 0x26, 0xc1, 0x5c, 0x36, 0x82, 0xdd, 0xf7, 0x1f, 0x0c, 0xad, 0x9e, + 0x78, 0x7c, 0x65, 0x12, 0x81, 0xe0, 0xbf, 0x19, 0x70, 0x50, 0xaa, 0x9a, 0x97, 0xfd, 0xaf, 0xa2, + 0xd5, 0xff, 0xde, 0x00, 0x14, 0xe5, 0x40, 0x99, 0xd6, 0x1b, 0xd1, 0x7e, 0x0e, 0x4f, 0x65, 0x4a, + 0xa2, 0xa6, 0x95, 0xa0, 0xb0, 0x25, 0x83, 0x21, 0x27, 0xd2, 0x21, 0x59, 0x5c, 0x9b, 0xb2, 0x68, + 0x96, 0x10, 0xa2, 0xfe, 0x79, 0xad, 0xbf, 0xbe, 0xc5, 0xa8, 0xaf, 0x4a, 0x5e, 0x51, 0xeb, 0x0b, + 0x00, 0x91, 0x7f, 0xfc, 0x2c, 0xea, 0x30, 0x61, 0x35, 0x66, 0x78, 0x96, 0x02, 0x11, 0x3d, 0xc0, + 0x7f, 0x4a, 0xc3, 0xd4, 0x1d, 0xb7, 0x37, 0x0c, 0x03, 0xe3, 0xab, 0x14, 0x30, 0x62, 0x75, 0x78, + 0x56, 0xd7, 0xe1, 0x08, 0x4c, 0x9f, 0xd1, 0x81, 0xb0, 0xac, 0x0c, 0x11, 0x63, 0x84, 0xa1, 0xcc, + 0x2c, 0xaf, 0x4b, 0x99, 0xac, 0x6e, 0xaa, 0x39, 0x91, 0x76, 0xc6, 0x60, 0x68, 0x1e, 0x4a, 0x56, + 0xb7, 0xeb, 0xd1, 0xae, 0xc5, 0x68, 0x6b, 0xab, 0x9a, 0x17, 0x87, 0x45, 0x41, 0xf8, 0x23, 0x98, + 0xd6, 0xc2, 0x52, 0x2a, 0x7d, 0x1b, 0xf2, 0x0f, 0x05, 0x64, 0x4c, 0xef, 0x4b, 0xa2, 0x2a, 0x37, + 0xa6, 0xd1, 0xe2, 0xed, 0x72, 0x7d, 0x67, 0x7c, 0x15, 0x72, 0x12, 0x1d, 0x1d, 0x89, 0xd6, 0x28, + 0xb2, 0x49, 0xc3, 0xe7, 0xaa, 0xe0, 0xc0, 0x90, 0x93, 0x84, 0x94, 0xe2, 0x85, 0x6d, 0x48, 0x08, + 0x51, 0xff, 0xf8, 0x3f, 0x06, 0x1c, 0x5a, 0xa6, 0x8c, 0xb6, 0x19, 0xed, 0x5c, 0xb6, 0x69, 0xaf, + 0xf3, 0x8d, 0x96, 0xcf, 0x41, 0x13, 0x2c, 0x13, 0x69, 0x82, 0x71, 0xbf, 0xd3, 0xb3, 0x1d, 0xba, + 0x1a, 0xe9, 0xa2, 0x84, 0x00, 0xee, 0x21, 0x36, 0xf8, 0xc5, 0xe5, 0xb2, 0xfc, 0x3e, 0x11, 0x81, + 0x04, 0x1a, 0xce, 0x85, 0x1a, 0xc6, 0x2b, 0x30, 0x97, 0x64, 0x5a, 0xe9, 0xa8, 0x09, 0x39, 0xb1, + 0x77, 0x4c, 0x17, 0x35, 0xb6, 0x83, 0x28, 0x34, 0xec, 0xc1, 0x54, 0x6c, 0x41, 0xe8, 0x8c, 0xdb, + 0x88, 0xf2, 0x9f, 0x72, 0x82, 0xfe, 0x1f, 0x4c, 0xb6, 0x35, 0x50, 0x6e, 0xb3, 0x75, 0xe8, 0x8b, + 0x51, 0xe3, 0x60, 0x6c, 0xdb, 0xed, 0xad, 0x01, 0x25, 0x02, 0x85, 0x9b, 0x56, 0xdb, 0xf2, 0x3a, + 0xb6, 0x63, 0xf5, 0x6c, 0x26, 0x45, 0x61, 0x92, 0x28, 0x08, 0xff, 0x36, 0xa2, 0x34, 0x69, 0x8f, + 0xfb, 0x54, 0x9a, 0xb1, 0x6f, 0xa5, 0x19, 0x2f, 0x50, 0x1a, 0xfe, 0x61, 0x28, 0x62, 0x7d, 0x45, + 0x25, 0xe2, 0xf7, 0x60, 0xba, 0x13, 0x5b, 0xd9, 0x5b, 0xd4, 0xb2, 0xf7, 0x99, 0x40, 0xc7, 0x6f, + 0x84, 0x22, 0x17, 0x90, 0xf1, 0x22, 0x3f, 0x71, 0x0c, 0x8a, 0xc1, 0x67, 0x1c, 0x54, 0x82, 0xfc, + 0xe5, 0x9b, 0xe4, 0x07, 0x97, 0xc8, 0xf2, 0x4c, 0x0a, 0x95, 0xa1, 0xd0, 0xba, 0xb4, 0x74, 0x4d, + 0xcc, 0x8c, 0xd3, 0x9f, 0x64, 0x75, 0xd0, 0xf4, 0xd0, 0x77, 0x20, 0x2b, 0x23, 0xe1, 0x5c, 0x78, + 0x99, 0xe8, 0xd7, 0x92, 0xda, 0xe1, 0x5d, 0x70, 0xc9, 0x15, 0x4e, 0xbd, 0x6d, 0xa0, 0x1b, 0x50, + 0x12, 0x40, 0xd5, 0x14, 0x3d, 0x92, 0xec, 0x4d, 0xc6, 0x28, 0xbd, 0xbe, 0xc7, 0x6a, 0x84, 0xde, + 0x79, 0xc8, 0x4a, 0x06, 0xe7, 0x12, 0x09, 0xcb, 0x98, 0xdb, 0xc4, 0xda, 0xc4, 0x38, 0x85, 0xde, + 0x05, 0xf3, 0xb6, 0x65, 0xf7, 0x50, 0x24, 0x5f, 0x8a, 0xf4, 0x32, 0x6b, 0x73, 0x49, 0x70, 0xe4, + 0xd8, 0x8b, 0x41, 0x4b, 0xf6, 0x70, 0xb2, 0x2f, 0xa4, 0xb7, 0x57, 0x77, 0x2f, 0x04, 0x27, 0xdf, + 0x94, 0x8d, 0x43, 0xdd, 0x9d, 0x40, 0xaf, 0xc7, 0x8f, 0x4a, 0x34, 0x33, 0x6a, 0xf5, 0xbd, 0x96, + 0x03, 0x82, 0xab, 0x50, 0x8a, 0x74, 0x06, 0xa2, 0x62, 0xdd, 0xdd, 0xd6, 0x88, 0x8a, 0x75, 0x4c, + 0x3b, 0x01, 0xa7, 0xd0, 0x15, 0x28, 0xf0, 0x2c, 0x93, 0x07, 0x5b, 0xf4, 0x5a, 0x32, 0x99, 0x8c, + 0x24, 0x11, 0xb5, 0x23, 0xe3, 0x17, 0x03, 0x42, 0xdf, 0x83, 0xe2, 0x15, 0xca, 0x94, 0x27, 0x3e, + 0x9c, 0x74, 0xe5, 0x63, 0x24, 0x15, 0x0f, 
0x07, 0x38, 0x85, 0x3e, 0x12, 0x09, 0x6f, 0xdc, 0x13, + 0xa1, 0xc6, 0x1e, 0x1e, 0x27, 0xb8, 0xd7, 0xfc, 0xde, 0x08, 0x9a, 0xf2, 0xe9, 0xbb, 0xfa, 0xc3, + 0xf1, 0xb2, 0xc5, 0x2c, 0x74, 0x13, 0xa6, 0x05, 0xcb, 0xc1, 0x97, 0xe5, 0x98, 0x69, 0xee, 0xfa, + 0x8c, 0x1d, 0x33, 0xcd, 0xdd, 0x9f, 0xb3, 0x71, 0xaa, 0x75, 0xf7, 0xe9, 0xb3, 0x7a, 0xea, 0xd3, + 0x67, 0xf5, 0xd4, 0xe7, 0xcf, 0xea, 0xc6, 0x4f, 0xb7, 0xeb, 0xc6, 0x1f, 0xb6, 0xeb, 0xc6, 0x93, + 0xed, 0xba, 0xf1, 0x74, 0xbb, 0x6e, 0xfc, 0x6b, 0xbb, 0x6e, 0xfc, 0x7b, 0xbb, 0x9e, 0xfa, 0x7c, + 0xbb, 0x6e, 0x7c, 0xfc, 0xbc, 0x9e, 0x7a, 0xfa, 0xbc, 0x9e, 0xfa, 0xf4, 0x79, 0x3d, 0xf5, 0xa3, + 0xe3, 0x2f, 0xae, 0xc1, 0xa4, 0xb7, 0xc9, 0x89, 0xbf, 0x33, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, + 0x05, 0xeb, 0x00, 0x3f, 0xfa, 0x1f, 0x00, 0x00, } func (x Direction) String() string { @@ -11035,7 +11035,7 @@ func (m *QueryResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Streams = append(m.Streams, github_com_grafana_loki_v3_pkg_push.Stream{}) + m.Streams = append(m.Streams, github_com_grafana_loki_pkg_push.Stream{}) if err := m.Streams[len(m.Streams)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } @@ -12085,7 +12085,7 @@ func (m *TailResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Stream == nil { - m.Stream = &github_com_grafana_loki_v3_pkg_push.Stream{} + m.Stream = &github_com_grafana_loki_pkg_push.Stream{} } if err := m.Stream.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err diff --git a/pkg/logproto/logproto.proto b/pkg/logproto/logproto.proto index 39d64c30d8185..892e0f599ebae 100644 --- a/pkg/logproto/logproto.proto +++ b/pkg/logproto/logproto.proto @@ -98,7 +98,7 @@ message Delete { message QueryResponse { repeated StreamAdapter streams = 1 [ - (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/push.Stream", + (gogoproto.customtype) = "github.com/grafana/loki/pkg/push.Stream", (gogoproto.nullable) = true ]; stats.Ingester stats = 2 [(gogoproto.nullable) = false]; @@ -169,7 +169,7 @@ message TailRequest { } message TailResponse { - StreamAdapter stream = 1 [(gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/push.Stream"]; + StreamAdapter stream = 1 [(gogoproto.customtype) = "github.com/grafana/loki/pkg/push.Stream"]; repeated DroppedStream droppedStreams = 2; } diff --git a/pkg/logqlmodel/logqlmodel.go b/pkg/logqlmodel/logqlmodel.go index 9de0d5be9f756..8da990f0e74bb 100644 --- a/pkg/logqlmodel/logqlmodel.go +++ b/pkg/logqlmodel/logqlmodel.go @@ -5,7 +5,7 @@ import ( "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" - "github.com/grafana/loki/v3/pkg/push" + "github.com/grafana/loki/pkg/push" "github.com/grafana/loki/v3/pkg/logqlmodel/stats" ) diff --git a/pkg/push/go.mod b/pkg/push/go.mod index f35acdfd50762..067ca8f02c80c 100644 --- a/pkg/push/go.mod +++ b/pkg/push/go.mod @@ -1,4 +1,4 @@ -module github.com/grafana/loki/v3/pkg/push +module github.com/grafana/loki/pkg/push go 1.19 diff --git a/pkg/push/push.pb.go b/pkg/push/push.pb.go index 7979872929611..3b07d850ff162 100644 --- a/pkg/push/push.pb.go +++ b/pkg/push/push.pb.go @@ -296,41 +296,40 @@ func init() { func init() { proto.RegisterFile("pkg/push/push.proto", fileDescriptor_35ec442956852c9e) } var fileDescriptor_35ec442956852c9e = []byte{ - // 532 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x4c, - 0x10, 0xf6, 0x26, 0x6e, 0xda, 0x6e, 0xfa, 0xf7, 0xaf, 0x96, 0xb6, 0x18, 0xab, 
0x5a, 0x57, 0x86, - 0x43, 0x0e, 0x60, 0x4b, 0xe9, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x42, - 0x70, 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0xeb, 0x4a, 0xbd, 0xf1, 0x08, 0xe5, 0x2d, - 0x78, 0x02, 0x9e, 0xa1, 0xc7, 0x1c, 0x2b, 0x0e, 0x86, 0x38, 0x17, 0x94, 0x53, 0x1f, 0x01, 0x79, - 0x6d, 0x93, 0x50, 0x90, 0xb8, 0x6c, 0xbe, 0x99, 0x9d, 0xf9, 0xbe, 0x2f, 0xb3, 0x63, 0xf8, 0x20, - 0xbb, 0x08, 0xdc, 0x2c, 0x17, 0xa1, 0x3a, 0x9c, 0x8c, 0x33, 0xc9, 0xd0, 0x46, 0xcc, 0x02, 0x85, - 0xcc, 0xdd, 0x80, 0x05, 0x4c, 0x41, 0xb7, 0x42, 0xf5, 0xbd, 0x69, 0x05, 0x8c, 0x05, 0x31, 0x75, - 0x55, 0x34, 0xce, 0xcf, 0x5d, 0x19, 0x25, 0x54, 0x48, 0x92, 0x64, 0x75, 0x81, 0xfd, 0x16, 0xf6, - 0x4f, 0x73, 0x11, 0xfa, 0xf4, 0x43, 0x4e, 0x85, 0x44, 0xc7, 0x70, 0x5d, 0x48, 0x4e, 0x49, 0x22, - 0x0c, 0x70, 0xd8, 0x1d, 0xf4, 0x87, 0x0f, 0x9d, 0x56, 0xc1, 0x79, 0xad, 0x2e, 0x46, 0x13, 0x92, - 0x49, 0xca, 0xbd, 0xbd, 0xaf, 0x85, 0xd5, 0xab, 0x53, 0x8b, 0xc2, 0x6a, 0xbb, 0xfc, 0x16, 0xd8, - 0xdb, 0x70, 0xab, 0x26, 0x16, 0x19, 0x4b, 0x05, 0xb5, 0x3f, 0x01, 0xf8, 0xdf, 0x6f, 0x0c, 0xc8, - 0x86, 0xbd, 0x98, 0x8c, 0x69, 0x5c, 0x49, 0x81, 0xc1, 0xa6, 0x07, 0x17, 0x85, 0xd5, 0x64, 0xfc, - 0xe6, 0x17, 0x8d, 0xe0, 0x3a, 0x4d, 0x25, 0x8f, 0xa8, 0x30, 0x3a, 0xca, 0xcf, 0xfe, 0xd2, 0xcf, - 0xcb, 0x54, 0xf2, 0xab, 0xd6, 0xce, 0xff, 0x37, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, - 0xe8, 0x11, 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x7b, 0x08, 0x06, 0xba, 0xb7, 0xb6, 0x28, 0x2c, 0xf0, - 0xcc, 0x57, 0x29, 0xfb, 0x05, 0xdc, 0x39, 0xa9, 0x74, 0x4e, 0x49, 0xc4, 0x5b, 0x57, 0x08, 0xea, - 0x29, 0x49, 0x68, 0xed, 0xc9, 0x57, 0x18, 0xed, 0xc2, 0xb5, 0x4b, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, - 0x64, 0x1d, 0xd8, 0x5f, 0x3a, 0x70, 0x6b, 0xd5, 0x03, 0x3a, 0x86, 0x9b, 0xbf, 0xc6, 0xab, 0xfa, - 0xfb, 0x43, 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xdb, 0x8d, 0xe5, - 0x8e, 0x14, 0xd7, 0xdf, 0x2c, 0xe0, 0x2f, 0x9b, 0xd1, 0x01, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, - 0x8d, 0x45, 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, - 0xf2, 0x8a, 0x4a, 0x32, 0x21, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x39, 0x9f, 0xfb, 0x7f, 0xcd, - 0x7b, 0xd2, 0x08, 0x1e, 0xfc, 0xd9, 0xfd, 0x94, 0x25, 0x91, 0xa4, 0x49, 0x26, 0xaf, 0xfc, 0xbf, - 0x70, 0xa3, 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x62, 0xe8, 0xff, 0x54, 0x31, 0x1a, 0x95, 0x9d, - 0xba, 0x63, 0x85, 0xb9, 0xe1, 0x18, 0x8e, 0x60, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, - 0x08, 0xed, 0x2d, 0xf9, 0x56, 0xb6, 0xd1, 0xdc, 0xbf, 0x9f, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0x4d, - 0x67, 0x58, 0xbb, 0x9d, 0x61, 0xed, 0x6e, 0x86, 0xc1, 0xc7, 0x12, 0x83, 0xcf, 0x25, 0x06, 0x37, - 0x25, 0x06, 0xd3, 0x12, 0x83, 0xef, 0x25, 0x06, 0x3f, 0x4a, 0xac, 0xdd, 0x95, 0x18, 0x5c, 0xcf, - 0xb1, 0x36, 0x9d, 0x63, 0xed, 0x76, 0x8e, 0xb5, 0xf7, 0x8f, 0x83, 0x48, 0x86, 0xf9, 0xd8, 0x39, - 0x63, 0x89, 0x1b, 0x70, 0x72, 0x4e, 0x52, 0xe2, 0xc6, 0xec, 0x22, 0x72, 0x2f, 0x8f, 0xdc, 0xf6, - 0xeb, 0x1a, 0xf7, 0x94, 0xe0, 0xd1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x51, 0xe6, 0x29, 0x58, - 0x70, 0x03, 0x00, 0x00, + // 527 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x40, + 0x10, 0xf5, 0x26, 0x6e, 0xda, 0x6e, 0x4a, 0xa9, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x47, 0x16, 0x87, + 0x1c, 0xc0, 0x96, 0xc2, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x20, 0x71, + 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0x6b, 0xa4, 0xde, 0xf8, 0x84, 0xf2, 0x17, 0x7c, + 0x01, 0xdf, 0xd0, 0x63, 0x8e, 
0x15, 0x07, 0x43, 0x9c, 0x0b, 0xca, 0xa9, 0x9f, 0x80, 0xbc, 0xb6, + 0x49, 0x28, 0x48, 0x5c, 0x36, 0x6f, 0x66, 0x67, 0xde, 0x7b, 0x99, 0x1d, 0xc3, 0x07, 0xd9, 0x45, + 0xe0, 0x66, 0xb9, 0x08, 0xd5, 0xe1, 0x64, 0x9c, 0x49, 0x86, 0xb6, 0x62, 0x16, 0x28, 0x64, 0xee, + 0x07, 0x2c, 0x60, 0x0a, 0xba, 0x15, 0xaa, 0xef, 0x4d, 0x2b, 0x60, 0x2c, 0x88, 0xa9, 0xab, 0xa2, + 0x49, 0x7e, 0xee, 0xca, 0x28, 0xa1, 0x42, 0x92, 0x24, 0xab, 0x0b, 0xec, 0x77, 0xb0, 0x7f, 0x9a, + 0x8b, 0xd0, 0xa7, 0x1f, 0x72, 0x2a, 0x24, 0x3a, 0x86, 0x9b, 0x42, 0x72, 0x4a, 0x12, 0x61, 0x80, + 0x41, 0x77, 0xd8, 0x1f, 0x3d, 0x74, 0x5a, 0x05, 0xe7, 0xb5, 0xba, 0x18, 0x4f, 0x49, 0x26, 0x29, + 0xf7, 0x0e, 0xbe, 0x15, 0x56, 0xaf, 0x4e, 0x2d, 0x0b, 0xab, 0xed, 0xf2, 0x5b, 0x60, 0xef, 0xc2, + 0x9d, 0x9a, 0x58, 0x64, 0x2c, 0x15, 0xd4, 0xfe, 0x0c, 0xe0, 0xbd, 0x3f, 0x18, 0x90, 0x0d, 0x7b, + 0x31, 0x99, 0xd0, 0xb8, 0x92, 0x02, 0xc3, 0x6d, 0x0f, 0x2e, 0x0b, 0xab, 0xc9, 0xf8, 0xcd, 0x2f, + 0x1a, 0xc3, 0x4d, 0x9a, 0x4a, 0x1e, 0x51, 0x61, 0x74, 0x94, 0x9f, 0xc3, 0x95, 0x9f, 0x97, 0xa9, + 0xe4, 0x97, 0xad, 0x9d, 0xfb, 0xd7, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, 0xe8, 0x11, + 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x3b, 0x00, 0x43, 0xdd, 0xdb, 0x58, 0x16, 0x16, 0x78, 0xea, 0xab, + 0x94, 0xfd, 0x02, 0xee, 0x9d, 0x54, 0x3a, 0xa7, 0x24, 0xe2, 0xad, 0x2b, 0x04, 0xf5, 0x94, 0x24, + 0xb4, 0xf6, 0xe4, 0x2b, 0x8c, 0xf6, 0xe1, 0xc6, 0x47, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, 0x64, 0x1d, + 0xd8, 0x5f, 0x3b, 0x70, 0x67, 0xdd, 0x03, 0x3a, 0x86, 0xdb, 0xbf, 0xc7, 0xab, 0xfa, 0xfb, 0x23, + 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xbb, 0x8d, 0xe5, 0x8e, 0x14, + 0x57, 0xdf, 0x2d, 0xe0, 0xaf, 0x9a, 0xd1, 0x11, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, 0xad, 0x65, + 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, 0xfa, 0x8a, + 0x4a, 0x32, 0x25, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x35, 0x9f, 0xbb, 0x7f, 0xcd, 0x7b, 0xdc, + 0x08, 0x1e, 0xfd, 0xdd, 0xfd, 0x84, 0x25, 0x91, 0xa4, 0x49, 0x26, 0x2f, 0xfd, 0x7f, 0x70, 0xa3, + 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x6a, 0xe8, 0xff, 0x55, 0x31, 0x1a, 0x95, 0xbd, 0xba, 0x63, + 0x8d, 0xb9, 0xe1, 0x18, 0x8d, 0x61, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, 0x08, 0x1d, + 0xac, 0xf8, 0xd6, 0xb6, 0xd1, 0x3c, 0xbc, 0x9b, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0xce, 0xe6, 0x58, + 0xbb, 0x99, 0x63, 0xed, 0x76, 0x8e, 0xc1, 0xa7, 0x12, 0x83, 0x2f, 0x25, 0x06, 0xd7, 0x25, 0x06, + 0xb3, 0x12, 0x83, 0x1f, 0x25, 0x06, 0x3f, 0x4b, 0xac, 0xdd, 0x96, 0x18, 0x5c, 0x2d, 0xb0, 0x36, + 0x5b, 0x60, 0xed, 0x66, 0x81, 0xb5, 0xf7, 0x83, 0x20, 0x92, 0x61, 0x3e, 0x71, 0xce, 0x58, 0xe2, + 0x06, 0x9c, 0x9c, 0x93, 0x94, 0xb8, 0x31, 0xbb, 0x88, 0xdc, 0xf6, 0xd3, 0x9a, 0xf4, 0x94, 0xda, + 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7e, 0xaa, 0x57, 0xd3, 0x6d, 0x03, 0x00, 0x00, } func (this *PushRequest) Equal(that interface{}) bool { diff --git a/pkg/push/push.proto b/pkg/push/push.proto index e538c66903eae..3bf8ad06a8a83 100644 --- a/pkg/push/push.proto +++ b/pkg/push/push.proto @@ -5,7 +5,7 @@ package logproto; import "gogoproto/gogo.proto"; import "google/protobuf/timestamp.proto"; -option go_package = "github.com/grafana/loki/v3/pkg/push"; +option go_package = "github.com/grafana/loki/pkg/push"; service Pusher { rpc Push(PushRequest) returns (PushResponse) {} diff --git a/pkg/querier/queryrange/queryrange.pb.go b/pkg/querier/queryrange/queryrange.pb.go index a78b9efbaee74..a602dcafcd802 100644 --- a/pkg/querier/queryrange/queryrange.pb.go +++ b/pkg/querier/queryrange/queryrange.pb.go @@ -12,11 +12,11 @@ 
import ( github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys" _ "github.com/gogo/protobuf/types" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" + _ "github.com/grafana/loki/pkg/push" + github_com_grafana_loki_pkg_push "github.com/grafana/loki/pkg/push" github_com_grafana_loki_v3_pkg_logproto "github.com/grafana/loki/v3/pkg/logproto" logproto "github.com/grafana/loki/v3/pkg/logproto" stats "github.com/grafana/loki/v3/pkg/logqlmodel/stats" - _ "github.com/grafana/loki/v3/pkg/push" - github_com_grafana_loki_v3_pkg_push "github.com/grafana/loki/v3/pkg/push" github_com_grafana_loki_v3_pkg_querier_plan "github.com/grafana/loki/v3/pkg/querier/plan" queryrangebase "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" _ "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase/definitions" @@ -588,8 +588,8 @@ func (m *LokiLabelNamesResponse) GetStatistics() stats.Result { } type LokiData struct { - ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` - Result []github_com_grafana_loki_v3_pkg_push.Stream `protobuf:"bytes,2,rep,name=Result,proto3,customtype=github.com/grafana/loki/v3/pkg/push.Stream" json:"result"` + ResultType string `protobuf:"bytes,1,opt,name=ResultType,proto3" json:"resultType"` + Result []github_com_grafana_loki_pkg_push.Stream `protobuf:"bytes,2,rep,name=Result,proto3,customtype=github.com/grafana/loki/pkg/push.Stream" json:"result"` } func (m *LokiData) Reset() { *m = LokiData{} } @@ -1363,115 +1363,115 @@ func init() { } var fileDescriptor_51b9d53b40d11902 = []byte{ - // 1717 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x4b, 0x6f, 0xdb, 0xc6, - 0x16, 0x16, 0xf5, 0xb4, 0xc6, 0x8f, 0xeb, 0x3b, 0x36, 0x1c, 0x5e, 0x27, 0x11, 0x05, 0x01, 0x37, - 0xd7, 0x37, 0x68, 0xa5, 0x46, 0x4e, 0xdc, 0xd4, 0x35, 0x82, 0x84, 0x75, 0x02, 0x19, 0x4d, 0x8a, - 0x84, 0x36, 0xba, 0xe8, 0xa6, 0x18, 0x4b, 0x63, 0x89, 0x30, 0x45, 0xd2, 0xe4, 0xc8, 0x89, 0x81, - 0x2e, 0xf2, 0x07, 0x8a, 0x06, 0xe8, 0x9f, 0x68, 0x37, 0x2d, 0x5a, 0x74, 0xd5, 0x55, 0xbb, 0x33, - 0x0a, 0x14, 0xc8, 0x32, 0x10, 0x50, 0xb5, 0x71, 0x36, 0x85, 0x57, 0x01, 0xfa, 0x07, 0x8a, 0x79, - 0x90, 0x1a, 0x8a, 0x72, 0x2d, 0xa5, 0xe8, 0xc2, 0x45, 0x37, 0xd2, 0x3c, 0xce, 0x77, 0x38, 0xfc, - 0xce, 0x77, 0x86, 0x67, 0x06, 0xfc, 0xcf, 0xdd, 0x6d, 0x56, 0xf6, 0x3a, 0xd8, 0x33, 0xb1, 0xc7, - 0xfe, 0x0f, 0x3c, 0x64, 0x37, 0xb1, 0xd4, 0x2c, 0xbb, 0x9e, 0x43, 0x1c, 0x08, 0xfa, 0x23, 0x8b, - 0xd5, 0xa6, 0x49, 0x5a, 0x9d, 0xed, 0x72, 0xdd, 0x69, 0x57, 0x9a, 0x4e, 0xd3, 0xa9, 0x34, 0x1d, - 0xa7, 0x69, 0x61, 0xe4, 0x9a, 0xbe, 0x68, 0x56, 0x3c, 0xb7, 0x5e, 0xf1, 0x09, 0x22, 0x1d, 0x9f, - 0xe3, 0x17, 0xe7, 0xa9, 0x21, 0x6b, 0x32, 0x88, 0x18, 0xd5, 0x84, 0x39, 0xeb, 0x6d, 0x77, 0x76, - 0x2a, 0xc4, 0x6c, 0x63, 0x9f, 0xa0, 0xb6, 0x1b, 0x18, 0xd0, 0xf5, 0x59, 0x4e, 0x93, 0x23, 0x4d, - 0xbb, 0x81, 0x1f, 0x35, 0x11, 0xc1, 0x0f, 0xd1, 0x81, 0x30, 0x38, 0x1f, 0x31, 0x08, 0x1a, 0x62, - 0xf2, 0x3f, 0x91, 0x49, 0x7f, 0x17, 0x93, 0x7a, 0x4b, 0x4c, 0x15, 0xc5, 0xd4, 0x9e, 0xd5, 0x76, - 0x1a, 0xd8, 0x62, 0x8b, 0xf5, 0xf9, 0xaf, 0xb0, 0x98, 0xa3, 0x16, 0x6e, 0xc7, 0x6f, 0xb1, 0x1f, - 0x31, 0xf8, 0xce, 0xa9, 0x7c, 0x6d, 0x23, 0x1f, 0x57, 0x1a, 0x78, 0xc7, 0xb4, 0x4d, 0x62, 0x3a, - 0xb6, 0x2f, 0xb7, 0x85, 0x93, 0x95, 0xd1, 0x9c, 0x0c, 0xc6, 0xa0, 0xf4, 0x55, 0x0a, 0x4c, 0xde, - 0x75, 0x76, 0x4d, 0x03, 0xef, 0x75, 0xb0, 0x4f, 0xe0, 0x3c, 0xc8, 0x30, 0x1b, 0x55, 0x29, 0x2a, - 0x4b, 0x79, 0x83, 0x77, 0xe8, 0xa8, 0x65, 0xb6, 
0x4d, 0xa2, 0x26, 0x8b, 0xca, 0xd2, 0xb4, 0xc1, - 0x3b, 0x10, 0x82, 0xb4, 0x4f, 0xb0, 0xab, 0xa6, 0x8a, 0xca, 0x52, 0xca, 0x60, 0x6d, 0xb8, 0x08, - 0x26, 0x4c, 0x9b, 0x60, 0x6f, 0x1f, 0x59, 0x6a, 0x9e, 0x8d, 0x87, 0x7d, 0x78, 0x03, 0xe4, 0x7c, - 0x82, 0x3c, 0xb2, 0xe5, 0xab, 0xe9, 0xa2, 0xb2, 0x34, 0x59, 0x5d, 0x2c, 0xf3, 0x58, 0x95, 0x83, - 0x58, 0x95, 0xb7, 0x82, 0x58, 0xe9, 0x13, 0x87, 0x3d, 0x2d, 0xf1, 0xe4, 0x67, 0x4d, 0x31, 0x02, - 0x10, 0x5c, 0x05, 0x19, 0x6c, 0x37, 0xb6, 0x7c, 0x35, 0x33, 0x06, 0x9a, 0x43, 0xe0, 0x15, 0x90, - 0x6f, 0x98, 0x1e, 0xae, 0x53, 0xce, 0xd4, 0x6c, 0x51, 0x59, 0x9a, 0xa9, 0xce, 0x95, 0xc3, 0xd0, - 0xae, 0x07, 0x53, 0x46, 0xdf, 0x8a, 0xbe, 0x9e, 0x8b, 0x48, 0x4b, 0xcd, 0x31, 0x26, 0x58, 0x1b, - 0x96, 0x40, 0xd6, 0x6f, 0x21, 0xaf, 0xe1, 0xab, 0x13, 0xc5, 0xd4, 0x52, 0x5e, 0x07, 0xc7, 0x3d, - 0x4d, 0x8c, 0x18, 0xe2, 0x1f, 0x7e, 0x08, 0xd2, 0xae, 0x85, 0x6c, 0x15, 0xb0, 0x55, 0xce, 0x96, - 0x25, 0xce, 0xef, 0x5b, 0xc8, 0xd6, 0xdf, 0xea, 0xf6, 0xb4, 0x6b, 0xb2, 0xdc, 0x3d, 0xb4, 0x83, - 0x6c, 0x54, 0xb1, 0x9c, 0x5d, 0xb3, 0xb2, 0xbf, 0x5c, 0x91, 0x23, 0x49, 0x1d, 0x95, 0x1f, 0x50, - 0x07, 0x14, 0x6a, 0x30, 0xc7, 0xa5, 0x1f, 0x92, 0x00, 0xd2, 0x98, 0x6d, 0xd8, 0x3e, 0x41, 0x36, - 0x79, 0x95, 0xd0, 0xad, 0x81, 0x2c, 0x4d, 0x8b, 0x2d, 0x9f, 0x05, 0x6f, 0x54, 0x2e, 0x05, 0x26, - 0x4a, 0x66, 0x7a, 0x2c, 0x32, 0x33, 0x43, 0xc9, 0xcc, 0x9e, 0x4a, 0x66, 0xee, 0xaf, 0x22, 0x53, - 0x05, 0x69, 0xda, 0x83, 0xb3, 0x20, 0xe5, 0xa1, 0x87, 0x8c, 0xbb, 0x29, 0x83, 0x36, 0x4b, 0x5f, - 0xa4, 0xc1, 0x14, 0x4f, 0x0d, 0xdf, 0x75, 0x6c, 0x1f, 0xd3, 0xf5, 0x6e, 0xb2, 0xfd, 0x87, 0x33, - 0x2c, 0xd6, 0xcb, 0x46, 0x0c, 0x31, 0x03, 0x6f, 0x82, 0xf4, 0x3a, 0x22, 0x88, 0xb1, 0x3d, 0x59, - 0x9d, 0x97, 0xd7, 0x4b, 0x7d, 0xd1, 0x39, 0x7d, 0x81, 0x12, 0x7a, 0xdc, 0xd3, 0x66, 0x1a, 0x88, - 0xa0, 0xd7, 0x9c, 0xb6, 0x49, 0x70, 0xdb, 0x25, 0x07, 0x06, 0x43, 0xc2, 0x6b, 0x20, 0x7f, 0xdb, - 0xf3, 0x1c, 0x6f, 0xeb, 0xc0, 0xc5, 0x2c, 0x3a, 0x79, 0xfd, 0xdc, 0x71, 0x4f, 0x9b, 0xc3, 0xc1, - 0xa0, 0x84, 0xe8, 0x5b, 0xc2, 0xff, 0x83, 0x0c, 0xeb, 0xb0, 0x78, 0xe4, 0xf5, 0xb9, 0xe3, 0x9e, - 0xf6, 0x2f, 0x06, 0x91, 0xcc, 0xb9, 0x45, 0x34, 0x7c, 0x99, 0x91, 0xc2, 0x17, 0xaa, 0x28, 0x2b, - 0xab, 0x48, 0x05, 0xb9, 0x7d, 0xec, 0xf9, 0xd4, 0x4d, 0x8e, 0x8d, 0x07, 0x5d, 0x78, 0x0b, 0x00, - 0x4a, 0x8c, 0xe9, 0x13, 0xb3, 0x4e, 0x73, 0x85, 0x92, 0x31, 0x5d, 0xe6, 0x5b, 0xa1, 0x81, 0xfd, - 0x8e, 0x45, 0x74, 0x28, 0x58, 0x90, 0x0c, 0x0d, 0xa9, 0x0d, 0xbf, 0x54, 0x40, 0xae, 0x86, 0x51, - 0x03, 0x7b, 0xbe, 0x9a, 0x2f, 0xa6, 0x96, 0x26, 0xab, 0xff, 0x2d, 0xcb, 0xfb, 0xde, 0x7d, 0xcf, - 0x69, 0x63, 0xd2, 0xc2, 0x1d, 0x3f, 0x08, 0x10, 0xb7, 0xd6, 0xed, 0x6e, 0x4f, 0xc3, 0x23, 0x4a, - 0x62, 0xa4, 0xed, 0xf6, 0xc4, 0x47, 0x1d, 0xf7, 0x34, 0xe5, 0x75, 0x23, 0x58, 0x65, 0xe9, 0x27, - 0x05, 0xfc, 0x9b, 0x06, 0x79, 0x93, 0xfa, 0xf6, 0xa5, 0xb4, 0x6c, 0x23, 0x52, 0x6f, 0xa9, 0x0a, - 0x15, 0xb9, 0xc1, 0x3b, 0xf2, 0x5e, 0x98, 0xfc, 0x53, 0x7b, 0x61, 0x6a, 0xfc, 0xbd, 0x30, 0xc8, - 0xc5, 0xf4, 0xd0, 0x5c, 0xcc, 0x9c, 0x94, 0x8b, 0xa5, 0x4f, 0x52, 0x7c, 0xdf, 0x09, 0xde, 0x6f, - 0x8c, 0xb4, 0xb8, 0x13, 0xa6, 0x45, 0x8a, 0xad, 0x36, 0x54, 0x1b, 0xf7, 0xb5, 0xd1, 0xc0, 0x36, - 0x31, 0x77, 0x4c, 0xec, 0x9d, 0x92, 0x1c, 0x92, 0xe2, 0x52, 0x51, 0xc5, 0xc9, 0x72, 0x49, 0x9f, - 0x05, 0xb9, 0x0c, 0xe4, 0x48, 0xe6, 0x15, 0x72, 0xa4, 0xf4, 0x5b, 0x12, 0x2c, 0xd0, 0x88, 0xdc, - 0x45, 0xdb, 0xd8, 0x7a, 0x0f, 0xb5, 0xc7, 0x8c, 0xca, 0x25, 0x29, 0x2a, 0x79, 0x1d, 0xfe, 0xc3, - 0xfa, 0x68, 0xac, 0x7f, 0xae, 0x80, 0x89, 0x60, 0x33, 0x87, 0x65, 0x00, 
0x38, 0x8c, 0xed, 0xd7, - 0x9c, 0xeb, 0x19, 0x0a, 0xf6, 0xc2, 0x51, 0x43, 0xb2, 0x80, 0x6d, 0x90, 0xe5, 0x3d, 0x91, 0x0b, - 0xe7, 0xa4, 0x5c, 0x20, 0x1e, 0x46, 0xed, 0x5b, 0x0d, 0xe4, 0x12, 0xec, 0xe9, 0x6b, 0x74, 0x15, - 0xdd, 0x9e, 0x76, 0xf9, 0x14, 0x96, 0x58, 0x25, 0xc9, 0xa1, 0x34, 0xc4, 0xfc, 0xb1, 0x86, 0x78, - 0x48, 0xe9, 0x63, 0x05, 0xcc, 0xd2, 0xb5, 0x52, 0x76, 0x42, 0x6d, 0xac, 0x83, 0x09, 0x4f, 0xb4, - 0xd9, 0x8a, 0x27, 0xab, 0xa5, 0x72, 0x94, 0xd9, 0x21, 0x6c, 0xea, 0xe9, 0xc3, 0x9e, 0xa6, 0x18, - 0x21, 0x12, 0x2e, 0x47, 0x98, 0x4c, 0x0e, 0x63, 0x92, 0x42, 0x12, 0x11, 0xee, 0xbe, 0x4b, 0x02, - 0xb8, 0x41, 0x4b, 0x6e, 0x2a, 0xc1, 0xbe, 0x5a, 0x1f, 0xc5, 0x56, 0x74, 0xa1, 0xcf, 0x4b, 0xdc, - 0x5e, 0xbf, 0xd1, 0xed, 0x69, 0xab, 0xa7, 0x10, 0xf3, 0x07, 0x78, 0xe9, 0x2d, 0x64, 0x05, 0x27, - 0xcf, 0xc4, 0x67, 0xe6, 0x9b, 0x24, 0x98, 0x79, 0xdf, 0xb1, 0x3a, 0x6d, 0x1c, 0xd2, 0xe7, 0xc6, - 0xe8, 0x53, 0xfb, 0xf4, 0x45, 0x6d, 0xf5, 0xd5, 0x6e, 0x4f, 0x5b, 0x19, 0x95, 0xba, 0x28, 0xf6, - 0x4c, 0xd3, 0xf6, 0x75, 0x12, 0xcc, 0x6f, 0x39, 0xee, 0xbb, 0x9b, 0xec, 0xc8, 0x26, 0xed, 0x94, - 0xad, 0x18, 0x79, 0xf3, 0x7d, 0xf2, 0x28, 0xe2, 0x1e, 0x22, 0x9e, 0xf9, 0x48, 0x5f, 0xe9, 0xf6, - 0xb4, 0xea, 0xa8, 0xc4, 0xf5, 0x71, 0x67, 0x9a, 0xb4, 0xc3, 0x24, 0x58, 0x78, 0xd0, 0x41, 0x36, - 0x31, 0x2d, 0xcc, 0x89, 0x0b, 0x69, 0xfb, 0x28, 0x46, 0x5b, 0xa1, 0x4f, 0x5b, 0x14, 0x23, 0x08, - 0xbc, 0xd9, 0xed, 0x69, 0x6b, 0xa3, 0x12, 0x38, 0xcc, 0xc3, 0x99, 0xa6, 0xf2, 0xdb, 0x24, 0x98, - 0xd9, 0xe4, 0x05, 0x55, 0xf0, 0x12, 0xfb, 0x43, 0x28, 0x94, 0xef, 0x23, 0xdc, 0xed, 0x72, 0x14, - 0x31, 0x5e, 0xf2, 0x46, 0xb1, 0x67, 0x9a, 0xbc, 0x1f, 0x93, 0x60, 0x61, 0x1d, 0x13, 0x5c, 0x27, - 0xb8, 0x71, 0xc7, 0xc4, 0x96, 0x44, 0xe2, 0x63, 0x25, 0xc6, 0x62, 0x51, 0x3a, 0xcd, 0x0c, 0x05, - 0xe9, 0x7a, 0xb7, 0xa7, 0xdd, 0x18, 0x95, 0xc7, 0xe1, 0x3e, 0xfe, 0x36, 0x7c, 0xb2, 0xe2, 0x71, - 0x5c, 0x3e, 0xa3, 0xa0, 0x57, 0xe3, 0x33, 0xea, 0xe3, 0x4c, 0xf3, 0xf9, 0x69, 0x16, 0x4c, 0xb3, - 0x9b, 0x85, 0x90, 0xc6, 0xcb, 0x40, 0x54, 0xdb, 0x82, 0x43, 0x18, 0x9c, 0xd0, 0x3c, 0xb7, 0x5e, - 0xde, 0x14, 0x75, 0x38, 0xb7, 0x80, 0xd7, 0x41, 0xd6, 0x67, 0xe7, 0x20, 0x51, 0x45, 0x15, 0x06, - 0xaf, 0x0d, 0xa2, 0x27, 0xae, 0x5a, 0xc2, 0x10, 0xf6, 0x70, 0x0d, 0x64, 0x2d, 0xc6, 0xa2, 0x38, - 0x07, 0x96, 0x06, 0x91, 0xf1, 0x93, 0x01, 0x45, 0x73, 0x0c, 0x5c, 0x01, 0x19, 0x56, 0xae, 0x89, - 0xeb, 0xb8, 0xc8, 0x63, 0xe3, 0x45, 0x53, 0x2d, 0x61, 0x70, 0x73, 0x58, 0x05, 0x69, 0xd7, 0x73, - 0xda, 0xa2, 0x7a, 0xbe, 0x30, 0xf8, 0x4c, 0xb9, 0xd6, 0xac, 0x25, 0x0c, 0x66, 0x0b, 0xaf, 0xd2, - 0x03, 0x2f, 0x2d, 0x52, 0x7d, 0x76, 0x87, 0x40, 0x2b, 0x94, 0x01, 0x98, 0x04, 0x09, 0x4c, 0xe1, - 0x55, 0x90, 0xdd, 0x67, 0x25, 0x88, 0xb8, 0x00, 0x5a, 0x94, 0x41, 0xd1, 0xe2, 0x84, 0xbe, 0x17, - 0xb7, 0x85, 0x77, 0xc0, 0x14, 0x71, 0xdc, 0xdd, 0xe0, 0x4b, 0x2f, 0xee, 0x1f, 0x8a, 0x32, 0x76, - 0x58, 0x25, 0x50, 0x4b, 0x18, 0x11, 0x1c, 0xbc, 0x0f, 0x66, 0xf7, 0x22, 0x9f, 0x21, 0xec, 0xb3, - 0x4b, 0xcd, 0x01, 0x9e, 0x87, 0x7f, 0x20, 0x6b, 0x09, 0x23, 0x86, 0x86, 0xeb, 0x60, 0xc6, 0x8f, - 0xec, 0xca, 0xe2, 0x96, 0x30, 0xf2, 0x5e, 0xd1, 0x7d, 0xbb, 0x96, 0x30, 0x06, 0x30, 0xf0, 0x2e, - 0x98, 0x69, 0x44, 0xf6, 0x24, 0x75, 0x32, 0xbe, 0xaa, 0xe1, 0xbb, 0x16, 0xf5, 0x16, 0xc5, 0xca, - 0xde, 0x78, 0x46, 0xaa, 0x53, 0x27, 0x7b, 0x8b, 0xe6, 0xac, 0xec, 0x8d, 0xcf, 0xe8, 0xa0, 0xbf, - 0x7b, 0x94, 0xbe, 0xcf, 0x80, 0x29, 0x91, 0x15, 0xfc, 0x2e, 0xe4, 0xcd, 0x50, 0xe8, 0x3c, 0x29, - 0x2e, 0x9e, 0x24, 0x74, 0x66, 0x2e, 0xe9, 0xfc, 0x8d, 0x50, 0xe7, 0x3c, 0x43, 0x16, 0xfa, 0x3b, - 
0x12, 0x7b, 0xae, 0x84, 0x10, 0xda, 0x5e, 0x0e, 0xb4, 0xcd, 0x13, 0xe3, 0xfc, 0xf0, 0xe3, 0x44, - 0x80, 0x12, 0xc2, 0x5e, 0x05, 0x39, 0x93, 0x5f, 0xaa, 0x0e, 0x4b, 0x89, 0xf8, 0x9d, 0x2b, 0x95, - 0xaa, 0x00, 0xc0, 0xe5, 0xbe, 0xc0, 0x79, 0x5e, 0x9c, 0x8b, 0x0b, 0x3c, 0x04, 0x05, 0xfa, 0xbe, - 0x12, 0xea, 0x3b, 0x2b, 0x30, 0xb1, 0xd2, 0x3b, 0x7c, 0x31, 0x21, 0xee, 0xdb, 0x60, 0x3a, 0x90, - 0x03, 0x9b, 0x12, 0xea, 0xbe, 0x78, 0x52, 0xe5, 0x10, 0xe0, 0xa3, 0x28, 0xb8, 0x11, 0xd3, 0x10, - 0x57, 0xb6, 0x76, 0xf2, 0xb7, 0x33, 0xf0, 0x34, 0x28, 0xa0, 0x8d, 0x98, 0x80, 0xc0, 0x49, 0xae, - 0x02, 0xf9, 0xc4, 0x5c, 0xf1, 0x09, 0x58, 0x03, 0x13, 0x6d, 0x4c, 0x50, 0x03, 0x11, 0xa4, 0xe6, - 0xd8, 0xce, 0x7f, 0x29, 0x9a, 0x69, 0x7d, 0x31, 0x95, 0xef, 0x09, 0xc3, 0xdb, 0x36, 0xf1, 0x0e, - 0xc4, 0x51, 0x33, 0x44, 0x2f, 0xbe, 0x0d, 0xa6, 0x23, 0x06, 0x70, 0x16, 0xa4, 0x76, 0x71, 0x70, - 0x39, 0x4e, 0x9b, 0x70, 0x1e, 0x64, 0xf6, 0x91, 0xd5, 0xc1, 0x4c, 0x53, 0x79, 0x83, 0x77, 0x56, - 0x93, 0xd7, 0x15, 0x3d, 0x0f, 0x72, 0x1e, 0x7f, 0x8a, 0xde, 0x7c, 0xfa, 0xbc, 0x90, 0x78, 0xf6, - 0xbc, 0x90, 0x78, 0xf9, 0xbc, 0xa0, 0x3c, 0x3e, 0x2a, 0x28, 0x9f, 0x1d, 0x15, 0x94, 0xc3, 0xa3, - 0x82, 0xf2, 0xf4, 0xa8, 0xa0, 0xfc, 0x72, 0x54, 0x50, 0x7e, 0x3d, 0x2a, 0x24, 0x5e, 0x1e, 0x15, - 0x94, 0x27, 0x2f, 0x0a, 0x89, 0xa7, 0x2f, 0x0a, 0x89, 0x67, 0x2f, 0x0a, 0x89, 0x0f, 0xae, 0x8c, - 0xfd, 0x11, 0xda, 0xce, 0x32, 0xa6, 0x96, 0x7f, 0x0f, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x5a, 0x6d, - 0x97, 0x3f, 0x1b, 0x00, 0x00, + // 1720 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x58, 0x4d, 0x6f, 0xdb, 0x46, + 0x1a, 0x16, 0xf5, 0x69, 0x8d, 0x3f, 0xd6, 0x3b, 0x36, 0x1c, 0xae, 0x93, 0x88, 0x82, 0x80, 0x4d, + 0xbc, 0x8b, 0x5d, 0x69, 0x23, 0x27, 0xde, 0xc4, 0x1b, 0x04, 0x09, 0xd7, 0x09, 0x64, 0x6c, 0xb2, + 0x48, 0x68, 0xa3, 0x87, 0x5e, 0x8a, 0xb1, 0x34, 0x96, 0x58, 0x53, 0x24, 0x4d, 0x8e, 0x9c, 0x18, + 0xe8, 0x21, 0x7f, 0xa0, 0x68, 0x80, 0xfe, 0x87, 0xa2, 0xa7, 0x16, 0x2d, 0x7a, 0xea, 0xa9, 0xbd, + 0x19, 0x05, 0x0a, 0xe4, 0x18, 0x08, 0xa8, 0xda, 0x38, 0x97, 0xc2, 0xa7, 0x00, 0xfd, 0x03, 0xc5, + 0x7c, 0x90, 0x1a, 0x8a, 0x52, 0x2d, 0xa5, 0xe8, 0xc1, 0x45, 0x2f, 0xd2, 0x7c, 0xbc, 0xcf, 0xcb, + 0xe1, 0xf3, 0x3e, 0xef, 0xf0, 0x9d, 0x01, 0x97, 0xdd, 0xbd, 0x66, 0x65, 0xbf, 0x83, 0x3d, 0x13, + 0x7b, 0xec, 0xff, 0xd0, 0x43, 0x76, 0x13, 0x4b, 0xcd, 0xb2, 0xeb, 0x39, 0xc4, 0x81, 0xa0, 0x3f, + 0xb2, 0x5c, 0x6d, 0x9a, 0xa4, 0xd5, 0xd9, 0x29, 0xd7, 0x9d, 0x76, 0xa5, 0xe9, 0x34, 0x9d, 0x4a, + 0xd3, 0x71, 0x9a, 0x16, 0x46, 0xae, 0xe9, 0x8b, 0x66, 0xc5, 0x73, 0xeb, 0x15, 0x9f, 0x20, 0xd2, + 0xf1, 0x39, 0x7e, 0x79, 0x91, 0x1a, 0xb2, 0x26, 0x83, 0x88, 0x51, 0x4d, 0x98, 0xb3, 0xde, 0x4e, + 0x67, 0xb7, 0x42, 0xcc, 0x36, 0xf6, 0x09, 0x6a, 0xbb, 0x81, 0x01, 0x5d, 0x9f, 0xe5, 0x34, 0x39, + 0xd2, 0xb4, 0x1b, 0xf8, 0x49, 0x13, 0x11, 0xfc, 0x18, 0x1d, 0x0a, 0x83, 0xf3, 0x11, 0x83, 0xa0, + 0x21, 0x26, 0xff, 0x12, 0x99, 0xf4, 0xf7, 0x30, 0xa9, 0xb7, 0xc4, 0x54, 0x51, 0x4c, 0xed, 0x5b, + 0x6d, 0xa7, 0x81, 0x2d, 0xb6, 0x58, 0x9f, 0xff, 0x0a, 0x8b, 0x05, 0x6a, 0xe1, 0x76, 0xfc, 0x16, + 0xfb, 0x11, 0x83, 0xff, 0x3d, 0x95, 0xaf, 0x1d, 0xe4, 0xe3, 0x4a, 0x03, 0xef, 0x9a, 0xb6, 0x49, + 0x4c, 0xc7, 0xf6, 0xe5, 0xb6, 0x70, 0xb2, 0x36, 0x9e, 0x93, 0xc1, 0x18, 0x94, 0x3e, 0x4b, 0x81, + 0xe9, 0xfb, 0xce, 0x9e, 0x69, 0xe0, 0xfd, 0x0e, 0xf6, 0x09, 0x5c, 0x04, 0x19, 0x66, 0xa3, 0x2a, + 0x45, 0x65, 0x25, 0x6f, 0xf0, 0x0e, 0x1d, 0xb5, 0xcc, 0xb6, 0x49, 0xd4, 0x64, 0x51, 0x59, 0x99, + 0x35, 0x78, 0x07, 0x42, 0x90, 0xf6, 0x09, 
0x76, 0xd5, 0x54, 0x51, 0x59, 0x49, 0x19, 0xac, 0x0d, + 0x97, 0xc1, 0x94, 0x69, 0x13, 0xec, 0x1d, 0x20, 0x4b, 0xcd, 0xb3, 0xf1, 0xb0, 0x0f, 0x6f, 0x81, + 0x9c, 0x4f, 0x90, 0x47, 0xb6, 0x7d, 0x35, 0x5d, 0x54, 0x56, 0xa6, 0xab, 0xcb, 0x65, 0x1e, 0xab, + 0x72, 0x10, 0xab, 0xf2, 0x76, 0x10, 0x2b, 0x7d, 0xea, 0xa8, 0xa7, 0x25, 0x9e, 0x7d, 0xaf, 0x29, + 0x46, 0x00, 0x82, 0xeb, 0x20, 0x83, 0xed, 0xc6, 0xb6, 0xaf, 0x66, 0x26, 0x40, 0x73, 0x08, 0xbc, + 0x02, 0xf2, 0x0d, 0xd3, 0xc3, 0x75, 0xca, 0x99, 0x9a, 0x2d, 0x2a, 0x2b, 0x73, 0xd5, 0x85, 0x72, + 0x18, 0xda, 0x8d, 0x60, 0xca, 0xe8, 0x5b, 0xd1, 0xd7, 0x73, 0x11, 0x69, 0xa9, 0x39, 0xc6, 0x04, + 0x6b, 0xc3, 0x12, 0xc8, 0xfa, 0x2d, 0xe4, 0x35, 0x7c, 0x75, 0xaa, 0x98, 0x5a, 0xc9, 0xeb, 0xe0, + 0xa4, 0xa7, 0x89, 0x11, 0x43, 0xfc, 0xc3, 0x77, 0x40, 0xda, 0xb5, 0x90, 0xad, 0x02, 0xb6, 0xca, + 0xf9, 0xb2, 0xc4, 0xf9, 0x43, 0x0b, 0xd9, 0xfa, 0x8d, 0x6e, 0x4f, 0xbb, 0x26, 0xcb, 0xdd, 0x43, + 0xbb, 0xc8, 0x46, 0x15, 0xcb, 0xd9, 0x33, 0x2b, 0x07, 0xab, 0x15, 0x39, 0x92, 0xd4, 0x51, 0xf9, + 0x11, 0x75, 0x40, 0xa1, 0x06, 0x73, 0x5c, 0xfa, 0x26, 0x09, 0x20, 0x8d, 0xd9, 0xa6, 0xed, 0x13, + 0x64, 0x93, 0x37, 0x09, 0xdd, 0x4d, 0x90, 0xa5, 0x69, 0xb1, 0xed, 0xb3, 0xe0, 0x8d, 0xcb, 0xa5, + 0xc0, 0x44, 0xc9, 0x4c, 0x4f, 0x44, 0x66, 0x66, 0x28, 0x99, 0xd9, 0x53, 0xc9, 0xcc, 0xfd, 0x56, + 0x64, 0xaa, 0x20, 0x4d, 0x7b, 0x70, 0x1e, 0xa4, 0x3c, 0xf4, 0x98, 0x71, 0x37, 0x63, 0xd0, 0x66, + 0xe9, 0x93, 0x34, 0x98, 0xe1, 0xa9, 0xe1, 0xbb, 0x8e, 0xed, 0x63, 0xba, 0xde, 0x2d, 0xb6, 0xff, + 0x70, 0x86, 0xc5, 0x7a, 0xd9, 0x88, 0x21, 0x66, 0xe0, 0x6d, 0x90, 0xde, 0x40, 0x04, 0x31, 0xb6, + 0xa7, 0xab, 0x8b, 0xf2, 0x7a, 0xa9, 0x2f, 0x3a, 0xa7, 0x2f, 0x51, 0x42, 0x4f, 0x7a, 0xda, 0x5c, + 0x03, 0x11, 0xf4, 0x0f, 0xa7, 0x6d, 0x12, 0xdc, 0x76, 0xc9, 0xa1, 0xc1, 0x90, 0xf0, 0x1a, 0xc8, + 0xdf, 0xf5, 0x3c, 0xc7, 0xdb, 0x3e, 0x74, 0x31, 0x8b, 0x4e, 0x5e, 0x3f, 0x77, 0xd2, 0xd3, 0x16, + 0x70, 0x30, 0x28, 0x21, 0xfa, 0x96, 0xf0, 0x6f, 0x20, 0xc3, 0x3a, 0x2c, 0x1e, 0x79, 0x7d, 0xe1, + 0xa4, 0xa7, 0xfd, 0x89, 0x41, 0x24, 0x73, 0x6e, 0x11, 0x0d, 0x5f, 0x66, 0xac, 0xf0, 0x85, 0x2a, + 0xca, 0xca, 0x2a, 0x52, 0x41, 0xee, 0x00, 0x7b, 0x3e, 0x75, 0x93, 0x63, 0xe3, 0x41, 0x17, 0xde, + 0x01, 0x80, 0x12, 0x63, 0xfa, 0xc4, 0xac, 0xd3, 0x5c, 0xa1, 0x64, 0xcc, 0x96, 0xf9, 0x56, 0x68, + 0x60, 0xbf, 0x63, 0x11, 0x1d, 0x0a, 0x16, 0x24, 0x43, 0x43, 0x6a, 0xc3, 0x4f, 0x15, 0x90, 0xab, + 0x61, 0xd4, 0xc0, 0x9e, 0xaf, 0xe6, 0x8b, 0xa9, 0x95, 0xe9, 0xea, 0x5f, 0xcb, 0xf2, 0xbe, 0xf7, + 0xd0, 0x73, 0xda, 0x98, 0xb4, 0x70, 0xc7, 0x0f, 0x02, 0xc4, 0xad, 0x75, 0xbb, 0xdb, 0xd3, 0xf0, + 0x98, 0x92, 0x18, 0x6b, 0xbb, 0x1d, 0xf9, 0xa8, 0x93, 0x9e, 0xa6, 0xfc, 0xd3, 0x08, 0x56, 0x59, + 0xfa, 0x4e, 0x01, 0x7f, 0xa6, 0x41, 0xde, 0xa2, 0xbe, 0x7d, 0x29, 0x2d, 0xdb, 0x88, 0xd4, 0x5b, + 0xaa, 0x42, 0x45, 0x6e, 0xf0, 0x8e, 0xbc, 0x17, 0x26, 0x7f, 0xd5, 0x5e, 0x98, 0x9a, 0x7c, 0x2f, + 0x0c, 0x72, 0x31, 0x3d, 0x34, 0x17, 0x33, 0xa3, 0x72, 0xb1, 0xf4, 0x41, 0x8a, 0xef, 0x3b, 0xc1, + 0xfb, 0x4d, 0x90, 0x16, 0xf7, 0xc2, 0xb4, 0x48, 0xb1, 0xd5, 0x86, 0x6a, 0xe3, 0xbe, 0x36, 0x1b, + 0xd8, 0x26, 0xe6, 0xae, 0x89, 0xbd, 0x53, 0x92, 0x43, 0x52, 0x5c, 0x2a, 0xaa, 0x38, 0x59, 0x2e, + 0xe9, 0xb3, 0x20, 0x97, 0x81, 0x1c, 0xc9, 0xbc, 0x41, 0x8e, 0x94, 0x7e, 0x4a, 0x82, 0x25, 0x1a, + 0x91, 0xfb, 0x68, 0x07, 0x5b, 0xff, 0x47, 0xed, 0x09, 0xa3, 0x72, 0x49, 0x8a, 0x4a, 0x5e, 0x87, + 0x7f, 0xb0, 0x3e, 0x1e, 0xeb, 0x1f, 0x29, 0x60, 0x2a, 0xd8, 0xcc, 0x61, 0x19, 0x00, 0x0e, 0x63, + 0xfb, 0x35, 0xe7, 0x7a, 0x8e, 0x82, 0xbd, 0x70, 0xd4, 0x90, 0x2c, 
0xe0, 0xbb, 0x20, 0xcb, 0x7b, + 0x22, 0x17, 0xce, 0x49, 0xb9, 0x40, 0x3c, 0x8c, 0xda, 0x77, 0x1a, 0xc8, 0x25, 0xd8, 0xd3, 0x6f, + 0xd0, 0x55, 0x74, 0x7b, 0xda, 0xe5, 0x51, 0x2c, 0x05, 0xb5, 0xa4, 0xc0, 0xd1, 0xf8, 0xf2, 0x67, + 0x1a, 0xe2, 0x09, 0xa5, 0xf7, 0x15, 0x30, 0x4f, 0x17, 0x4a, 0xa9, 0x09, 0x85, 0xb1, 0x01, 0xa6, + 0x3c, 0xd1, 0x66, 0xcb, 0x9d, 0xae, 0x96, 0xca, 0x51, 0x5a, 0x87, 0x50, 0xa9, 0xa7, 0x8f, 0x7a, + 0x9a, 0x62, 0x84, 0x48, 0xb8, 0x1a, 0xa1, 0x31, 0x39, 0x8c, 0x46, 0x0a, 0x49, 0x44, 0x88, 0xfb, + 0x2a, 0x09, 0xe0, 0x26, 0xad, 0xb7, 0xa9, 0xfe, 0xfa, 0x52, 0x7d, 0x12, 0x5b, 0xd1, 0x85, 0x3e, + 0x29, 0x71, 0x7b, 0xfd, 0x56, 0xb7, 0xa7, 0xad, 0x9f, 0xa2, 0x9d, 0x5f, 0xc0, 0x4b, 0x6f, 0x21, + 0xcb, 0x37, 0x79, 0x26, 0xbe, 0x31, 0x5f, 0x24, 0xc1, 0xdc, 0x5b, 0x8e, 0xd5, 0x69, 0xe3, 0x90, + 0x3e, 0x37, 0x46, 0x9f, 0xda, 0xa7, 0x2f, 0x6a, 0xab, 0xaf, 0x77, 0x7b, 0xda, 0xda, 0xb8, 0xd4, + 0x45, 0xb1, 0x67, 0x9a, 0xb6, 0xcf, 0x93, 0x60, 0x71, 0xdb, 0x71, 0xff, 0xb7, 0xc5, 0xce, 0x6b, + 0xd2, 0x36, 0xd9, 0x8a, 0x91, 0xb7, 0xd8, 0x27, 0x8f, 0x22, 0x1e, 0x20, 0xe2, 0x99, 0x4f, 0xf4, + 0xb5, 0x6e, 0x4f, 0xab, 0x8e, 0x4b, 0x5c, 0x1f, 0x77, 0xa6, 0x49, 0x3b, 0x4a, 0x82, 0xa5, 0x47, + 0x1d, 0x64, 0x13, 0xd3, 0xc2, 0x9c, 0xb8, 0x90, 0xb6, 0xf7, 0x62, 0xb4, 0x15, 0xfa, 0xb4, 0x45, + 0x31, 0x82, 0xc0, 0xdb, 0xdd, 0x9e, 0x76, 0x73, 0x5c, 0x02, 0x87, 0x79, 0x38, 0xd3, 0x54, 0x7e, + 0x99, 0x04, 0x73, 0x5b, 0xbc, 0x9a, 0x0a, 0x5e, 0xe2, 0x60, 0x08, 0x85, 0xf2, 0x65, 0x84, 0xbb, + 0x53, 0x8e, 0x22, 0x26, 0x4b, 0xde, 0x28, 0xf6, 0x4c, 0x93, 0xf7, 0x6d, 0x12, 0x2c, 0x6d, 0x60, + 0x82, 0xeb, 0x04, 0x37, 0xee, 0x99, 0xd8, 0x92, 0x48, 0x7c, 0xaa, 0xc4, 0x58, 0x2c, 0x4a, 0x47, + 0x99, 0xa1, 0x20, 0x5d, 0xef, 0xf6, 0xb4, 0x5b, 0xe3, 0xf2, 0x38, 0xdc, 0xc7, 0xef, 0x86, 0x4f, + 0x56, 0x39, 0x4e, 0xca, 0x67, 0x14, 0xf4, 0x66, 0x7c, 0x46, 0x7d, 0x9c, 0x69, 0x3e, 0x3f, 0xcc, + 0x82, 0x59, 0x76, 0xad, 0x10, 0xd2, 0xf8, 0x77, 0x20, 0x4a, 0x6d, 0xc1, 0x21, 0x0c, 0x8e, 0x67, + 0x9e, 0x5b, 0x2f, 0x6f, 0x89, 0x22, 0x9c, 0x5b, 0xc0, 0xeb, 0x20, 0xeb, 0xb3, 0x43, 0x90, 0xa8, + 0xa2, 0x0a, 0x83, 0x77, 0x06, 0xd1, 0xe3, 0x56, 0x2d, 0x61, 0x08, 0x7b, 0x78, 0x13, 0x64, 0x2d, + 0xc6, 0xa2, 0x38, 0x04, 0x96, 0x06, 0x91, 0xf1, 0x63, 0x01, 0x45, 0x73, 0x0c, 0x5c, 0x03, 0x19, + 0x56, 0xae, 0x89, 0xbb, 0xb8, 0xc8, 0x63, 0xe3, 0x45, 0x53, 0x2d, 0x61, 0x70, 0x73, 0x58, 0x05, + 0x69, 0xd7, 0x73, 0xda, 0xa2, 0x74, 0xbe, 0x30, 0xf8, 0x4c, 0xb9, 0xd6, 0xac, 0x25, 0x0c, 0x66, + 0x0b, 0xaf, 0xd2, 0xd3, 0x2e, 0x2d, 0x52, 0x7d, 0x76, 0x81, 0x40, 0x2b, 0x94, 0x01, 0x98, 0x04, + 0x09, 0x4c, 0xe1, 0x55, 0x90, 0x3d, 0x60, 0x25, 0x88, 0xb8, 0xfd, 0x59, 0x96, 0x41, 0xd1, 0xe2, + 0x84, 0xbe, 0x17, 0xb7, 0x85, 0xf7, 0xc0, 0x0c, 0x71, 0xdc, 0xbd, 0xe0, 0x4b, 0x2f, 0x2e, 0x1f, + 0x8a, 0x32, 0x76, 0x58, 0x25, 0x50, 0x4b, 0x18, 0x11, 0x1c, 0x7c, 0x08, 0xe6, 0xf7, 0x23, 0x9f, + 0x21, 0xec, 0xb3, 0x1b, 0xcd, 0x01, 0x9e, 0x87, 0x7f, 0x20, 0x6b, 0x09, 0x23, 0x86, 0x86, 0x1b, + 0x60, 0xce, 0x8f, 0xec, 0xca, 0xe2, 0x8a, 0x30, 0xf2, 0x5e, 0xd1, 0x7d, 0xbb, 0x96, 0x30, 0x06, + 0x30, 0xf0, 0x3e, 0x98, 0x6b, 0x44, 0xf6, 0x24, 0x75, 0x3a, 0xbe, 0xaa, 0xe1, 0xbb, 0x16, 0xf5, + 0x16, 0xc5, 0xca, 0xde, 0x78, 0x46, 0xaa, 0x33, 0xa3, 0xbd, 0x45, 0x73, 0x56, 0xf6, 0xc6, 0x67, + 0x74, 0xd0, 0xdf, 0x3d, 0x4a, 0x5f, 0x67, 0xc0, 0x8c, 0xc8, 0x0a, 0x7e, 0x11, 0xf2, 0xef, 0x50, + 0xe8, 0x3c, 0x29, 0x2e, 0x8e, 0x12, 0x3a, 0x33, 0x97, 0x74, 0xfe, 0xaf, 0x50, 0xe7, 0x3c, 0x43, + 0x96, 0xfa, 0x3b, 0x12, 0x7b, 0xae, 0x84, 0x10, 0xda, 0x5e, 0x0d, 0xb4, 0xcd, 0x13, 0xe3, 
0xfc, + 0xf0, 0xe3, 0x44, 0x80, 0x12, 0xc2, 0x5e, 0x07, 0x39, 0x93, 0xdf, 0xa8, 0x0e, 0x4b, 0x89, 0xf8, + 0x85, 0x2b, 0x95, 0xaa, 0x00, 0xc0, 0xd5, 0xbe, 0xc0, 0x79, 0x5e, 0x9c, 0x8b, 0x0b, 0x3c, 0x04, + 0x05, 0xfa, 0xbe, 0x12, 0xea, 0x3b, 0x2b, 0x30, 0xb1, 0xd2, 0x3b, 0x7c, 0x31, 0x21, 0xee, 0xbb, + 0x60, 0x36, 0x90, 0x03, 0x9b, 0x12, 0xea, 0xbe, 0x38, 0xaa, 0x72, 0x08, 0xf0, 0x51, 0x14, 0xdc, + 0x8c, 0x69, 0x88, 0x2b, 0x5b, 0x1b, 0xfd, 0xed, 0x0c, 0x3c, 0x0d, 0x0a, 0x68, 0x33, 0x26, 0x20, + 0x30, 0xca, 0x55, 0x20, 0x9f, 0x98, 0x2b, 0x3e, 0x01, 0x6b, 0x60, 0xaa, 0x8d, 0x09, 0x6a, 0x20, + 0x82, 0xd4, 0x1c, 0xdb, 0xf9, 0x2f, 0x45, 0x33, 0xad, 0x2f, 0xa6, 0xf2, 0x03, 0x61, 0x78, 0xd7, + 0x26, 0xde, 0xa1, 0x38, 0x6a, 0x86, 0xe8, 0xe5, 0xff, 0x80, 0xd9, 0x88, 0x01, 0x9c, 0x07, 0xa9, + 0x3d, 0x1c, 0xdc, 0x8c, 0xd3, 0x26, 0x5c, 0x04, 0x99, 0x03, 0x64, 0x75, 0x30, 0xd3, 0x54, 0xde, + 0xe0, 0x9d, 0xf5, 0xe4, 0x75, 0x45, 0xcf, 0x83, 0x9c, 0xc7, 0x9f, 0xa2, 0x37, 0x9f, 0xbf, 0x2c, + 0x24, 0x5e, 0xbc, 0x2c, 0x24, 0x5e, 0xbf, 0x2c, 0x28, 0x4f, 0x8f, 0x0b, 0xca, 0xc7, 0xc7, 0x05, + 0xe5, 0xe8, 0xb8, 0xa0, 0x3c, 0x3f, 0x2e, 0x28, 0x3f, 0x1c, 0x17, 0x94, 0x1f, 0x8f, 0x0b, 0x89, + 0xd7, 0xc7, 0x05, 0xe5, 0xd9, 0xab, 0x42, 0xe2, 0xf9, 0xab, 0x42, 0xe2, 0xc5, 0xab, 0x42, 0xe2, + 0xed, 0x2b, 0x13, 0x7f, 0x84, 0x76, 0xb2, 0x8c, 0xa9, 0xd5, 0x9f, 0x03, 0x00, 0x00, 0xff, 0xff, + 0x84, 0x24, 0xe9, 0xad, 0x3c, 0x1b, 0x00, 0x00, } func (this *LokiRequest) Equal(that interface{}) bool { @@ -7326,7 +7326,7 @@ func (m *LokiData) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Result = append(m.Result, github_com_grafana_loki_v3_pkg_push.Stream{}) + m.Result = append(m.Result, github_com_grafana_loki_pkg_push.Stream{}) if err := m.Result[len(m.Result)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/pkg/querier/queryrange/queryrange.proto b/pkg/querier/queryrange/queryrange.proto index c0c746a3fc945..01f48298521c1 100644 --- a/pkg/querier/queryrange/queryrange.proto +++ b/pkg/querier/queryrange/queryrange.proto @@ -125,7 +125,7 @@ message LokiData { repeated logproto.StreamAdapter Result = 2 [ (gogoproto.nullable) = false, (gogoproto.jsontag) = "result", - (gogoproto.customtype) = "github.com/grafana/loki/v3/pkg/push.Stream" + (gogoproto.customtype) = "github.com/grafana/loki/pkg/push.Stream" ]; } diff --git a/pkg/querier/queryrange/volume_test.go b/pkg/querier/queryrange/volume_test.go index 62956dd17eab0..7327a58e15d9e 100644 --- a/pkg/querier/queryrange/volume_test.go +++ b/pkg/querier/queryrange/volume_test.go @@ -9,9 +9,10 @@ import ( "github.com/grafana/dskit/user" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/loghttp" "github.com/grafana/loki/v3/pkg/logproto" - "github.com/grafana/loki/v3/pkg/push" "github.com/grafana/loki/v3/pkg/querier/queryrange/queryrangebase" "github.com/grafana/loki/v3/pkg/storage/stores/index/seriesvolume" ) diff --git a/pkg/storage/bloom/v1/bloom_tokenizer_test.go b/pkg/storage/bloom/v1/bloom_tokenizer_test.go index 3f721974c2309..048c972d44c68 100644 --- a/pkg/storage/bloom/v1/bloom_tokenizer_test.go +++ b/pkg/storage/bloom/v1/bloom_tokenizer_test.go @@ -9,10 +9,11 @@ import ( "github.com/prometheus/prometheus/model/labels" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/chunkenc" "github.com/grafana/loki/v3/pkg/logproto" "github.com/grafana/loki/v3/pkg/logql/log" - "github.com/grafana/loki/v3/pkg/push" "github.com/prometheus/common/model" "github.com/stretchr/testify/require" 
diff --git a/pkg/storage/store_test.go b/pkg/storage/store_test.go index c59df5aba7bc1..3be136d411dea 100644 --- a/pkg/storage/store_test.go +++ b/pkg/storage/store_test.go @@ -23,6 +23,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/stretchr/testify/require" + "github.com/grafana/loki/pkg/push" + "github.com/grafana/loki/v3/pkg/chunkenc" "github.com/grafana/loki/v3/pkg/ingester/client" "github.com/grafana/loki/v3/pkg/iter" @@ -31,7 +33,6 @@ import ( lokilog "github.com/grafana/loki/v3/pkg/logql/log" "github.com/grafana/loki/v3/pkg/logql/syntax" "github.com/grafana/loki/v3/pkg/logqlmodel/stats" - "github.com/grafana/loki/v3/pkg/push" "github.com/grafana/loki/v3/pkg/querier/astmapper" "github.com/grafana/loki/v3/pkg/querier/plan" "github.com/grafana/loki/v3/pkg/storage/chunk" diff --git a/vendor/github.com/grafana/loki/v3/pkg/push/LICENSE b/vendor/github.com/grafana/loki/pkg/push/LICENSE similarity index 100% rename from vendor/github.com/grafana/loki/v3/pkg/push/LICENSE rename to vendor/github.com/grafana/loki/pkg/push/LICENSE diff --git a/vendor/github.com/grafana/loki/v3/pkg/push/push.pb.go b/vendor/github.com/grafana/loki/pkg/push/push.pb.go similarity index 92% rename from vendor/github.com/grafana/loki/v3/pkg/push/push.pb.go rename to vendor/github.com/grafana/loki/pkg/push/push.pb.go index 7979872929611..3b07d850ff162 100644 --- a/vendor/github.com/grafana/loki/v3/pkg/push/push.pb.go +++ b/vendor/github.com/grafana/loki/pkg/push/push.pb.go @@ -296,41 +296,40 @@ func init() { func init() { proto.RegisterFile("pkg/push/push.proto", fileDescriptor_35ec442956852c9e) } var fileDescriptor_35ec442956852c9e = []byte{ - // 532 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x4c, - 0x10, 0xf6, 0x26, 0x6e, 0xda, 0x6e, 0xfa, 0xf7, 0xaf, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x57, 0x86, - 0x43, 0x0e, 0x60, 0x4b, 0xe9, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x42, - 0x70, 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0xeb, 0x4a, 0xbd, 0xf1, 0x08, 0xe5, 0x2d, - 0x78, 0x02, 0x9e, 0xa1, 0xc7, 0x1c, 0x2b, 0x0e, 0x86, 0x38, 0x17, 0x94, 0x53, 0x1f, 0x01, 0x79, - 0x6d, 0x93, 0x50, 0x90, 0xb8, 0x6c, 0xbe, 0x99, 0x9d, 0xf9, 0xbe, 0x2f, 0xb3, 0x63, 0xf8, 0x20, - 0xbb, 0x08, 0xdc, 0x2c, 0x17, 0xa1, 0x3a, 0x9c, 0x8c, 0x33, 0xc9, 0xd0, 0x46, 0xcc, 0x02, 0x85, - 0xcc, 0xdd, 0x80, 0x05, 0x4c, 0x41, 0xb7, 0x42, 0xf5, 0xbd, 0x69, 0x05, 0x8c, 0x05, 0x31, 0x75, - 0x55, 0x34, 0xce, 0xcf, 0x5d, 0x19, 0x25, 0x54, 0x48, 0x92, 0x64, 0x75, 0x81, 0xfd, 0x16, 0xf6, - 0x4f, 0x73, 0x11, 0xfa, 0xf4, 0x43, 0x4e, 0x85, 0x44, 0xc7, 0x70, 0x5d, 0x48, 0x4e, 0x49, 0x22, - 0x0c, 0x70, 0xd8, 0x1d, 0xf4, 0x87, 0x0f, 0x9d, 0x56, 0xc1, 0x79, 0xad, 0x2e, 0x46, 0x13, 0x92, - 0x49, 0xca, 0xbd, 0xbd, 0xaf, 0x85, 0xd5, 0xab, 0x53, 0x8b, 0xc2, 0x6a, 0xbb, 0xfc, 0x16, 0xd8, - 0xdb, 0x70, 0xab, 0x26, 0x16, 0x19, 0x4b, 0x05, 0xb5, 0x3f, 0x01, 0xf8, 0xdf, 0x6f, 0x0c, 0xc8, - 0x86, 0xbd, 0x98, 0x8c, 0x69, 0x5c, 0x49, 0x81, 0xc1, 0xa6, 0x07, 0x17, 0x85, 0xd5, 0x64, 0xfc, - 0xe6, 0x17, 0x8d, 0xe0, 0x3a, 0x4d, 0x25, 0x8f, 0xa8, 0x30, 0x3a, 0xca, 0xcf, 0xfe, 0xd2, 0xcf, - 0xcb, 0x54, 0xf2, 0xab, 0xd6, 0xce, 0xff, 0x37, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, - 0xe8, 0x11, 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x7b, 0x08, 0x06, 0xba, 0xb7, 0xb6, 0x28, 0x2c, 0xf0, - 0xcc, 0x57, 0x29, 0xfb, 0x05, 0xdc, 0x39, 0xa9, 0x74, 0x4e, 0x49, 0xc4, 0x5b, 0x57, 0x08, 0xea, - 0x29, 0x49, 0x68, 0xed, 0xc9, 0x57, 0x18, 0xed, 0xc2, 0xb5, 0x4b, 0x12, 
0xe7, 0xd4, 0xe8, 0xa8,
- 0x64, 0x1d, 0xd8, 0x5f, 0x3a, 0x70, 0x6b, 0xd5, 0x03, 0x3a, 0x86, 0x9b, 0xbf, 0xc6, 0xab, 0xfa,
- 0xfb, 0x43, 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xdb, 0x8d, 0xe5,
- 0x8e, 0x14, 0xd7, 0xdf, 0x2c, 0xe0, 0x2f, 0x9b, 0xd1, 0x01, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc,
- 0x8d, 0x45, 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74,
- 0xf2, 0x8a, 0x4a, 0x32, 0x21, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x39, 0x9f, 0xfb, 0x7f, 0xcd,
- 0x7b, 0xd2, 0x08, 0x1e, 0xfc, 0xd9, 0xfd, 0x94, 0x25, 0x91, 0xa4, 0x49, 0x26, 0xaf, 0xfc, 0xbf,
- 0x70, 0xa3, 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x62, 0xe8, 0xff, 0x54, 0x31, 0x1a, 0x95, 0x9d,
- 0xba, 0x63, 0x85, 0xb9, 0xe1, 0x18, 0x8e, 0x60, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57,
- 0x08, 0xed, 0x2d, 0xf9, 0x56, 0xb6, 0xd1, 0xdc, 0xbf, 0x9f, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0x4d,
- 0x67, 0x58, 0xbb, 0x9d, 0x61, 0xed, 0x6e, 0x86, 0xc1, 0xc7, 0x12, 0x83, 0xcf, 0x25, 0x06, 0x37,
- 0x25, 0x06, 0xd3, 0x12, 0x83, 0xef, 0x25, 0x06, 0x3f, 0x4a, 0xac, 0xdd, 0x95, 0x18, 0x5c, 0xcf,
- 0xb1, 0x36, 0x9d, 0x63, 0xed, 0x76, 0x8e, 0xb5, 0xf7, 0x8f, 0x83, 0x48, 0x86, 0xf9, 0xd8, 0x39,
- 0x63, 0x89, 0x1b, 0x70, 0x72, 0x4e, 0x52, 0xe2, 0xc6, 0xec, 0x22, 0x72, 0x2f, 0x8f, 0xdc, 0xf6,
- 0xeb, 0x1a, 0xf7, 0x94, 0xe0, 0xd1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x51, 0xe6, 0x29, 0x58,
- 0x70, 0x03, 0x00, 0x00,
+ // 527 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xc1, 0x6e, 0xd3, 0x40,
+ 0x10, 0xf5, 0x26, 0x6e, 0xda, 0x6e, 0x4a, 0xa9, 0x96, 0xb6, 0x18, 0xab, 0x5a, 0x47, 0x16, 0x87,
+ 0x1c, 0xc0, 0x96, 0xc2, 0x81, 0x0b, 0x97, 0x58, 0x42, 0xea, 0xa1, 0x48, 0x95, 0x41, 0x20, 0x71,
+ 0xdb, 0x34, 0x5b, 0xdb, 0xaa, 0xed, 0x35, 0xbb, 0x6b, 0xa4, 0xde, 0xf8, 0x84, 0xf2, 0x17, 0x7c,
+ 0x01, 0xdf, 0xd0, 0x63, 0x8e, 0x15, 0x07, 0x43, 0x9c, 0x0b, 0xca, 0xa9, 0x9f, 0x80, 0xbc, 0xb6,
+ 0x49, 0x28, 0x48, 0x5c, 0x36, 0x6f, 0x66, 0x67, 0xde, 0x7b, 0x99, 0x1d, 0xc3, 0x07, 0xd9, 0x45,
+ 0xe0, 0x66, 0xb9, 0x08, 0xd5, 0xe1, 0x64, 0x9c, 0x49, 0x86, 0xb6, 0x62, 0x16, 0x28, 0x64, 0xee,
+ 0x07, 0x2c, 0x60, 0x0a, 0xba, 0x15, 0xaa, 0xef, 0x4d, 0x2b, 0x60, 0x2c, 0x88, 0xa9, 0xab, 0xa2,
+ 0x49, 0x7e, 0xee, 0xca, 0x28, 0xa1, 0x42, 0x92, 0x24, 0xab, 0x0b, 0xec, 0x77, 0xb0, 0x7f, 0x9a,
+ 0x8b, 0xd0, 0xa7, 0x1f, 0x72, 0x2a, 0x24, 0x3a, 0x86, 0x9b, 0x42, 0x72, 0x4a, 0x12, 0x61, 0x80,
+ 0x41, 0x77, 0xd8, 0x1f, 0x3d, 0x74, 0x5a, 0x05, 0xe7, 0xb5, 0xba, 0x18, 0x4f, 0x49, 0x26, 0x29,
+ 0xf7, 0x0e, 0xbe, 0x15, 0x56, 0xaf, 0x4e, 0x2d, 0x0b, 0xab, 0xed, 0xf2, 0x5b, 0x60, 0xef, 0xc2,
+ 0x9d, 0x9a, 0x58, 0x64, 0x2c, 0x15, 0xd4, 0xfe, 0x0c, 0xe0, 0xbd, 0x3f, 0x18, 0x90, 0x0d, 0x7b,
+ 0x31, 0x99, 0xd0, 0xb8, 0x92, 0x02, 0xc3, 0x6d, 0x0f, 0x2e, 0x0b, 0xab, 0xc9, 0xf8, 0xcd, 0x2f,
+ 0x1a, 0xc3, 0x4d, 0x9a, 0x4a, 0x1e, 0x51, 0x61, 0x74, 0x94, 0x9f, 0xc3, 0x95, 0x9f, 0x97, 0xa9,
+ 0xe4, 0x97, 0xad, 0x9d, 0xfb, 0xd7, 0x85, 0xa5, 0x55, 0x46, 0x9a, 0x72, 0xbf, 0x05, 0xe8, 0x11,
+ 0xd4, 0x43, 0x22, 0x42, 0xa3, 0x3b, 0x00, 0x43, 0xdd, 0xdb, 0x58, 0x16, 0x16, 0x78, 0xea, 0xab,
+ 0x94, 0xfd, 0x02, 0xee, 0x9d, 0x54, 0x3a, 0xa7, 0x24, 0xe2, 0xad, 0x2b, 0x04, 0xf5, 0x94, 0x24,
+ 0xb4, 0xf6, 0xe4, 0x2b, 0x8c, 0xf6, 0xe1, 0xc6, 0x47, 0x12, 0xe7, 0xd4, 0xe8, 0xa8, 0x64, 0x1d,
+ 0xd8, 0x5f, 0x3b, 0x70, 0x67, 0xdd, 0x03, 0x3a, 0x86, 0xdb, 0xbf, 0xc7, 0xab, 0xfa, 0xfb, 0x23,
+ 0xd3, 0xa9, 0x1f, 0xc0, 0x69, 0x1f, 0xc0, 0x79, 0xd3, 0x56, 0x78, 0xbb, 0x8d, 0xe5, 0x8e, 0x14,
+ 0x57, 0xdf, 0x2d, 0xe0, 0xaf, 0x9a, 0xd1, 0x11, 0xd4, 0xe3, 0x28, 0x6d, 0xf4, 0xbc, 0xad, 0x65,
+ 0x61, 0xa9, 0xd8, 0x57, 0x27, 0xca, 0x20, 0x12, 0x92, 0xe7, 0x67, 0x32, 0xe7, 0x74, 0xfa, 0x8a,
+ 0x4a, 0x32, 0x25, 0x92, 0x18, 0x5d, 0x35, 0x1f, 0x73, 0x35, 0x9f, 0xbb, 0x7f, 0xcd, 0x7b, 0xdc,
+ 0x08, 0x1e, 0xfd, 0xdd, 0xfd, 0x84, 0x25, 0x91, 0xa4, 0x49, 0x26, 0x2f, 0xfd, 0x7f, 0x70, 0xa3,
+ 0x13, 0xd8, 0xcb, 0x08, 0x17, 0x74, 0x6a, 0xe8, 0xff, 0x55, 0x31, 0x1a, 0x95, 0xbd, 0xba, 0x63,
+ 0x8d, 0xb9, 0xe1, 0x18, 0x8d, 0x61, 0xaf, 0x5a, 0x0d, 0xca, 0xd1, 0x73, 0xa8, 0x57, 0x08, 0x1d,
+ 0xac, 0xf8, 0xd6, 0xb6, 0xd1, 0x3c, 0xbc, 0x9b, 0x6e, 0x76, 0x49, 0xf3, 0xde, 0xce, 0xe6, 0x58,
+ 0xbb, 0x99, 0x63, 0xed, 0x76, 0x8e, 0xc1, 0xa7, 0x12, 0x83, 0x2f, 0x25, 0x06, 0xd7, 0x25, 0x06,
+ 0xb3, 0x12, 0x83, 0x1f, 0x25, 0x06, 0x3f, 0x4b, 0xac, 0xdd, 0x96, 0x18, 0x5c, 0x2d, 0xb0, 0x36,
+ 0x5b, 0x60, 0xed, 0x66, 0x81, 0xb5, 0xf7, 0x83, 0x20, 0x92, 0x61, 0x3e, 0x71, 0xce, 0x58, 0xe2,
+ 0x06, 0x9c, 0x9c, 0x93, 0x94, 0xb8, 0x31, 0xbb, 0x88, 0xdc, 0xf6, 0xd3, 0x9a, 0xf4, 0x94, 0xda,
+ 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7e, 0xaa, 0x57, 0xd3, 0x6d, 0x03, 0x00, 0x00,
 }
 
 func (this *PushRequest) Equal(that interface{}) bool {
diff --git a/vendor/github.com/grafana/loki/v3/pkg/push/push.proto b/vendor/github.com/grafana/loki/pkg/push/push.proto
similarity index 95%
rename from vendor/github.com/grafana/loki/v3/pkg/push/push.proto
rename to vendor/github.com/grafana/loki/pkg/push/push.proto
index e538c66903eae..3bf8ad06a8a83 100644
--- a/vendor/github.com/grafana/loki/v3/pkg/push/push.proto
+++ b/vendor/github.com/grafana/loki/pkg/push/push.proto
@@ -5,7 +5,7 @@ package logproto;
 import "gogoproto/gogo.proto";
 import "google/protobuf/timestamp.proto";
 
-option go_package = "github.com/grafana/loki/v3/pkg/push";
+option go_package = "github.com/grafana/loki/pkg/push";
 
 service Pusher {
   rpc Push(PushRequest) returns (PushResponse) {}
diff --git a/vendor/github.com/grafana/loki/v3/pkg/push/timestamp.go b/vendor/github.com/grafana/loki/pkg/push/timestamp.go
similarity index 100%
rename from vendor/github.com/grafana/loki/v3/pkg/push/timestamp.go
rename to vendor/github.com/grafana/loki/pkg/push/timestamp.go
diff --git a/vendor/github.com/grafana/loki/v3/pkg/push/types.go b/vendor/github.com/grafana/loki/pkg/push/types.go
similarity index 100%
rename from vendor/github.com/grafana/loki/v3/pkg/push/types.go
rename to vendor/github.com/grafana/loki/pkg/push/types.go
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 10293601308e8..9bbf3e0af8662 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -935,9 +935,9 @@ github.com/grafana/gomemcache/memcache
 # github.com/grafana/jsonparser v0.0.0-20240209175146-098958973a2d
 ## explicit; go 1.13
 github.com/grafana/jsonparser
-# github.com/grafana/loki/v3/pkg/push v0.0.0-20231124142027-e52380921608 => ./pkg/push
+# github.com/grafana/loki/pkg/push v0.0.0-20231124142027-e52380921608 => ./pkg/push
 ## explicit; go 1.19
-github.com/grafana/loki/v3/pkg/push
+github.com/grafana/loki/pkg/push
 # github.com/grafana/pyroscope-go/godeltaprof v0.1.6
 ## explicit; go 1.16
 github.com/grafana/pyroscope-go/godeltaprof
@@ -2266,4 +2266,4 @@ sigs.k8s.io/yaml
 # github.com/gocql/gocql => github.com/grafana/gocql v0.0.0-20200605141915-ba5dc39ece85
 # github.com/hashicorp/memberlist => github.com/grafana/memberlist v0.3.1-0.20220714140823-09ffed8adbbe
 # github.com/grafana/regexp => github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd
-# github.com/grafana/loki/v3/pkg/push => ./pkg/push
+# github.com/grafana/loki/pkg/push => ./pkg/push