diff --git a/docs/sources/setup/install/helm/reference.md b/docs/sources/setup/install/helm/reference.md
index 2155ae9afc66f..833cc2c77edc8 100644
--- a/docs/sources/setup/install/helm/reference.md
+++ b/docs/sources/setup/install/helm/reference.md
@@ -2027,7 +2027,6 @@ null Limits config
 {
-  "enforce_metric_name": false,
   "max_cache_freshness_per_query": "10m",
   "reject_old_samples": true,
   "reject_old_samples_max_age": "168h",
diff --git a/docs/sources/setup/upgrade/_index.md b/docs/sources/setup/upgrade/_index.md
index e76a3d1b191df..e9483e5219409 100644
--- a/docs/sources/setup/upgrade/_index.md
+++ b/docs/sources/setup/upgrade/_index.md
@@ -241,10 +241,6 @@ Some Loki metrics started with the prefix `cortex_`. In this release they will b
  - `cortex_query_scheduler_queue_duration_seconds_sum`
  - `cortex_query_scheduler_queue_length`
  - `cortex_query_scheduler_running`
- - `cortex_quota_cgroup_cpu_max`
- - `cortex_quota_cgroup_cpu_period`
- - `cortex_quota_cpu_count`
- - `cortex_quota_gomaxprocs`
  - `cortex_ring_member_heartbeats_total`
  - `cortex_ring_member_tokens_owned`
  - `cortex_ring_member_tokens_to_own`
diff --git a/pkg/bloomcompactor/TODO.md b/pkg/bloomcompactor/TODO.md
index b34fc24aa967a..479f5399a350d 100644
--- a/pkg/bloomcompactor/TODO.md
+++ b/pkg/bloomcompactor/TODO.md
@@ -1,5 +1,4 @@
-* Should we consider configuring falsePosRate of sbf at runtime?
+* Add falsePosRate of sbf to the config
+* Add per-tenant bool to enable compaction
 * Use tarGz, untarGz before uploading blocks to storage
-* Return checksum from `BuildFrom`
-* Move meta creation to an outer layer, ensure one meta.json per compaction cycle.
 * Introduce back `maxLookBackPeriod` as `RejectOldSamplesMaxAge` limit in distributors
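
As a rough illustration of the tarGz/untarGz item above, a minimal sketch using only the Go standard library; the helper name, package, and layout are illustrative assumptions, not the planned implementation:

package bloomcompactor

import (
	"archive/tar"
	"compress/gzip"
	"io"
	"os"
	"path/filepath"
)

// tarGz writes every regular file under srcDir into dst as a gzip-compressed
// tar archive. Errors from Close are ignored here for brevity.
func tarGz(dst io.Writer, srcDir string) error {
	gz := gzip.NewWriter(dst)
	defer gz.Close()
	tw := tar.NewWriter(gz)
	defer tw.Close()

	return filepath.Walk(srcDir, func(path string, info os.FileInfo, err error) error {
		if err != nil || info.IsDir() {
			return err
		}
		hdr, err := tar.FileInfoHeader(info, "")
		if err != nil {
			return err
		}
		if hdr.Name, err = filepath.Rel(srcDir, path); err != nil {
			return err
		}
		if err := tw.WriteHeader(hdr); err != nil {
			return err
		}
		f, err := os.Open(path)
		if err != nil {
			return err
		}
		defer f.Close()
		_, err = io.Copy(tw, f)
		return err
	})
}
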
diff --git a/pkg/bloomcompactor/bloomcompactor.go b/pkg/bloomcompactor/bloomcompactor.go
index b517957833a94..c41d4bdfd7c91 100644
--- a/pkg/bloomcompactor/bloomcompactor.go
+++ b/pkg/bloomcompactor/bloomcompactor.go
@@ -503,10 +503,13 @@ func createLocalDirName(workingDir string, job Job) string {
 	return filepath.Join(workingDir, dir)
 }
 
-func CompactNewChunks(ctx context.Context, logger log.Logger, job Job, chunks []chunk.Chunk, bt *v1.BloomTokenizer, bloomShipperClient bloomshipper.Client, dst string) (err error) {
+// CompactNewChunks compacts the given chunks, uploads the resulting blocks to storage, and returns the stored blocks.
+func CompactNewChunks(ctx context.Context, logger log.Logger, job Job,
+	chunks []chunk.Chunk, bt *v1.BloomTokenizer,
+	bloomShipperClient bloomshipper.Client, dst string) ([]bloomshipper.Block, error) {
 	// Ensure the context has not been canceled (ie. compactor shutdown has been triggered).
 	if err := ctx.Err(); err != nil {
-		return err
+		return nil, err
 	}
 
 	// Create a bloom for this series
@@ -526,31 +529,14 @@ func CompactNewChunks(ctx context.Context, logger log.Logger, job Job, chunks []
 	blocks, err := buildBloomBlock(ctx, logger, bloomForChks, job, dst)
 	if err != nil {
 		level.Error(logger).Log("building bloomBlocks", err)
-		return
+		return nil, err
 	}
-
 	storedBlocks, err := bloomShipperClient.PutBlocks(ctx, []bloomshipper.Block{blocks})
 	if err != nil {
 		level.Error(logger).Log("putting blocks to storage", err)
-		return
-	}
-
-	storedBlockRefs := make([]bloomshipper.BlockRef, len(storedBlocks))
-	// Build and upload meta.json to storage
-	meta := bloomshipper.Meta{
-		// After successful compaction there should be no tombstones
-		Tombstones: make([]bloomshipper.BlockRef, 0),
-		Blocks:     storedBlockRefs,
-	}
-
-	// TODO move this to an outer layer, otherwise creates a meta per block
-	err = bloomShipperClient.PutMeta(ctx, meta)
-	if err != nil {
-		level.Error(logger).Log("putting meta.json to storage", err)
-		return
+		return nil, err
 	}
-
-	return nil
+	return storedBlocks, nil
 }
 
 func (c *Compactor) runCompact(ctx context.Context, logger log.Logger, job Job, bloomShipperClient bloomshipper.Client, bt *v1.BloomTokenizer, storeClient storeClient) error {
@@ -559,23 +545,44 @@ func (c *Compactor) runCompact(ctx context.Context, logger log.Logger, job Job,
 		return err
 	}
 
-	// TODO call bloomShipperClient.GetMetas to get existing meta.json
+	metaSearchParams := bloomshipper.MetaSearchParams{
+		TenantID:       job.tenantID,
+		MinFingerprint: uint64(job.seriesFP),
+		MaxFingerprint: uint64(job.seriesFP),
+		StartTimestamp: int64(job.from),
+		EndTimestamp:   int64(job.through),
+	}
 	var metas []bloomshipper.Meta
+	// TODO: Configure a pool for these to avoid allocations
+	var bloomBlocksRefs []bloomshipper.BlockRef
+	var tombstonedBlockRefs []bloomshipper.BlockRef
+
+	metas, err := bloomShipperClient.GetMetas(ctx, metaSearchParams)
+	if err != nil {
+		return err
+	}
 
 	if len(metas) == 0 {
 		// Get chunks data from list of chunkRefs
-		chks, err := storeClient.chunk.GetChunks(
-			ctx,
-			makeChunkRefs(job.Chunks(), job.Tenant(), job.Fingerprint()),
-		)
+		chks, err := storeClient.chunk.GetChunks(ctx, makeChunkRefs(job.Chunks(), job.Tenant(), job.Fingerprint()))
 		if err != nil {
 			return err
 		}
 
-		err = CompactNewChunks(ctx, logger, job, chks, bt, bloomShipperClient, c.cfg.WorkingDirectory)
+		storedBlocks, err := CompactNewChunks(ctx, logger, job, chks, bt, bloomShipperClient, c.cfg.WorkingDirectory)
 		if err != nil {
-			return err
+			level.Error(logger).Log("compacting new chunks", err)
+			return err
 		}
+
+		storedBlockRefs := make([]bloomshipper.BlockRef, len(storedBlocks))
+
+		for i, block := range storedBlocks {
+			storedBlockRefs[i] = block.BlockRef
+		}
+
+		// all blocks are new and active blocks
+		bloomBlocksRefs = storedBlockRefs
 	} else {
 		// TODO complete part 2 - periodic compaction for delta from previous period
 		// When already compacted metas exists
@@ -586,11 +592,24 @@ func (c *Compactor) runCompact(ctx context.Context, logger log.Logger, job Job,
 			for _, blockRef := range meta.Blocks {
 				uniqueIndexPaths[blockRef.IndexPath] = struct{}{}
 				// ...
+
+				// the result should be a list of active
+				// blocks and a list of tombstoned bloom blocks.
 			}
 		}
 
 	}
 
+	// Once compaction is done, create a single meta file and upload it to storage
+	meta := bloomshipper.Meta{
+		Tombstones: tombstonedBlockRefs,
+		Blocks:     bloomBlocksRefs,
+	}
+	err = bloomShipperClient.PutMeta(ctx, meta)
+	if err != nil {
+		level.Error(logger).Log("putting meta.json to storage", err)
+		return err
+	}
 	return nil
 }
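
Condensed, for readers skimming the hunk above: a sketch of the reworked flow, with names taken from this change and the surrounding variables (ctx, logger, job, chks, bt, bloomShipperClient, c) assumed to be in scope as they are in runCompact:

	// CompactNewChunks now returns the stored blocks instead of writing meta.json itself.
	storedBlocks, err := CompactNewChunks(ctx, logger, job, chks, bt, bloomShipperClient, c.cfg.WorkingDirectory)
	if err != nil {
		return err
	}

	refs := make([]bloomshipper.BlockRef, 0, len(storedBlocks))
	for _, b := range storedBlocks {
		refs = append(refs, b.BlockRef)
	}

	// A single meta.json per compaction cycle, assembled by the caller.
	meta := bloomshipper.Meta{
		Tombstones: nil, // nothing is tombstoned when only new chunks were compacted
		Blocks:     refs,
	}
	if err := bloomShipperClient.PutMeta(ctx, meta); err != nil {
		return err
	}
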
 
diff --git a/pkg/logql/log/ip.go b/pkg/logql/log/ip.go
index cd803e820c10c..1508432d245c5 100644
--- a/pkg/logql/log/ip.go
+++ b/pkg/logql/log/ip.go
@@ -78,39 +78,41 @@ func (f *IPLineFilter) filterTy(line []byte, ty labels.MatchType) bool {
 
 type IPLabelFilter struct {
 	ip *ipFilter
-	ty LabelFilterType
+	Ty LabelFilterType
 
-	// if used as label matcher, this holds the identifier label name.
+	// if used as a label matcher, this holds the identifier label name.
 	// e.g: (|remote_addr = ip("xxx")). Here labelName is `remote_addr`
-	label string
+	Label string
 
 	// patError records if given pattern is invalid.
 	patError error
 
-	// local copy of pattern to display it in errors, even though pattern matcher fails because of invalid pattern.
-	pattern string
+	// local copy of the pattern, kept to display it in errors even when the pattern matcher fails because the pattern is invalid.
+	Pattern string
 }
 
 // NewIPLabelFilter is used to construct ip filter as label filter for the given `label`.
-func NewIPLabelFilter(pattern string, label string, ty LabelFilterType) *IPLabelFilter {
+func NewIPLabelFilter(pattern, label string, ty LabelFilterType) *IPLabelFilter {
 	ip, err := newIPFilter(pattern)
 	return &IPLabelFilter{
 		ip:       ip,
-		label:    label,
-		ty:       ty,
+		Label:    label,
+		Ty:       ty,
 		patError: err,
-		pattern:  pattern,
+		Pattern:  pattern,
 	}
 }
 
 // `Process` implements `Stage` interface
 func (f *IPLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) {
-	return line, f.filterTy(line, f.ty, lbs)
+	return line, f.filterTy(line, f.Ty, lbs)
 }
 
+func (f *IPLabelFilter) isLabelFilterer() {}
+
 // `RequiredLabelNames` implements `Stage` interface
 func (f *IPLabelFilter) RequiredLabelNames() []string {
-	return []string{f.label}
+	return []string{f.Label}
 }
 
 // PatternError will be used `labelFilter.Stage()` method so that, if the given pattern is wrong
@@ -124,7 +126,7 @@ func (f *IPLabelFilter) filterTy(_ []byte, ty LabelFilterType, lbs *LabelsBuilde
 		// why `true`?. if there's an error only the string matchers can filter out.
 		return true
 	}
-	input, ok := lbs.Get(f.label)
+	input, ok := lbs.Get(f.Label)
 	if !ok {
 		// we have not found the label.
 		return false
@@ -146,11 +148,11 @@ func (f *IPLabelFilter) filterTy(_ []byte, ty LabelFilterType, lbs *LabelsBuilde
 // `String` implements fmt.Stringer interface, by which also implements `LabelFilterer` interface.
 func (f *IPLabelFilter) String() string {
 	eq := "=" // LabelFilterEqual -> "==", we don't want in string representation of ip label filter.
-	if f.ty == LabelFilterNotEqual {
+	if f.Ty == LabelFilterNotEqual {
 		eq = LabelFilterNotEqual.String()
 	}
 
-	return fmt.Sprintf("%s%sip(%q)", f.label, eq, f.pattern) // label filter
+	return fmt.Sprintf("%s%sip(%q)", f.Label, eq, f.Pattern) // label filter
 }
 
 // ipFilter search for IP addresses of given `pattern` in the given `line`.
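
A small usage sketch of the now-exported fields; it assumes the pkg/logql/log package is imported from outside, and the CIDR value is only an example:

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql/log"
)

func main() {
	f := log.NewIPLabelFilter("192.168.1.0/24", "remote_addr", log.LabelFilterEqual)
	if err := f.PatternError(); err != nil {
		panic(err) // the pattern was not a valid IP, range, or CIDR
	}
	// Ty, Label and Pattern are exported, so code outside the package
	// (for example serialization helpers) can inspect the filter directly.
	fmt.Println(f.Label, f.Pattern) // remote_addr 192.168.1.0/24
	fmt.Println(f.String())         // remote_addr=ip("192.168.1.0/24")
}
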
diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go
index a056d8e16ba00..e3bb1a4bcd5b8 100644
--- a/pkg/logql/log/label_filter.go
+++ b/pkg/logql/log/label_filter.go
@@ -54,15 +54,20 @@ func (f LabelFilterType) String() string {
 }
 
 // LabelFilterer can filter extracted labels.
+//
+//sumtype:decl
 type LabelFilterer interface {
 	Stage
 	fmt.Stringer
+
+	// unexported marker method that seals the interface
+	isLabelFilterer()
 }
 
 type BinaryLabelFilter struct {
 	Left  LabelFilterer
 	Right LabelFilterer
-	and   bool
+	And   bool
 }
 
 // NewAndLabelFilter creates a new LabelFilterer from a and binary operation of two LabelFilterer.
@@ -70,7 +75,7 @@ func NewAndLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilt
 	return &BinaryLabelFilter{
 		Left:  left,
 		Right: right,
-		and:   true,
+		And:   true,
 	}
 }
 
@@ -84,16 +89,18 @@ func NewOrLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilte
 
 func (b *BinaryLabelFilter) Process(ts int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) {
 	line, lok := b.Left.Process(ts, line, lbs)
-	if !b.and && lok {
+	if !b.And && lok {
 		return line, true
 	}
 	line, rok := b.Right.Process(ts, line, lbs)
-	if !b.and {
+	if !b.And {
 		return line, lok || rok
 	}
 	return line, lok && rok
 }
 
+func (b *BinaryLabelFilter) isLabelFilterer() {}
+
 func (b *BinaryLabelFilter) RequiredLabelNames() []string {
 	var names []string
 	names = append(names, b.Left.RequiredLabelNames()...)
@@ -105,7 +112,7 @@ func (b *BinaryLabelFilter) String() string {
 	var sb strings.Builder
 	sb.WriteString("( ")
 	sb.WriteString(b.Left.String())
-	if b.and {
+	if b.And {
 		sb.WriteString(" , ")
 	} else {
 		sb.WriteString(" or ")
@@ -122,6 +129,9 @@ type NoopLabelFilter struct {
 func (NoopLabelFilter) Process(_ int64, line []byte, _ *LabelsBuilder) ([]byte, bool) {
 	return line, true
 }
+
+func (NoopLabelFilter) isLabelFilterer() {}
+
 func (NoopLabelFilter) RequiredLabelNames() []string { return []string{} }
 
 func (f NoopLabelFilter) String() string {
@@ -197,6 +207,8 @@ func (d *BytesLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]
 	}
 }
 
+func (d *BytesLabelFilter) isLabelFilterer() {}
+
 func (d *BytesLabelFilter) RequiredLabelNames() []string {
 	return []string{d.Name}
 }
@@ -207,7 +219,7 @@ func (d *BytesLabelFilter) String() string {
 			return -1
 		}
 		return r
-	}, humanize.Bytes(d.Value))
+	}, humanize.Bytes(d.Value)) // TODO: discuss whether this should just be bytes, B, to be more accurate.
 	return fmt.Sprintf("%s%s%s", d.Name, d.Type, b)
 }
 
@@ -262,6 +274,8 @@ func (d *DurationLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder)
 	}
 }
 
+func (d *DurationLabelFilter) isLabelFilterer() {}
+
 func (d *DurationLabelFilter) RequiredLabelNames() []string {
 	return []string{d.Name}
 }
@@ -323,6 +337,8 @@ func (n *NumericLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) (
 
 }
 
+func (n *NumericLabelFilter) isLabelFilterer() {}
+
 func (n *NumericLabelFilter) RequiredLabelNames() []string {
 	return []string{n.Name}
 }
@@ -348,7 +364,7 @@ func NewStringLabelFilter(m *labels.Matcher) LabelFilterer {
 		return &NoopLabelFilter{m}
 	}
 
-	return &lineFilterLabelFilter{
+	return &LineFilterLabelFilter{
 		Matcher: m,
 		filter:  f,
 	}
@@ -358,18 +374,20 @@ func (s *StringLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([
 	return line, s.Matches(labelValue(s.Name, lbs))
 }
 
+func (s *StringLabelFilter) isLabelFilterer() {}
+
 func (s *StringLabelFilter) RequiredLabelNames() []string {
 	return []string{s.Name}
 }
 
-// lineFilterLabelFilter filters the desired label using an optimized line filter
-type lineFilterLabelFilter struct {
+// LineFilterLabelFilter filters the desired label using an optimized line filter
+type LineFilterLabelFilter struct {
 	*labels.Matcher
 	filter Filterer
 }
 
 // overrides the matcher.String() function in case there is a regexpFilter
-func (s *lineFilterLabelFilter) String() string {
+func (s *LineFilterLabelFilter) String() string {
 	if unwrappedFilter, ok := s.filter.(regexpFilter); ok {
 		rStr := unwrappedFilter.String()
 		str := fmt.Sprintf("%s%s`%s`", s.Matcher.Name, s.Matcher.Type, rStr)
@@ -378,12 +396,14 @@ func (s *lineFilterLabelFilter) String() string {
 	return s.Matcher.String()
 }
 
-func (s *lineFilterLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) {
+func (s *LineFilterLabelFilter) Process(_ int64, line []byte, lbs *LabelsBuilder) ([]byte, bool) {
 	v := labelValue(s.Name, lbs)
 	return line, s.filter.Filter(unsafeGetBytes(v))
 }
 
-func (s *lineFilterLabelFilter) RequiredLabelNames() []string {
+func (s *LineFilterLabelFilter) isLabelFilterer() {}
+
+func (s *LineFilterLabelFilter) RequiredLabelNames() []string {
 	return []string{s.Name}
 }
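
Why the unexported isLabelFilterer() marker plus the //sumtype:decl directive matter: LabelFilterer is now sealed to this package, so an exhaustiveness linter that understands the directive can flag type switches that miss an implementation. A hypothetical helper to illustrate:

package example

import "github.com/grafana/loki/pkg/logql/log"

// describeFilter names each LabelFilterer variant; with the interface sealed,
// a sum-type checker can report this switch if a new implementation is added.
func describeFilter(f log.LabelFilterer) string {
	switch f.(type) {
	case *log.BinaryLabelFilter:
		return "binary"
	case *log.IPLabelFilter:
		return "ip"
	case *log.BytesLabelFilter:
		return "bytes"
	case *log.DurationLabelFilter:
		return "duration"
	case *log.NumericLabelFilter:
		return "numeric"
	case *log.StringLabelFilter:
		return "string"
	case *log.LineFilterLabelFilter:
		return "line-filter backed"
	case *log.NoopLabelFilter:
		return "noop"
	}
	return "unknown" // unreachable while the switch covers every implementation
}
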
 
diff --git a/pkg/logql/syntax/ast.go b/pkg/logql/syntax/ast.go
index 4e251022860e5..95009df3a4689 100644
--- a/pkg/logql/syntax/ast.go
+++ b/pkg/logql/syntax/ast.go
@@ -29,6 +29,7 @@ type Expr interface {
 	Shardable() bool // A recursive check on the AST to see if it's shardable.
 	Walkable
 	fmt.Stringer
+	AcceptVisitor
 
 	// Pretty prettyfies any LogQL expression at given `level` of the whole LogQL query.
 	Pretty(level int) string
@@ -53,11 +54,15 @@ type implicit struct{}
 func (implicit) logQLExpr() {}
 
 // LogSelectorExpr is a LogQL expression filtering and returning logs.
+//
+//sumtype:decl
 type LogSelectorExpr interface {
 	Matchers() []*labels.Matcher
-	LogPipelineExpr
+	Pipeline() (Pipeline, error)
 	HasFilter() bool
 	Expr
+
+	isLogSelectorExpr()
 }
 
 // Type alias for backward compatibility
@@ -66,19 +71,17 @@ type (
 	SampleExtractor = log.SampleExtractor
 )
 
-// LogPipelineExpr is an expression defining a log pipeline.
-type LogPipelineExpr interface {
-	Pipeline() (Pipeline, error)
-	Expr
-}
-
 // StageExpr is an expression defining a single step into a log pipeline
+//
+//sumtype:decl
 type StageExpr interface {
 	Stage() (log.Stage, error)
 	Expr
+
+	isStageExpr()
 }
 
-// MultiStageExpr is multiple stages which implement a PipelineExpr.
+// MultiStageExpr is multiple stages which implement a LogSelectorExpr.
 type MultiStageExpr []StageExpr
 
 func (m MultiStageExpr) Pipeline() (log.Pipeline, error) {
@@ -196,6 +199,8 @@ func newMatcherExpr(matchers []*labels.Matcher) *MatchersExpr {
 	return &MatchersExpr{Mts: matchers}
 }
 
+func (e *MatchersExpr) isLogSelectorExpr() {}
+
 func (e *MatchersExpr) Matchers() []*labels.Matcher {
 	return e.Mts
 }
@@ -208,6 +213,8 @@ func (e *MatchersExpr) Shardable() bool { return true }
 
 func (e *MatchersExpr) Walk(f WalkFn) { f(e) }
 
+func (e *MatchersExpr) Accept(v RootVisitor) { v.VisitMatchers(e) }
+
 func (e *MatchersExpr) String() string {
 	var sb strings.Builder
 	sb.WriteString("{")
@@ -242,6 +249,8 @@ func newPipelineExpr(left *MatchersExpr, pipeline MultiStageExpr) LogSelectorExp
 	}
 }
 
+func (e *PipelineExpr) isLogSelectorExpr() {}
+
 func (e *PipelineExpr) Shardable() bool {
 	for _, p := range e.MultiStages {
 		if !p.Shardable() {
@@ -266,6 +275,8 @@ func (e *PipelineExpr) Walk(f WalkFn) {
 	walkAll(f, xs...)
 }
 
+func (e *PipelineExpr) Accept(v RootVisitor) { v.VisitPipeline(e) }
+
 func (e *PipelineExpr) Matchers() []*labels.Matcher {
 	return e.Left.Matchers()
 }
@@ -333,6 +344,8 @@ func newNestedLineFilterExpr(left *LineFilterExpr, right *LineFilterExpr) *LineF
 	}
 }
 
+func (*LineFilterExpr) isStageExpr() {}
+
 func (e *LineFilterExpr) Walk(f WalkFn) {
 	f(e)
 	if e.Left == nil {
@@ -341,6 +354,10 @@ func (e *LineFilterExpr) Walk(f WalkFn) {
 	e.Left.Walk(f)
 }
 
+func (e *LineFilterExpr) Accept(v RootVisitor) {
+	v.VisitLineFilter(e)
+}
+
 // AddFilterExpr adds a filter expression to a logselector expression.
 func AddFilterExpr(expr LogSelectorExpr, ty labels.MatchType, op, match string) (LogSelectorExpr, error) {
 	filter := newLineFilterExpr(ty, op, match)
@@ -471,10 +488,14 @@ func newLogfmtParserExpr(flags []string) *LogfmtParserExpr {
 	return &e
 }
 
+func (*LogfmtParserExpr) isStageExpr() {}
+
 func (e *LogfmtParserExpr) Shardable() bool { return true }
 
 func (e *LogfmtParserExpr) Walk(f WalkFn) { f(e) }
 
+func (e *LogfmtParserExpr) Accept(v RootVisitor) { v.VisitLogfmtParser(e) }
+
 func (e *LogfmtParserExpr) Stage() (log.Stage, error) {
 	return log.NewLogfmtParser(e.Strict, e.KeepEmpty), nil
 }
@@ -524,10 +545,14 @@ func newLabelParserExpr(op, param string) *LabelParserExpr {
 	}
 }
 
+func (*LabelParserExpr) isStageExpr() {}
+
 func (e *LabelParserExpr) Shardable() bool { return true }
 
 func (e *LabelParserExpr) Walk(f WalkFn) { f(e) }
 
+func (e *LabelParserExpr) Accept(v RootVisitor) { v.VisitLabelParser(e) }
+
 func (e *LabelParserExpr) Stage() (log.Stage, error) {
 	switch e.Op {
 	case OpParserTypeJSON:
@@ -569,10 +594,14 @@ func newLabelFilterExpr(filterer log.LabelFilterer) *LabelFilterExpr {
 	}
 }
 
+func (*LabelFilterExpr) isStageExpr() {}
+
 func (e *LabelFilterExpr) Shardable() bool { return true }
 
 func (e *LabelFilterExpr) Walk(f WalkFn) { f(e) }
 
+func (e *LabelFilterExpr) Accept(v RootVisitor) { v.VisitLabelFilter(e) }
+
 func (e *LabelFilterExpr) Stage() (log.Stage, error) {
 	switch ip := e.LabelFilterer.(type) {
 	case *log.IPLabelFilter:
@@ -606,6 +635,8 @@ func newDecolorizeExpr() *DecolorizeExpr {
 	return &DecolorizeExpr{}
 }
 
+func (*DecolorizeExpr) isStageExpr() {}
+
 func (e *DecolorizeExpr) Shardable() bool { return true }
 
 func (e *DecolorizeExpr) Stage() (log.Stage, error) {
@@ -616,6 +647,8 @@ func (e *DecolorizeExpr) String() string {
 }
 func (e *DecolorizeExpr) Walk(f WalkFn) { f(e) }
 
+func (e *DecolorizeExpr) Accept(v RootVisitor) { v.VisitDecolorize(e) }
+
 type DropLabelsExpr struct {
 	dropLabels []log.DropLabel
 	implicit
@@ -625,6 +658,8 @@ func newDropLabelsExpr(dropLabels []log.DropLabel) *DropLabelsExpr {
 	return &DropLabelsExpr{dropLabels: dropLabels}
 }
 
+func (*DropLabelsExpr) isStageExpr() {}
+
 func (e *DropLabelsExpr) Shardable() bool { return true }
 
 func (e *DropLabelsExpr) Stage() (log.Stage, error) {
@@ -654,6 +689,8 @@ func (e *DropLabelsExpr) String() string {
 }
 func (e *DropLabelsExpr) Walk(f WalkFn) { f(e) }
 
+func (e *DropLabelsExpr) Accept(v RootVisitor) { v.VisitDropLabels(e) }
+
 type KeepLabelsExpr struct {
 	keepLabels []log.KeepLabel
 	implicit
@@ -663,6 +700,8 @@ func newKeepLabelsExpr(keepLabels []log.KeepLabel) *KeepLabelsExpr {
 	return &KeepLabelsExpr{keepLabels: keepLabels}
 }
 
+func (*KeepLabelsExpr) isStageExpr() {}
+
 func (e *KeepLabelsExpr) Shardable() bool { return true }
 
 func (e *KeepLabelsExpr) Stage() (log.Stage, error) {
@@ -694,10 +733,16 @@ func (e *KeepLabelsExpr) String() string {
 
 func (e *KeepLabelsExpr) Walk(f WalkFn) { f(e) }
 
+func (e *KeepLabelsExpr) Accept(v RootVisitor) { v.VisitKeepLabel(e) }
+
+func (*LineFmtExpr) isStageExpr() {}
+
 func (e *LineFmtExpr) Shardable() bool { return true }
 
 func (e *LineFmtExpr) Walk(f WalkFn) { f(e) }
 
+func (e *LineFmtExpr) Accept(v RootVisitor) { v.VisitLineFmt(e) }
+
 func (e *LineFmtExpr) Stage() (log.Stage, error) {
 	return log.NewFormatter(e.Value)
 }
@@ -717,6 +762,8 @@ func newLabelFmtExpr(fmts []log.LabelFmt) *LabelFmtExpr {
 	}
 }
 
+func (*LabelFmtExpr) isStageExpr() {}
+
 func (e *LabelFmtExpr) Shardable() bool {
 	// While LabelFmt is shardable in certain cases, it is not always,
 	// but this is left to the shardmapper to determine
@@ -725,6 +772,8 @@ func (e *LabelFmtExpr) Shardable() bool {
 
 func (e *LabelFmtExpr) Walk(f WalkFn) { f(e) }
 
+func (e *LabelFmtExpr) Accept(v RootVisitor) { v.VisitLabelFmt(e) }
+
 func (e *LabelFmtExpr) Stage() (log.Stage, error) {
 	return log.NewLabelsFormatter(e.Formats)
 }
@@ -761,10 +810,14 @@ func newJSONExpressionParser(expressions []log.LabelExtractionExpr) *JSONExpress
 	}
 }
 
+func (*JSONExpressionParser) isStageExpr() {}
+
 func (j *JSONExpressionParser) Shardable() bool { return true }
 
 func (j *JSONExpressionParser) Walk(f WalkFn) { f(j) }
 
+func (j *JSONExpressionParser) Accept(v RootVisitor) { v.VisitJSONExpressionParser(j) }
+
 func (j *JSONExpressionParser) Stage() (log.Stage, error) {
 	return log.NewJSONExpressionParser(j.Expressions)
 }
@@ -813,10 +866,14 @@ func newLogfmtExpressionParser(expressions []log.LabelExtractionExpr, flags []st
 	return &e
 }
 
+func (*LogfmtExpressionParser) isStageExpr() {}
+
 func (l *LogfmtExpressionParser) Shardable() bool { return true }
 
 func (l *LogfmtExpressionParser) Walk(f WalkFn) { f(l) }
 
+func (l *LogfmtExpressionParser) Accept(v RootVisitor) { v.VisitLogfmtExpressionParser(l) }
+
 func (l *LogfmtExpressionParser) Stage() (log.Stage, error) {
 	return log.NewLogfmtExpressionParser(l.Expressions, l.Strict)
 }
@@ -942,6 +999,10 @@ func (r *LogRange) Walk(f WalkFn) {
 	r.Left.Walk(f)
 }
 
+func (r *LogRange) Accept(v RootVisitor) {
+	v.VisitLogRange(r)
+}
+
 // WithoutUnwrap returns a copy of the log range without the unwrap statement.
 func (r *LogRange) WithoutUnwrap() (*LogRange, error) {
 	left, err := Clone(r.Left)
@@ -1101,12 +1162,15 @@ func IsLogicalBinOp(op string) bool {
 }
 
 // SampleExpr is a LogQL expression filtering logs and returning metric samples.
+//
+//sumtype:decl
 type SampleExpr interface {
 	// Selector is the LogQL selector to apply when retrieving logs.
 	Selector() (LogSelectorExpr, error)
 	Extractor() (SampleExtractor, error)
 	MatcherGroups() ([]MatcherRange, error)
 	Expr
+	isSampleExpr()
 }
 
 // RangeAggregationExpr not all range vector aggregation expressions support grouping by/without label(s),
@@ -1150,6 +1214,7 @@ func newRangeAggregationExpr(left *LogRange, operation string, gr *Grouping, str
 	}
 	return e
 }
+func (e *RangeAggregationExpr) isSampleExpr() {}
 
 func (e *RangeAggregationExpr) Selector() (LogSelectorExpr, error) {
 	if e.err != nil {
@@ -1235,6 +1300,8 @@ func (e *RangeAggregationExpr) Walk(f WalkFn) {
 	e.Left.Walk(f)
 }
 
+func (e *RangeAggregationExpr) Accept(v RootVisitor) { v.VisitRangeAggregation(e) }
+
 // Grouping struct represents the grouping by/without label(s) for vector aggregators and range vector aggregators.
 // The representation is as follows:
 //   - No Grouping (labels dismissed):  () => Grouping{Without: false, Groups: nil}
@@ -1278,11 +1345,11 @@ func (g Grouping) Singleton() bool {
 // VectorAggregationExpr all vector aggregation expressions support grouping by/without label(s),
 // therefore the Grouping struct can never be nil.
 type VectorAggregationExpr struct {
-	Left SampleExpr
+	Left SampleExpr `json:"sample_expr"`
 
-	Grouping  *Grouping
-	Params    int
-	Operation string
+	Grouping  *Grouping `json:"grouping,omitempty"`
+	Params    int       `json:"params"`
+	Operation string    `json:"operation"`
 	err       error
 	implicit
 }
@@ -1319,6 +1386,8 @@ func mustNewVectorAggregationExpr(left SampleExpr, operation string, gr *Groupin
 	}
 }
 
+func (e *VectorAggregationExpr) isSampleExpr() {}
+
 func (e *VectorAggregationExpr) MatcherGroups() ([]MatcherRange, error) {
 	if e.err != nil {
 		return nil, e.err
@@ -1438,6 +1507,8 @@ func (e *VectorAggregationExpr) Walk(f WalkFn) {
 	e.Left.Walk(f)
 }
 
+func (e *VectorAggregationExpr) Accept(v RootVisitor) { v.VisitVectorAggregation(e) }
+
 // VectorMatchCardinality describes the cardinality relationship
 // of two Vectors in a binary operation.
 type VectorMatchCardinality int
@@ -1553,6 +1624,8 @@ func (e *BinOpExpr) Walk(f WalkFn) {
 	walkAll(f, e.SampleExpr, e.RHS)
 }
 
+func (e *BinOpExpr) Accept(v RootVisitor) { v.VisitBinOp(e) }
+
 func mustNewBinOpExpr(op string, opts *BinOpOptions, lhs, rhs Expr) SampleExpr {
 	left, ok := lhs.(SampleExpr)
 	if !ok {
@@ -1852,7 +1925,7 @@ func MergeBinOp(op string, left, right *promql.Sample, swap, filter, isVectorCom
 }
 
 type LiteralExpr struct {
-	Val float64
+	Val float64 `json:"val"`
 	err error
 	implicit
 }
@@ -1880,10 +1953,13 @@ func (e *LiteralExpr) String() string {
 // LiteralExpr implements SampleExpr & LogSelectorExpr mainly to reduce the need for more complicated typings
 // to facilitate sum types. We'll be type switching when evaluating them anyways
 // and they will only be present in binary operation legs.
+func (e *LiteralExpr) isSampleExpr()                           {}
+func (e *LiteralExpr) isLogSelectorExpr()                      {}
 func (e *LiteralExpr) Selector() (LogSelectorExpr, error)      { return e, e.err }
 func (e *LiteralExpr) HasFilter() bool                         { return false }
 func (e *LiteralExpr) Shardable() bool                         { return true }
 func (e *LiteralExpr) Walk(f WalkFn)                           { f(e) }
+func (e *LiteralExpr) Accept(v RootVisitor)                    { v.VisitLiteral(e) }
 func (e *LiteralExpr) Pipeline() (log.Pipeline, error)         { return log.NewNoopPipeline(), nil }
 func (e *LiteralExpr) Matchers() []*labels.Matcher             { return nil }
 func (e *LiteralExpr) MatcherGroups() ([]MatcherRange, error)  { return nil, e.err }
@@ -1945,6 +2021,8 @@ func mustNewLabelReplaceExpr(left SampleExpr, dst, replacement, src, regex strin
 	}
 }
 
+func (e *LabelReplaceExpr) isSampleExpr() {}
+
 func (e *LabelReplaceExpr) Selector() (LogSelectorExpr, error) {
 	if e.err != nil {
 		return nil, e.err
@@ -1978,6 +2056,8 @@ func (e *LabelReplaceExpr) Walk(f WalkFn) {
 	e.Left.Walk(f)
 }
 
+func (e *LabelReplaceExpr) Accept(v RootVisitor) { v.VisitLabelReplace(e) }
+
 func (e *LabelReplaceExpr) String() string {
 	var sb strings.Builder
 	sb.WriteString(OpLabelReplace)
@@ -2078,6 +2158,9 @@ func NewVectorExpr(scalar string) *VectorExpr {
 	}
 }
 
+func (e *VectorExpr) isSampleExpr()      {}
+func (e *VectorExpr) isLogSelectorExpr() {}
+
 func (e *VectorExpr) Err() error {
 	return e.err
 }
@@ -2102,6 +2185,7 @@ func (e *VectorExpr) Selector() (LogSelectorExpr, error)      { return e, e.err
 func (e *VectorExpr) HasFilter() bool                         { return false }
 func (e *VectorExpr) Shardable() bool                         { return false }
 func (e *VectorExpr) Walk(f WalkFn)                           { f(e) }
+func (e *VectorExpr) Accept(v RootVisitor)                    { v.VisitVector(e) }
 func (e *VectorExpr) Pipeline() (log.Pipeline, error)         { return log.NewNoopPipeline(), nil }
 func (e *VectorExpr) Matchers() []*labels.Matcher             { return nil }
 func (e *VectorExpr) MatcherGroups() ([]MatcherRange, error)  { return nil, e.err }
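
The LogSelectorExpr reshaping above (Pipeline() pulled into the interface, LogPipelineExpr removed) does not change how callers use it; a minimal sketch, assuming the package's existing ParseLogSelector helper:

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql/syntax"
)

func main() {
	expr, err := syntax.ParseLogSelector(`{app="foo"} |= "bar" | logfmt`, true)
	if err != nil {
		panic(err)
	}
	fmt.Println(expr.Matchers()) // the stream matchers, e.g. app="foo"
	p, err := expr.Pipeline()    // Pipeline() now lives on LogSelectorExpr itself
	if err != nil {
		panic(err)
	}
	_ = p // a log.Pipeline ready to be applied to log lines
}
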
diff --git a/pkg/logql/syntax/parser_test.go b/pkg/logql/syntax/parser_test.go
index c0c39a7d8f137..cd45b6ec74c1e 100644
--- a/pkg/logql/syntax/parser_test.go
+++ b/pkg/logql/syntax/parser_test.go
@@ -17,241 +17,240 @@ func NewStringLabelFilter(s string) *string {
 	return &s
 }
 
-func TestParse(t *testing.T) {
-	for _, tc := range []struct {
-		in  string
-		exp Expr
-		err error
-	}{
-		{
-			// raw string
-			in: "count_over_time({foo=~`bar\\w+`}[12h] |~ `error\\`)",
-			exp: &RangeAggregationExpr{
-				Operation: "count_over_time",
-				Left: &LogRange{
-					Left: &PipelineExpr{
-						MultiStages: MultiStageExpr{
-							newLineFilterExpr(labels.MatchRegexp, "", "error\\"),
-						},
-						Left: &MatchersExpr{
-							Mts: []*labels.Matcher{
-								mustNewMatcher(labels.MatchRegexp, "foo", "bar\\w+"),
-							},
+var ParseTestCases = []struct {
+	in  string
+	exp Expr
+	err error
+}{
+	{
+		// raw string
+		in: "count_over_time({foo=~`bar\\w+`}[12h] |~ `error\\`)",
+		exp: &RangeAggregationExpr{
+			Operation: "count_over_time",
+			Left: &LogRange{
+				Left: &PipelineExpr{
+					MultiStages: MultiStageExpr{
+						newLineFilterExpr(labels.MatchRegexp, "", "error\\"),
+					},
+					Left: &MatchersExpr{
+						Mts: []*labels.Matcher{
+							mustNewMatcher(labels.MatchRegexp, "foo", "bar\\w+"),
 						},
 					},
-					Interval: 12 * time.Hour,
 				},
+				Interval: 12 * time.Hour,
 			},
 		},
-		{
-			in: `{ foo = "bar" } | decolorize`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newDecolorizeExpr(),
-				},
-			),
-		},
-		{
-			// test [12h] before filter expr
-			in: `count_over_time({foo="bar"}[12h] |= "error")`,
-			exp: &RangeAggregationExpr{
-				Operation: "count_over_time",
-				Left: &LogRange{
-					Left: newPipelineExpr(
-						newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}),
-						MultiStageExpr{
-							newLineFilterExpr(labels.MatchEqual, "", "error"),
-						},
-					),
-					Interval: 12 * time.Hour,
-				},
+	},
+	{
+		in: `{ foo = "bar" } | decolorize`,
+		exp: newPipelineExpr(
+			newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
+			MultiStageExpr{
+				newDecolorizeExpr(),
 			},
-		},
-		{
-			// test [12h] after filter expr
-			in: `count_over_time({foo="bar"} |= "error" [12h])`,
-			exp: &RangeAggregationExpr{
-				Operation: "count_over_time",
-				Left: &LogRange{
-					Left: newPipelineExpr(
-						newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}),
-						MultiStageExpr{newLineFilterExpr(labels.MatchEqual, "", "error")},
-					),
-					Interval: 12 * time.Hour,
-				},
+		),
+	},
+	{
+		// test [12h] before filter expr
+		in: `count_over_time({foo="bar"}[12h] |= "error")`,
+		exp: &RangeAggregationExpr{
+			Operation: "count_over_time",
+			Left: &LogRange{
+				Left: newPipelineExpr(
+					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}),
+					MultiStageExpr{
+						newLineFilterExpr(labels.MatchEqual, "", "error"),
+					},
+				),
+				Interval: 12 * time.Hour,
 			},
 		},
-		{
-			in:  `{foo="bar"}`,
-			exp: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-		},
-		{
-			in:  `{ foo = "bar" }`,
-			exp: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-		},
-		{
-			in: `{ namespace="buzz", foo != "bar" }`,
-			exp: &MatchersExpr{Mts: []*labels.Matcher{
-				mustNewMatcher(labels.MatchEqual, "namespace", "buzz"),
-				mustNewMatcher(labels.MatchNotEqual, "foo", "bar"),
-			}},
-		},
-		{
-			in:  `{ foo =~ "bar" }`,
-			exp: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}},
-		},
-		{
-			in: `{ namespace="buzz", foo !~ "bar" }`,
-			exp: &MatchersExpr{Mts: []*labels.Matcher{
-				mustNewMatcher(labels.MatchEqual, "namespace", "buzz"),
-				mustNewMatcher(labels.MatchNotRegexp, "foo", "bar"),
-			}},
-		},
-		{
-			in: `count_over_time({ foo = "bar" }[12m])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 12 * time.Minute,
-				},
-				Operation: "count_over_time",
+	},
+	{
+		// test [12h] after filter expr
+		in: `count_over_time({foo="bar"} |= "error" [12h])`,
+		exp: &RangeAggregationExpr{
+			Operation: "count_over_time",
+			Left: &LogRange{
+				Left: newPipelineExpr(
+					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}),
+					MultiStageExpr{newLineFilterExpr(labels.MatchEqual, "", "error")},
+				),
+				Interval: 12 * time.Hour,
 			},
 		},
-		{
-			in: `bytes_over_time({ foo = "bar" }[12m])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 12 * time.Minute,
-				},
-				Operation: OpRangeTypeBytes,
+	},
+	{
+		in:  `{foo="bar"}`,
+		exp: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+	},
+	{
+		in:  `{ foo = "bar" }`,
+		exp: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+	},
+	{
+		in: `{ namespace="buzz", foo != "bar" }`,
+		exp: &MatchersExpr{Mts: []*labels.Matcher{
+			mustNewMatcher(labels.MatchEqual, "namespace", "buzz"),
+			mustNewMatcher(labels.MatchNotEqual, "foo", "bar"),
+		}},
+	},
+	{
+		in:  `{ foo =~ "bar" }`,
+		exp: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}},
+	},
+	{
+		in: `{ namespace="buzz", foo !~ "bar" }`,
+		exp: &MatchersExpr{Mts: []*labels.Matcher{
+			mustNewMatcher(labels.MatchEqual, "namespace", "buzz"),
+			mustNewMatcher(labels.MatchNotRegexp, "foo", "bar"),
+		}},
+	},
+	{
+		in: `count_over_time({ foo = "bar" }[12m])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 12 * time.Minute,
 			},
-		},
-		{
-			in: `bytes_rate({ foo = "bar" }[12m])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 12 * time.Minute,
-				},
-				Operation: OpRangeTypeBytesRate,
+			Operation: "count_over_time",
+		},
+	},
+	{
+		in: `bytes_over_time({ foo = "bar" }[12m])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 12 * time.Minute,
 			},
-		},
-		{
-			in: `rate({ foo = "bar" }[5h])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "rate",
+			Operation: OpRangeTypeBytes,
+		},
+	},
+	{
+		in: `bytes_rate({ foo = "bar" }[12m])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 12 * time.Minute,
 			},
-		},
-		{
-			in: `{ foo = "bar" }|logfmt --strict`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newLogfmtParserExpr([]string{OpStrict}),
-				},
-			),
-		},
-		{
-			in: `{ foo = "bar" }|logfmt|rate="a"`, // rate should also be able to use it as IDENTIFIER
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newLogfmtParserExpr(nil),
-					newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "rate", "a"))),
-				},
-			),
-		},
-		{
-			in: `{ foo = "bar" }|logfmt|length>5d`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newLogfmtParserExpr(nil),
-					newLabelFilterExpr(log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "length", 5*24*time.Hour)),
-				},
-			),
-		},
-		{
-			in: `{ foo = "bar" }|logfmt --strict --keep-empty|length>5d`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newLogfmtParserExpr([]string{OpStrict, OpKeepEmpty}),
-					newLabelFilterExpr(log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "length", 5*24*time.Hour)),
-				},
-			),
-		},
-		{
-			in: `rate({ foo = "bar" }[5d])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * 24 * time.Hour,
-				},
-				Operation: "rate",
+			Operation: OpRangeTypeBytesRate,
+		},
+	},
+	{
+		in: `rate({ foo = "bar" }[5h])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
 			},
-		},
-		{
-			in: `count_over_time({ foo = "bar" }[1w])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 7 * 24 * time.Hour,
-				},
-				Operation: "count_over_time",
+			Operation: "rate",
+		},
+	},
+	{
+		in: `{ foo = "bar" }|logfmt --strict`,
+		exp: newPipelineExpr(
+			newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
+			MultiStageExpr{
+				newLogfmtParserExpr([]string{OpStrict}),
 			},
-		},
-		{
-			in: `absent_over_time({ foo = "bar" }[1w])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 7 * 24 * time.Hour,
-				},
-				Operation: OpRangeTypeAbsent,
+		),
+	},
+	{
+		in: `{ foo = "bar" }|logfmt|rate="a"`, // rate should also be able to use it as IDENTIFIER
+		exp: newPipelineExpr(
+			newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
+			MultiStageExpr{
+				newLogfmtParserExpr(nil),
+				newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "rate", "a"))),
 			},
-		},
-		{
-			in: `sum(rate({ foo = "bar" }[5h]))`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "rate",
-			}, "sum", nil, nil),
-		},
-		{
-			in: `sum(rate({ foo ="bar" }[1y]))`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 365 * 24 * time.Hour,
-				},
-				Operation: "rate",
-			}, "sum", nil, nil),
-		},
-		{
-			in: `avg(count_over_time({ foo = "bar" }[5h])) by (bar,foo)`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "count_over_time",
-			}, "avg", &Grouping{
-				Without: false,
-				Groups:  []string{"bar", "foo"},
-			}, nil),
-		},
-		{
-			in: `avg(
+		),
+	},
+	{
+		in: `{ foo = "bar" }|logfmt|length>5d`,
+		exp: newPipelineExpr(
+			newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
+			MultiStageExpr{
+				newLogfmtParserExpr(nil),
+				newLabelFilterExpr(log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "length", 5*24*time.Hour)),
+			},
+		),
+	},
+	{
+		in: `{ foo = "bar" }|logfmt --strict --keep-empty|length>5d`,
+		exp: newPipelineExpr(
+			newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
+			MultiStageExpr{
+				newLogfmtParserExpr([]string{OpStrict, OpKeepEmpty}),
+				newLabelFilterExpr(log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "length", 5*24*time.Hour)),
+			},
+		),
+	},
+	{
+		in: `rate({ foo = "bar" }[5d])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * 24 * time.Hour,
+			},
+			Operation: "rate",
+		},
+	},
+	{
+		in: `count_over_time({ foo = "bar" }[1w])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 7 * 24 * time.Hour,
+			},
+			Operation: "count_over_time",
+		},
+	},
+	{
+		in: `absent_over_time({ foo = "bar" }[1w])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 7 * 24 * time.Hour,
+			},
+			Operation: OpRangeTypeAbsent,
+		},
+	},
+	{
+		in: `sum(rate({ foo = "bar" }[5h]))`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "rate",
+		}, "sum", nil, nil),
+	},
+	{
+		in: `sum(rate({ foo ="bar" }[1y]))`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 365 * 24 * time.Hour,
+			},
+			Operation: "rate",
+		}, "sum", nil, nil),
+	},
+	{
+		in: `avg(count_over_time({ foo = "bar" }[5h])) by (bar,foo)`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "count_over_time",
+		}, "avg", &Grouping{
+			Without: false,
+			Groups:  []string{"bar", "foo"},
+		}, nil),
+	},
+	{
+		in: `avg(
 					label_replace(
 						count_over_time({ foo = "bar" }[5h]),
 						"bar",
@@ -260,499 +259,499 @@ func TestParse(t *testing.T) {
 						"(.*).(.*)"
 					)
 				) by (bar,foo)`,
-			exp: mustNewVectorAggregationExpr(
-				mustNewLabelReplaceExpr(
-					&RangeAggregationExpr{
-						Left: &LogRange{
-							Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-							Interval: 5 * time.Hour,
-						},
-						Operation: "count_over_time",
+		exp: mustNewVectorAggregationExpr(
+			mustNewLabelReplaceExpr(
+				&RangeAggregationExpr{
+					Left: &LogRange{
+						Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+						Interval: 5 * time.Hour,
 					},
-					"bar", "$1$2", "foo", "(.*).(.*)",
-				),
-				"avg", &Grouping{
-					Without: false,
-					Groups:  []string{"bar", "foo"},
-				}, nil),
-		},
-		{
-			in: `avg(count_over_time({ foo = "bar" }[5h])) by ()`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "count_over_time",
-			}, "avg", &Grouping{
-				Without: false,
-				Groups:  nil,
-			}, nil),
-		},
-		{
-			in: `max without (bar) (count_over_time({ foo = "bar" }[5h]))`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "count_over_time",
-			}, "max", &Grouping{
-				Without: true,
-				Groups:  []string{"bar"},
-			}, nil),
-		},
-		{
-			in: `max without () (count_over_time({ foo = "bar" }[5h]))`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "count_over_time",
-			}, "max", &Grouping{
-				Without: true,
-				Groups:  nil,
-			}, nil),
-		},
-		{
-			in: `topk(10,count_over_time({ foo = "bar" }[5h])) without (bar)`,
-			exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "count_over_time",
-			}, "topk", &Grouping{
-				Without: true,
-				Groups:  []string{"bar"},
-			}, NewStringLabelFilter("10")),
-		},
-		{
-			in: `bottomk(30 ,sum(rate({ foo = "bar" }[5h])) by (foo))`,
-			exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
-				},
-				Operation: "rate",
-			}, "sum", &Grouping{
-				Groups:  []string{"foo"},
-				Without: false,
-			}, nil), "bottomk", nil,
-				NewStringLabelFilter("30")),
-		},
-		{
-			in: `max( sum(count_over_time({ foo = "bar" }[5h])) without (foo,bar) ) by (foo)`,
-			exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&RangeAggregationExpr{
-				Left: &LogRange{
-					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-					Interval: 5 * time.Hour,
+					Operation: "count_over_time",
 				},
-				Operation: "count_over_time",
-			}, "sum", &Grouping{
-				Groups:  []string{"foo", "bar"},
-				Without: true,
-			}, nil), "max", &Grouping{
-				Groups:  []string{"foo"},
+				"bar", "$1$2", "foo", "(.*).(.*)",
+			),
+			"avg", &Grouping{
 				Without: false,
+				Groups:  []string{"bar", "foo"},
 			}, nil),
-		},
-		{
-			in:  `unk({ foo = "bar" }[5m])`,
-			err: logqlmodel.NewParseError("syntax error: unexpected IDENTIFIER", 1, 1),
-		},
-		{
-			in:  `absent_over_time({ foo = "bar" }[5h]) by (foo)`,
-			err: logqlmodel.NewParseError("grouping not allowed for absent_over_time aggregation", 0, 0),
-		},
-		{
-			in:  `rate({ foo = "bar" }[5minutes])`,
-			err: logqlmodel.NewParseError(`unknown unit "minutes" in duration "5minutes"`, 0, 21),
-		},
-		{
-			in:  `label_replace(rate({ foo = "bar" }[5m]),"")`,
-			err: logqlmodel.NewParseError(`syntax error: unexpected ), expecting ,`, 1, 43),
-		},
-		{
-			in:  `label_replace(rate({ foo = "bar" }[5m]),"foo","$1","bar","^^^^x43\\q")`,
-			err: logqlmodel.NewParseError("invalid regex in label_replace: error parsing regexp: invalid escape sequence: `\\q`", 0, 0),
-		},
-		{
-			in:  `rate({ foo = "bar" }[5)`,
-			err: logqlmodel.NewParseError("missing closing ']' in duration", 0, 21),
-		},
-		{
-			in:  `min({ foo = "bar" }[5m])`,
-			err: logqlmodel.NewParseError("syntax error: unexpected RANGE", 0, 20),
-		},
-		// line filter for ip-matcher
-		{
-			in: `{foo="bar"} |= "baz" |= ip("123.123.123.123")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newNestedLineFilterExpr(
-						newLineFilterExpr(labels.MatchEqual, "", "baz"),
-						newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"),
-					),
-				},
-			),
-		},
-		{
-			in: `{ foo = "bar" , ip="foo"}|logfmt|= ip("127.0.0.1")|ip="2.3.4.5"|ip="abc"|ipaddr=ip("4.5.6.7")|ip=ip("6.7.8.9")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar"), mustNewMatcher(labels.MatchEqual, "ip", "foo")}),
-				MultiStageExpr{
-					newLogfmtParserExpr(nil),
-					newLineFilterExpr(labels.MatchEqual, OpFilterIP, "127.0.0.1"),
-					newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "ip", "2.3.4.5"))),
-					newLabelFilterExpr(log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "ip", "abc"))),
-					newLabelFilterExpr(log.NewIPLabelFilter("4.5.6.7", "ipaddr", log.LabelFilterEqual)),
-					newLabelFilterExpr(log.NewIPLabelFilter("6.7.8.9", "ip", log.LabelFilterEqual)),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} |= ip("123.123.123.123")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
+	},
+	{
+		in: `avg(count_over_time({ foo = "bar" }[5h])) by ()`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "count_over_time",
+		}, "avg", &Grouping{
+			Without: false,
+			Groups:  nil,
+		}, nil),
+	},
+	{
+		in: `max without (bar) (count_over_time({ foo = "bar" }[5h]))`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "count_over_time",
+		}, "max", &Grouping{
+			Without: true,
+			Groups:  []string{"bar"},
+		}, nil),
+	},
+	{
+		in: `max without () (count_over_time({ foo = "bar" }[5h]))`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "count_over_time",
+		}, "max", &Grouping{
+			Without: true,
+			Groups:  nil,
+		}, nil),
+	},
+	{
+		in: `topk(10,count_over_time({ foo = "bar" }[5h])) without (bar)`,
+		exp: mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "count_over_time",
+		}, "topk", &Grouping{
+			Without: true,
+			Groups:  []string{"bar"},
+		}, NewStringLabelFilter("10")),
+	},
+	{
+		in: `bottomk(30 ,sum(rate({ foo = "bar" }[5h])) by (foo))`,
+		exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "rate",
+		}, "sum", &Grouping{
+			Groups:  []string{"foo"},
+			Without: false,
+		}, nil), "bottomk", nil,
+			NewStringLabelFilter("30")),
+	},
+	{
+		in: `max( sum(count_over_time({ foo = "bar" }[5h])) without (foo,bar) ) by (foo)`,
+		exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&RangeAggregationExpr{
+			Left: &LogRange{
+				Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+				Interval: 5 * time.Hour,
+			},
+			Operation: "count_over_time",
+		}, "sum", &Grouping{
+			Groups:  []string{"foo", "bar"},
+			Without: true,
+		}, nil), "max", &Grouping{
+			Groups:  []string{"foo"},
+			Without: false,
+		}, nil),
+	},
+	{
+		in:  `unk({ foo = "bar" }[5m])`,
+		err: logqlmodel.NewParseError("syntax error: unexpected IDENTIFIER", 1, 1),
+	},
+	{
+		in:  `absent_over_time({ foo = "bar" }[5h]) by (foo)`,
+		err: logqlmodel.NewParseError("grouping not allowed for absent_over_time aggregation", 0, 0),
+	},
+	{
+		in:  `rate({ foo = "bar" }[5minutes])`,
+		err: logqlmodel.NewParseError(`unknown unit "minutes" in duration "5minutes"`, 0, 21),
+	},
+	{
+		in:  `label_replace(rate({ foo = "bar" }[5m]),"")`,
+		err: logqlmodel.NewParseError(`syntax error: unexpected ), expecting ,`, 1, 43),
+	},
+	{
+		in:  `label_replace(rate({ foo = "bar" }[5m]),"foo","$1","bar","^^^^x43\\q")`,
+		err: logqlmodel.NewParseError("invalid regex in label_replace: error parsing regexp: invalid escape sequence: `\\q`", 0, 0),
+	},
+	{
+		in:  `rate({ foo = "bar" }[5)`,
+		err: logqlmodel.NewParseError("missing closing ']' in duration", 0, 21),
+	},
+	{
+		in:  `min({ foo = "bar" }[5m])`,
+		err: logqlmodel.NewParseError("syntax error: unexpected RANGE", 0, 20),
+	},
+	// line filter for ip-matcher
+	{
+		in: `{foo="bar"} |= "baz" |= ip("123.123.123.123")`,
+		exp: newPipelineExpr(
+			newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
+			MultiStageExpr{
+				newNestedLineFilterExpr(
+					newLineFilterExpr(labels.MatchEqual, "", "baz"),
 					newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} |= ip("123.123.123.123")|= "baz"`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newNestedLineFilterExpr(
-						newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"),
-						newLineFilterExpr(labels.MatchEqual, "", "baz"),
-					),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} |= ip("123.123.123.123")|= "baz" |=ip("123.123.123.123")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newNestedLineFilterExpr(
-						newNestedLineFilterExpr(
-							newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"),
-							newLineFilterExpr(labels.MatchEqual, "", "baz"),
-						),
-						newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"),
-					),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} |= "baz" |= ip("123.123.123.123")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newNestedLineFilterExpr(
-						newLineFilterExpr(labels.MatchEqual, "", "baz"),
-						newLineFilterExpr(labels.MatchEqual, OpFilterIP, "123.123.123.123"),
-					),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} != ip("123.123.123.123")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} != ip("123.123.123.123")|= "baz"`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newNestedLineFilterExpr(
-						newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"),
-						newLineFilterExpr(labels.MatchEqual, "", "baz"),
-					),
-				},
-			),
-		},
-		{
-			in: `{foo="bar"} != ip("123.123.123.123")|= "baz" !=ip("123.123.123.123")`,
-			exp: newPipelineExpr(
-				newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}),
-				MultiStageExpr{
-					newNestedLineFilterExpr(
-						newNestedLineFilterExpr(
-							newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"),
-							newLineFilterExpr(labels.MatchEqual, "", "baz"),
-						),
-						newLineFilterExpr(labels.MatchNotEqual, OpFilterIP, "123.123.123.123"),
-					),
-				},
-			),
-		},
-		// label filter for ip-matcher
-		{
-			in:  `{ foo = "bar" }|logfmt|addr>=ip("1.2.3.4")`,
-			err: logqlmodel.NewParseError("syntax error: unexpected ip, expecting BYTES or NUMBER or DURATION", 1, 30),
-		},
-		{
-			in:  `{ foo = "bar" }|logfmt|addr>ip("1.2.3.4")`,
-			err: logqlmodel.NewParseError("syntax error: unexpected ip, expecting BYTES or NUMBER or DURATION", 1, 29),
-		},
-		{
-			in:  `{ foo = "bar" }|logfmt|addr<=ip("1.2.3.4")`,
-			err: logqlmodel.NewParseError("syntax error: unexpected ip, expecting BYTES or NUMBER or DURATION", 1, 30),
-		},
-		{
-			in:  `{ foo = "bar" }|logfmt|addr=ip("1.2.3.4")`,
+		err: logqlmodel.NewParseError("syntax error: unexpected ip, expecting BYTES or NUMBER or DURATION", 1, 30),
+	},
+	{
+		in:  `{ foo = "bar" }|logfmt|addr>ip("1.2.3.4")`,
+		err: logqlmodel.NewParseError("syntax error: unexpected ip, expecting BYTES or NUMBER or DURATION", 1, 29),
+	},
+	{
+		in:  `{ foo = "bar" }|logfmt|addr<=ip("1.2.3.4")`,
+		err: logqlmodel.NewParseError("syntax error: unexpected ip, expecting BYTES or NUMBER or DURATION", 1, 30),
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
 					),
-					mustNewVectorAggregationExpr(newRangeAggregationExpr(
-						&LogRange{
-							Left: &MatchersExpr{
-								Mts: []*labels.Matcher{
-									mustNewMatcher(labels.MatchEqual, "foo", "bar"),
-								},
-							},
-							Interval: 5 * time.Minute,
-						}, OpRangeTypeCount, nil, nil),
-						"sum",
-						&Grouping{
-							Without: false,
-							Groups:  []string{"foo"},
-						},
-						nil,
+				},
+			},
+		},
+	},
+	{
+		in: `{app="foo"} |= "bar" | unpack | json | latency >= 250ms or ( status_code < 500 and status_code > 200)`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeUnpack, ""),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
 					),
-				),
-			),
+				},
+			},
 		},
-		{
-			in: `sum by (job) (
-							count_over_time({namespace="tns"} |= "level=error"[5m])
-						/
-							count_over_time({namespace="tns"}[5m])
-						)`,
-			exp: mustNewVectorAggregationExpr(
-				mustNewBinOpExpr(OpTypeDiv,
-					&BinOpOptions{
-						VectorMatching: &VectorMatching{Card: CardOneToOne},
-					},
-					newRangeAggregationExpr(
-						&LogRange{
-							Left: newPipelineExpr(
-								newMatcherExpr([]*labels.Matcher{
-									mustNewMatcher(labels.MatchEqual, "namespace", "tns"),
-								}),
-								MultiStageExpr{
-									newLineFilterExpr(labels.MatchEqual, "", "level=error"),
-								}),
-							Interval: 5 * time.Minute,
-						}, OpRangeTypeCount, nil, nil),
-					newRangeAggregationExpr(
-						&LogRange{
-							Left: &MatchersExpr{
-								Mts: []*labels.Matcher{
-									mustNewMatcher(labels.MatchEqual, "namespace", "tns"),
-								},
-							},
-							Interval: 5 * time.Minute,
-						}, OpRangeTypeCount, nil, nil)), OpTypeSum, &Grouping{Groups: []string{"job"}}, nil),
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | (duration > 1s or status!= 200) and method!="POST"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewAndLabelFilter(
+						log.NewOrLabelFilter(
+							log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "duration", 1*time.Second),
+							log.NewNumericLabelFilter(log.LabelFilterNotEqual, "status", 200.0),
+						),
+						log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "method", "POST")),
+					),
+				},
+			},
 		},
-		{
-			in: `sum by (job) (
-							count_over_time({namespace="tns"} |= "level=error"[5m])
-						/
-							count_over_time({namespace="tns"}[5m])
-						) * 100`,
-			exp: mustNewBinOpExpr(OpTypeMul, &BinOpOptions{
-				VectorMatching: &VectorMatching{Card: CardOneToOne},
-			}, mustNewVectorAggregationExpr(
-				mustNewBinOpExpr(OpTypeDiv,
-					&BinOpOptions{
-						VectorMatching: &VectorMatching{Card: CardOneToOne},
-					},
-					newRangeAggregationExpr(
-						&LogRange{
-							Left: newPipelineExpr(
-								newMatcherExpr([]*labels.Matcher{
-									mustNewMatcher(labels.MatchEqual, "namespace", "tns"),
-								}),
-								MultiStageExpr{
-									newLineFilterExpr(labels.MatchEqual, "", "level=error"),
-								}),
-							Interval: 5 * time.Minute,
-						}, OpRangeTypeCount, nil, nil),
-					newRangeAggregationExpr(
-						&LogRange{
-							Left: &MatchersExpr{
-								Mts: []*labels.Matcher{
-									mustNewMatcher(labels.MatchEqual, "namespace", "tns"),
-								},
-							},
-							Interval: 5 * time.Minute,
-						}, OpRangeTypeCount, nil, nil)), OpTypeSum, &Grouping{Groups: []string{"job"}}, nil),
-				mustNewLiteralExpr("100", false),
-			),
+	},
+	{
+		in: `{app="foo"} |= "bar" | pattern " bar " | (duration > 1s or status!= 200) and method!="POST"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypePattern, " bar "),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewAndLabelFilter(
+						log.NewOrLabelFilter(
+							log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "duration", 1*time.Second),
+							log.NewNumericLabelFilter(log.LabelFilterNotEqual, "status", 200.0),
+						),
+						log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "method", "POST")),
+					),
+				},
+			},
 		},
-		{
-			// reduces binop with two literalExprs
-			in: `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 / 2`,
-			exp: mustNewBinOpExpr(
-				OpTypeAdd,
-				&BinOpOptions{
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | ( status_code < 500 and status_code > 200) or latency >= 250ms `,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+					),
 				},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						&LogRange{
-							Left: &MatchersExpr{
-								Mts: []*labels.Matcher{
-									mustNewMatcher(labels.MatchEqual, "foo", "bar"),
-								},
-							},
-							Interval: 5 * time.Minute,
-						}, OpRangeTypeCount, nil, nil),
-					"sum",
-					&Grouping{
-						Without: false,
-						Groups:  []string{"foo"},
-					},
-					nil,
-				),
-				&LiteralExpr{Val: 0.5},
-			),
+			},
 		},
-		{
-			// test signs
-			in: `1 + -2 / 1`,
-			exp: mustNewBinOpExpr(
-				OpTypeAdd,
-				&BinOpOptions{
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | ( status_code < 500 or status_code > 200) and latency >= 250ms `,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewAndLabelFilter(
+						log.NewOrLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+					),
 				},
-				&LiteralExpr{Val: 1},
-				mustNewBinOpExpr(OpTypeDiv, &BinOpOptions{
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
-				}, &LiteralExpr{Val: -2}, &LiteralExpr{Val: 1}),
-			),
+			},
 		},
-		{
-			// test signs/ops with equal associativity
-			in: `1 + 1 - -1`,
-			exp: mustNewBinOpExpr(
-				OpTypeSub,
-				&BinOpOptions{
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
+	},
+	{
+		in: `{app="foo"} |= "bar" | json |  status_code < 500 or status_code > 200 and latency >= 250ms `,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+							log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						),
+					),
 				},
-				mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
-				}, &LiteralExpr{Val: 1}, &LiteralExpr{Val: 1}),
-				&LiteralExpr{Val: -1},
-			),
+			},
 		},
-		{
-			in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)`,
-			exp: &PipelineExpr{
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+				| foo="bar" buzz!="blip", blop=~"boop" or fuzz==5`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
+					),
+				},
+				&LabelFilterExpr{
+					LabelFilterer: log.NewAndLabelFilter(
+						log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "foo", "bar")),
+						log.NewAndLabelFilter(
+							log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "buzz", "blip")),
+							log.NewOrLabelFilter(
+								log.NewStringLabelFilter(mustNewMatcher(labels.MatchRegexp, "blop", "boop")),
+								log.NewNumericLabelFilter(log.LabelFilterEqual, "fuzz", 5),
+							),
+						),
+					),
+				},
+			},
+		},
+	},
+	{
+		in: `{app="foo"} |= "bar" | line_format "blip{{ .foo }}blop"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLineFmtExpr("blip{{ .foo }}blop"),
+			},
+		},
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
+					),
+				},
+				newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+			},
+		},
+	},
+	{
+		in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+						),
+					),
+				},
+				newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+				newLabelFmtExpr([]log.LabelFmt{
+					log.NewRenameLabelFmt("foo", "bar"),
+					log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+				}),
+			},
+		},
+	},
+	{
+		in: `count_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"[5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
@@ -1380,16 +1603,63 @@ func TestParse(t *testing.T) {
 							),
 						),
 					},
+					newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+					newLabelFmtExpr([]log.LabelFmt{
+						log.NewRenameLabelFmt("foo", "bar"),
+						log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+					}),
+				},
+			},
+				5*time.Minute,
+				nil, nil),
+			OpRangeTypeCount,
+			nil,
+			nil,
+		),
+	},
+	{
+		in:  "{app=~\"\xa0\xa1\"}",
+		exp: nil,
+		err: logqlmodel.NewParseError("invalid UTF-8 encoding", 1, 7),
+	},
+	{
+		in: `sum_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"[5m])`,
+		exp: nil,
+		err: logqlmodel.NewParseError("invalid aggregation sum_over_time without unwrap", 0, 0),
+	},
+	{
+		in:  `count_over_time({app="foo"} |= "foo" | json | unwrap foo [5m])`,
+		exp: nil,
+		err: logqlmodel.NewParseError("invalid aggregation count_over_time with unwrap", 0, 0),
+	},
+	{
+		in: `{app="foo"} |= "bar" | json |  status_code < 500 or status_code > 200 and size >= 2.5KiB `,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
+				&LabelFilterExpr{
+					LabelFilterer: log.NewOrLabelFilter(
+						log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+						log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
+							log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, "size", 2560),
+						),
+					),
 				},
 			},
 		},
-		{
-			in: `{app="foo"} |= "bar" | unpack | json | latency >= 250ms or ( status_code < 500 and status_code > 200)`,
-			exp: &PipelineExpr{
+	},
+	{
+		in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
-					newLabelParserExpr(OpParserTypeUnpack, ""),
 					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
 						LabelFilterer: log.NewOrLabelFilter(
@@ -1400,108 +1670,224 @@ func TestParse(t *testing.T) {
 							),
 						),
 					},
+					newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+					newLabelFmtExpr([]log.LabelFmt{
+						log.NewRenameLabelFmt("foo", "bar"),
+						log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+					}),
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json | (duration > 1s or status!= 200) and method!="POST"`,
-			exp: &PipelineExpr{
+				5*time.Minute,
+				newUnwrapExpr("foo", ""),
+				nil),
+			OpRangeTypeStdvar, nil, nil,
+		),
+	},
+	{
+		in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
 					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
-						LabelFilterer: log.NewAndLabelFilter(
-							log.NewOrLabelFilter(
-								log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "duration", 1*time.Second),
-								log.NewNumericLabelFilter(log.LabelFilterNotEqual, "status", 200.0),
+						LabelFilterer: log.NewOrLabelFilter(
+							log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+							log.NewAndLabelFilter(
+								log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+								log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
 							),
-							log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "method", "POST")),
 						),
 					},
+					newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+					newLabelFmtExpr([]log.LabelFmt{
+						log.NewRenameLabelFmt("foo", "bar"),
+						log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+					}),
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | pattern " bar " | (duration > 1s or status!= 200) and method!="POST"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("foo", OpConvDuration),
+				nil),
+			OpRangeTypeStdvar, nil, nil,
+		),
+	},
+	{
+		in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap bytes(foo) [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
+				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
 				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "bar"),
-					newLabelParserExpr(OpParserTypePattern, " bar "),
+					newLineFilterExpr(labels.MatchEqual, "", "level=error"),
+					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
 						LabelFilterer: log.NewAndLabelFilter(
-							log.NewOrLabelFilter(
-								log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "duration", 1*time.Second),
-								log.NewNumericLabelFilter(log.LabelFilterNotEqual, "status", 200.0),
-							),
-							log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "method", "POST")),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5),
+							log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
 						),
 					},
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json | ( status_code < 500 and status_code > 200) or latency >= 250ms `,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("foo", OpConvBytes),
+				nil),
+			OpRangeTypeSum, nil, nil,
+		),
+	},
+	{
+		in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap bytes(foo) [5m] offset 5m)`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
+				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
 				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "bar"),
+					newLineFilterExpr(labels.MatchEqual, "", "level=error"),
 					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
-						LabelFilterer: log.NewOrLabelFilter(
-							log.NewAndLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-								log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-							),
-							log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+						LabelFilterer: log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5),
+							log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
 						),
 					},
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json | ( status_code < 500 or status_code > 200) and latency >= 250ms `,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("foo", OpConvBytes),
+				newOffsetExpr(5*time.Minute)),
+			OpRangeTypeSum, nil, nil,
+		),
+	},
+	{
+		in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
+				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
 				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "bar"),
+					newLineFilterExpr(labels.MatchEqual, "", "level=error"),
 					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
 						LabelFilterer: log.NewAndLabelFilter(
-							log.NewOrLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-								log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-							),
-							log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+							log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5),
+							log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
 						),
 					},
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json |  status_code < 500 or status_code > 200 and latency >= 250ms `,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("latency", ""),
+				nil),
+			OpRangeTypeSum, nil, nil,
+		),
+	},
+	{
+		in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo==5,bar<25ms| unwrap latency [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
+				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
 				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "bar"),
+					newLineFilterExpr(labels.MatchEqual, "", "level=error"),
 					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
-						LabelFilterer: log.NewOrLabelFilter(
-							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-							log.NewAndLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-								log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-							),
+						LabelFilterer: log.NewAndLabelFilter(
+							log.NewNumericLabelFilter(log.LabelFilterEqual, "foo", 5),
+							log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
 						),
 					},
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-				| foo="bar" buzz!="blip", blop=~"boop" or fuzz==5`,
-			exp: &PipelineExpr{
+				5*time.Minute,
+				newUnwrapExpr("latency", ""),
+				nil),
+			OpRangeTypeSum, nil, nil,
+		),
+	},
+	{
+		in: `stddev_over_time({app="foo"} |= "bar" | unwrap bar [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
+				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				MultiStages: MultiStageExpr{
+					newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				},
+			},
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				nil),
+			OpRangeTypeStddev, nil, nil,
+		),
+	},
+	{
+		in: `min_over_time({app="foo"} | unwrap bar [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(
+				newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				nil),
+			OpRangeTypeMin, nil, nil,
+		),
+	},
+	{
+		in: `min_over_time({app="foo"} | unwrap bar [5m]) by ()`,
+		exp: newRangeAggregationExpr(
+			newLogRange(
+				newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				nil),
+			OpRangeTypeMin, &Grouping{}, nil,
+		),
+	},
+	{
+		in: `max_over_time({app="foo"} | unwrap bar [5m]) without ()`,
+		exp: newRangeAggregationExpr(
+			newLogRange(
+				newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				nil),
+			OpRangeTypeMax, &Grouping{Without: true}, nil,
+		),
+	},
+	{
+		in: `max_over_time({app="foo"} | unwrap bar [5m]) without (foo,bar)`,
+		exp: newRangeAggregationExpr(
+			newLogRange(
+				newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				nil),
+			OpRangeTypeMax, &Grouping{Without: true, Groups: []string{"foo", "bar"}}, nil,
+		),
+	},
+	{
+		in: `max_over_time({app="foo"} | unwrap bar [5m] offset 5m) without (foo,bar)`,
+		exp: newRangeAggregationExpr(
+			newLogRange(
+				newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				newOffsetExpr(5*time.Minute)),
+			OpRangeTypeMax, &Grouping{Without: true, Groups: []string{"foo", "bar"}}, nil,
+		),
+	},
+	{
+		in: `max_over_time({app="foo"} | unwrap bar [5m] offset -5m) without (foo,bar)`,
+		exp: newRangeAggregationExpr(
+			newLogRange(
+				newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+				5*time.Minute,
+				newUnwrapExpr("bar", ""),
+				newOffsetExpr(-5*time.Minute)),
+			OpRangeTypeMax, &Grouping{Without: true, Groups: []string{"foo", "bar"}}, nil,
+		),
+	},
+	{
+		in: `max_over_time(({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo )[5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
@@ -1515,35 +1901,24 @@ func TestParse(t *testing.T) {
 							),
 						),
 					},
-					&LabelFilterExpr{
-						LabelFilterer: log.NewAndLabelFilter(
-							log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "foo", "bar")),
-							log.NewAndLabelFilter(
-								log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "buzz", "blip")),
-								log.NewOrLabelFilter(
-									log.NewStringLabelFilter(mustNewMatcher(labels.MatchRegexp, "blop", "boop")),
-									log.NewNumericLabelFilter(log.LabelFilterEqual, "fuzz", 5),
-								),
-							),
-						),
-					},
-				},
-			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | line_format "blip{{ .foo }}blop"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "bar"),
-					newLineFmtExpr("blip{{ .foo }}blop"),
+					newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+					newLabelFmtExpr([]log.LabelFmt{
+						log.NewRenameLabelFmt("foo", "bar"),
+						log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+					}),
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}"`,
-			exp: &PipelineExpr{
+				5*time.Minute,
+				newUnwrapExpr("foo", ""),
+				nil),
+			OpRangeTypeMax, nil, nil,
+		),
+	},
+	{
+		in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
@@ -1558,13 +1933,23 @@ func TestParse(t *testing.T) {
 						),
 					},
 					newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+					newLabelFmtExpr([]log.LabelFmt{
+						log.NewRenameLabelFmt("foo", "bar"),
+						log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+					}),
 				},
 			},
-		},
-		{
-			in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"`,
-			exp: &PipelineExpr{
+				5*time.Minute,
+				newUnwrapExpr("foo", ""),
+				nil),
+			OpRangeTypeQuantile, nil, NewStringLabelFilter("0.99998"),
+		),
+	},
+	{
+		in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]) by (namespace,instance)`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
@@ -1585,78 +1970,51 @@ func TestParse(t *testing.T) {
 					}),
 				},
 			},
-		},
-		{
-			in: `count_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"[5m])`,
-			exp: newRangeAggregationExpr(
-				newLogRange(&PipelineExpr{
-					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					MultiStages: MultiStageExpr{
-						newLineFilterExpr(labels.MatchEqual, "", "bar"),
-						newLabelParserExpr(OpParserTypeJSON, ""),
-						&LabelFilterExpr{
-							LabelFilterer: log.NewOrLabelFilter(
-								log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-								log.NewAndLabelFilter(
-									log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-									log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-								),
-							),
-						},
-						newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-						newLabelFmtExpr([]log.LabelFmt{
-							log.NewRenameLabelFmt("foo", "bar"),
-							log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-						}),
-					},
-				},
-					5*time.Minute,
-					nil, nil),
-				OpRangeTypeCount,
-				nil,
-				nil,
-			),
-		},
-		{
-			in:  "{app=~\"\xa0\xa1\"}",
-			exp: nil,
-			err: logqlmodel.NewParseError("invalid UTF-8 encoding", 1, 7),
-		},
-		{
-			in: `sum_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"[5m])`,
-			exp: nil,
-			err: logqlmodel.NewParseError("invalid aggregation sum_over_time without unwrap", 0, 0),
-		},
-		{
-			in:  `count_over_time({app="foo"} |= "foo" | json | unwrap foo [5m])`,
-			exp: nil,
-			err: logqlmodel.NewParseError("invalid aggregation count_over_time with unwrap", 0, 0),
-		},
-		{
-			in: `{app="foo"} |= "bar" | json |  status_code < 500 or status_code > 200 and size >= 2.5KiB `,
-			exp: &PipelineExpr{
+				5*time.Minute,
+				newUnwrapExpr("foo", ""),
+				nil),
+			OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
+		),
+	},
+	{
+		in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo | __error__ !~".+"[5m]) by (namespace,instance)`,
+		exp: newRangeAggregationExpr(
+			newLogRange(&PipelineExpr{
 				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 				MultiStages: MultiStageExpr{
 					newLineFilterExpr(labels.MatchEqual, "", "bar"),
 					newLabelParserExpr(OpParserTypeJSON, ""),
 					&LabelFilterExpr{
 						LabelFilterer: log.NewOrLabelFilter(
-							log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+							log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
 							log.NewAndLabelFilter(
+								log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
 								log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-								log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, "size", 2560),
 							),
 						),
 					},
+					newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+					newLabelFmtExpr([]log.LabelFmt{
+						log.NewRenameLabelFmt("foo", "bar"),
+						log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+					}),
 				},
 			},
-		},
-		{
-			in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`,
-			exp: newRangeAggregationExpr(
+				5*time.Minute,
+				newUnwrapExpr("foo", "").addPostFilter(log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotRegexp, logqlmodel.ErrorLabel, ".+"))),
+				nil),
+			OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
+		),
+	},
+	{
+		in: `sum without (foo) (
+				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
+								) by (namespace,instance)
+					)`,
+		exp: mustNewVectorAggregationExpr(
+			newRangeAggregationExpr(
 				newLogRange(&PipelineExpr{
 					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 					MultiStages: MultiStageExpr{
@@ -1681,13 +2039,21 @@ func TestParse(t *testing.T) {
 					5*time.Minute,
 					newUnwrapExpr("foo", ""),
 					nil),
-				OpRangeTypeStdvar, nil, nil,
+				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 			),
-		},
-		{
-			in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m])`,
-			exp: newRangeAggregationExpr(
+			OpTypeSum,
+			&Grouping{Without: true, Groups: []string{"foo"}},
+			nil,
+		),
+	},
+	{
+		in: `sum without (foo) (
+				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m] offset 5m
+								) by (namespace,instance)
+					)`,
+		exp: mustNewVectorAggregationExpr(
+			newRangeAggregationExpr(
 				newLogRange(&PipelineExpr{
 					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 					MultiStages: MultiStageExpr{
@@ -1710,184 +2076,23 @@ func TestParse(t *testing.T) {
 					},
 				},
 					5*time.Minute,
-					newUnwrapExpr("foo", OpConvDuration),
-					nil),
-				OpRangeTypeStdvar, nil, nil,
-			),
-		},
-		{
-			in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap bytes(foo) [5m])`,
-			exp: newRangeAggregationExpr(
-				newLogRange(&PipelineExpr{
-					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
-					MultiStages: MultiStageExpr{
-						newLineFilterExpr(labels.MatchEqual, "", "level=error"),
-						newLabelParserExpr(OpParserTypeJSON, ""),
-						&LabelFilterExpr{
-							LabelFilterer: log.NewAndLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5),
-								log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
-							),
-						},
-					},
-				},
-					5*time.Minute,
-					newUnwrapExpr("foo", OpConvBytes),
-					nil),
-				OpRangeTypeSum, nil, nil,
-			),
-		},
-		{
-			in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap bytes(foo) [5m] offset 5m)`,
-			exp: newRangeAggregationExpr(
-				newLogRange(&PipelineExpr{
-					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
-					MultiStages: MultiStageExpr{
-						newLineFilterExpr(labels.MatchEqual, "", "level=error"),
-						newLabelParserExpr(OpParserTypeJSON, ""),
-						&LabelFilterExpr{
-							LabelFilterer: log.NewAndLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5),
-								log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
-							),
-						},
-					},
-				},
-					5*time.Minute,
-					newUnwrapExpr("foo", OpConvBytes),
-					newOffsetExpr(5*time.Minute)),
-				OpRangeTypeSum, nil, nil,
-			),
-		},
-		{
-			in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
-			exp: newRangeAggregationExpr(
-				newLogRange(&PipelineExpr{
-					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
-					MultiStages: MultiStageExpr{
-						newLineFilterExpr(labels.MatchEqual, "", "level=error"),
-						newLabelParserExpr(OpParserTypeJSON, ""),
-						&LabelFilterExpr{
-							LabelFilterer: log.NewAndLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5),
-								log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
-							),
-						},
-					},
-				},
-					5*time.Minute,
-					newUnwrapExpr("latency", ""),
-					nil),
-				OpRangeTypeSum, nil, nil,
-			),
-		},
-		{
-			in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo==5,bar<25ms| unwrap latency [5m])`,
-			exp: newRangeAggregationExpr(
-				newLogRange(&PipelineExpr{
-					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}),
-					MultiStages: MultiStageExpr{
-						newLineFilterExpr(labels.MatchEqual, "", "level=error"),
-						newLabelParserExpr(OpParserTypeJSON, ""),
-						&LabelFilterExpr{
-							LabelFilterer: log.NewAndLabelFilter(
-								log.NewNumericLabelFilter(log.LabelFilterEqual, "foo", 5),
-								log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond),
-							),
-						},
-					},
-				},
-					5*time.Minute,
-					newUnwrapExpr("latency", ""),
-					nil),
-				OpRangeTypeSum, nil, nil,
-			),
-		},
-		{
-			in: `stddev_over_time({app="foo"} |= "bar" | unwrap bar [5m])`,
-			exp: newRangeAggregationExpr(
-				newLogRange(&PipelineExpr{
-					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					MultiStages: MultiStageExpr{
-						newLineFilterExpr(labels.MatchEqual, "", "bar"),
-					},
-				},
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
-					nil),
-				OpRangeTypeStddev, nil, nil,
-			),
-		},
-		{
-			in: `min_over_time({app="foo"} | unwrap bar [5m])`,
-			exp: newRangeAggregationExpr(
-				newLogRange(
-					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
-					nil),
-				OpRangeTypeMin, nil, nil,
-			),
-		},
-		{
-			in: `min_over_time({app="foo"} | unwrap bar [5m]) by ()`,
-			exp: newRangeAggregationExpr(
-				newLogRange(
-					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
-					nil),
-				OpRangeTypeMin, &Grouping{}, nil,
-			),
-		},
-		{
-			in: `max_over_time({app="foo"} | unwrap bar [5m]) without ()`,
-			exp: newRangeAggregationExpr(
-				newLogRange(
-					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
-					nil),
-				OpRangeTypeMax, &Grouping{Without: true}, nil,
-			),
-		},
-		{
-			in: `max_over_time({app="foo"} | unwrap bar [5m]) without (foo,bar)`,
-			exp: newRangeAggregationExpr(
-				newLogRange(
-					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
-					nil),
-				OpRangeTypeMax, &Grouping{Without: true, Groups: []string{"foo", "bar"}}, nil,
-			),
-		},
-		{
-			in: `max_over_time({app="foo"} | unwrap bar [5m] offset 5m) without (foo,bar)`,
-			exp: newRangeAggregationExpr(
-				newLogRange(
-					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
+					newUnwrapExpr("foo", ""),
 					newOffsetExpr(5*time.Minute)),
-				OpRangeTypeMax, &Grouping{Without: true, Groups: []string{"foo", "bar"}}, nil,
-			),
-		},
-		{
-			in: `max_over_time({app="foo"} | unwrap bar [5m] offset -5m) without (foo,bar)`,
-			exp: newRangeAggregationExpr(
-				newLogRange(
-					newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-					5*time.Minute,
-					newUnwrapExpr("bar", ""),
-					newOffsetExpr(-5*time.Minute)),
-				OpRangeTypeMax, &Grouping{Without: true, Groups: []string{"foo", "bar"}}, nil,
+				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 			),
-		},
-		{
-			in: `max_over_time(({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo )[5m])`,
-			exp: newRangeAggregationExpr(
+			OpTypeSum,
+			&Grouping{Without: true, Groups: []string{"foo"}},
+			nil,
+		),
+	},
+	{
+		in: `sum without (foo) (
+			quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m]
+							) by (namespace,instance)
+				)`,
+		exp: mustNewVectorAggregationExpr(
+			newRangeAggregationExpr(
 				newLogRange(&PipelineExpr{
 					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 					MultiStages: MultiStageExpr{
@@ -1910,15 +2115,23 @@ func TestParse(t *testing.T) {
 					},
 				},
 					5*time.Minute,
-					newUnwrapExpr("foo", ""),
+					newUnwrapExpr("foo", OpConvDuration),
 					nil),
-				OpRangeTypeMax, nil, nil,
+				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 			),
-		},
-		{
-			in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`,
-			exp: newRangeAggregationExpr(
+			OpTypeSum,
+			&Grouping{Without: true, Groups: []string{"foo"}},
+			nil,
+		),
+	},
+	{
+		in: `sum without (foo) (
+			quantile_over_time(.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m]
+							) by (namespace,instance)
+				)`,
+		exp: mustNewVectorAggregationExpr(
+			newRangeAggregationExpr(
 				newLogRange(&PipelineExpr{
 					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 					MultiStages: MultiStageExpr{
@@ -1941,15 +2154,23 @@ func TestParse(t *testing.T) {
 					},
 				},
 					5*time.Minute,
-					newUnwrapExpr("foo", ""),
+					newUnwrapExpr("foo", OpConvDuration),
 					nil),
-				OpRangeTypeQuantile, nil, NewStringLabelFilter("0.99998"),
+				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter(".99998"),
 			),
-		},
-		{
-			in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]) by (namespace,instance)`,
-			exp: newRangeAggregationExpr(
+			OpTypeSum,
+			&Grouping{Without: true, Groups: []string{"foo"}},
+			nil,
+		),
+	},
+	{
+		in: `sum without (foo) (
+			quantile_over_time(.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration_seconds(foo) [5m]
+							) by (namespace,instance)
+				)`,
+		exp: mustNewVectorAggregationExpr(
+			newRangeAggregationExpr(
 				newLogRange(&PipelineExpr{
 					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 					MultiStages: MultiStageExpr{
@@ -1972,15 +2193,23 @@ func TestParse(t *testing.T) {
 					},
 				},
 					5*time.Minute,
-					newUnwrapExpr("foo", ""),
+					newUnwrapExpr("foo", OpConvDurationSeconds),
 					nil),
-				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
+				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter(".99998"),
 			),
-		},
-		{
-			in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo | __error__ !~".+"[5m]) by (namespace,instance)`,
-			exp: newRangeAggregationExpr(
+			OpTypeSum,
+			&Grouping{Without: true, Groups: []string{"foo"}},
+			nil,
+		),
+	},
+	{
+		in: `topk(10,
+				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
+								) by (namespace,instance)
+					)`,
+		exp: mustNewVectorAggregationExpr(
+			newRangeAggregationExpr(
 				newLogRange(&PipelineExpr{
 					Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
 					MultiStages: MultiStageExpr{
@@ -2003,57 +2232,33 @@ func TestParse(t *testing.T) {
 					},
 				},
 					5*time.Minute,
-					newUnwrapExpr("foo", "").addPostFilter(log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotRegexp, logqlmodel.ErrorLabel, ".+"))),
+					newUnwrapExpr("foo", ""),
 					nil),
 				OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 			),
-		},
-		{
-			in: `sum without (foo) (
+			OpTypeTopK,
+			nil,
+			NewStringLabelFilter("10"),
+		),
+	},
+	{
+		in: `
+			sum by (foo,bar) (
 				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
 					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
 								) by (namespace,instance)
-					)`,
-			exp: mustNewVectorAggregationExpr(
-				newRangeAggregationExpr(
-					newLogRange(&PipelineExpr{
-						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-						MultiStages: MultiStageExpr{
-							newLineFilterExpr(labels.MatchEqual, "", "bar"),
-							newLabelParserExpr(OpParserTypeJSON, ""),
-							&LabelFilterExpr{
-								LabelFilterer: log.NewOrLabelFilter(
-									log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-									log.NewAndLabelFilter(
-										log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-										log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-									),
-								),
-							},
-							newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-							newLabelFmtExpr([]log.LabelFmt{
-								log.NewRenameLabelFmt("foo", "bar"),
-								log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-							}),
-						},
-					},
-						5*time.Minute,
-						newUnwrapExpr("foo", ""),
-						nil),
-					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-				),
-				OpTypeSum,
-				&Grouping{Without: true, Groups: []string{"foo"}},
-				nil,
-			),
-		},
-		{
-			in: `sum without (foo) (
-				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m] offset 5m
-								) by (namespace,instance)
-					)`,
-			exp: mustNewVectorAggregationExpr(
+					)
+					+
+					avg(
+						avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+							| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
+										) by (namespace,instance)
+							) by (foo,bar)
+					`,
+		exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{
+			VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: false,
+		},
+			mustNewVectorAggregationExpr(
 				newRangeAggregationExpr(
 					newLogRange(&PipelineExpr{
 						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
@@ -2078,21 +2283,14 @@ func TestParse(t *testing.T) {
 					},
 						5*time.Minute,
 						newUnwrapExpr("foo", ""),
-						newOffsetExpr(5*time.Minute)),
+						nil),
 					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 				),
 				OpTypeSum,
-				&Grouping{Without: true, Groups: []string{"foo"}},
+				&Grouping{Groups: []string{"foo", "bar"}},
 				nil,
 			),
-		},
-		{
-			in: `sum without (foo) (
-			quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m]
-							) by (namespace,instance)
-				)`,
-			exp: mustNewVectorAggregationExpr(
+			mustNewVectorAggregationExpr(
 				newRangeAggregationExpr(
 					newLogRange(&PipelineExpr{
 						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
@@ -2116,22 +2314,32 @@ func TestParse(t *testing.T) {
 						},
 					},
 						5*time.Minute,
-						newUnwrapExpr("foo", OpConvDuration),
+						newUnwrapExpr("foo", ""),
 						nil),
-					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
+					OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
 				),
-				OpTypeSum,
-				&Grouping{Without: true, Groups: []string{"foo"}},
+				OpTypeAvg,
+				&Grouping{Groups: []string{"foo", "bar"}},
 				nil,
 			),
-		},
-		{
-			in: `sum without (foo) (
-			quantile_over_time(.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m]
-							) by (namespace,instance)
-				)`,
-			exp: mustNewVectorAggregationExpr(
+		),
+	},
+	{
+		in: `
+			sum by (foo,bar) (
+				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
+								) by (namespace,instance)
+					)
+					+ ignoring (bar)
+					avg(
+						avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+							| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
+										) by (namespace,instance)
+							) by (foo)
+					`,
+		exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{ReturnBool: false, VectorMatching: &VectorMatching{Card: CardOneToOne, On: false, MatchingLabels: []string{"bar"}}},
+			mustNewVectorAggregationExpr(
 				newRangeAggregationExpr(
 					newLogRange(&PipelineExpr{
 						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
@@ -2155,22 +2363,15 @@ func TestParse(t *testing.T) {
 						},
 					},
 						5*time.Minute,
-						newUnwrapExpr("foo", OpConvDuration),
+						newUnwrapExpr("foo", ""),
 						nil),
-					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter(".99998"),
+					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 				),
 				OpTypeSum,
-				&Grouping{Without: true, Groups: []string{"foo"}},
+				&Grouping{Groups: []string{"foo", "bar"}},
 				nil,
 			),
-		},
-		{
-			in: `sum without (foo) (
-			quantile_over_time(.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration_seconds(foo) [5m]
-							) by (namespace,instance)
-				)`,
-			exp: mustNewVectorAggregationExpr(
+			mustNewVectorAggregationExpr(
 				newRangeAggregationExpr(
 					newLogRange(&PipelineExpr{
 						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
@@ -2194,22 +2395,32 @@ func TestParse(t *testing.T) {
 						},
 					},
 						5*time.Minute,
-						newUnwrapExpr("foo", OpConvDurationSeconds),
+						newUnwrapExpr("foo", ""),
 						nil),
-					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter(".99998"),
+					OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
 				),
-				OpTypeSum,
-				&Grouping{Without: true, Groups: []string{"foo"}},
+				OpTypeAvg,
+				&Grouping{Groups: []string{"foo"}},
 				nil,
 			),
-		},
-		{
-			in: `topk(10,
+		),
+	},
+	{
+		in: `
+			sum by (foo,bar) (
 				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
 					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
 								) by (namespace,instance)
-					)`,
-			exp: mustNewVectorAggregationExpr(
+					)
+					+ on (foo)
+					avg(
+						avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+							| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
+										) by (namespace,instance)
+							) by (foo)
+					`,
+		exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{ReturnBool: false, VectorMatching: &VectorMatching{Card: CardOneToOne, On: true, MatchingLabels: []string{"foo"}}},
+			mustNewVectorAggregationExpr(
 				newRangeAggregationExpr(
 					newLogRange(&PipelineExpr{
 						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
@@ -2229,266 +2440,54 @@ func TestParse(t *testing.T) {
 							newLabelFmtExpr([]log.LabelFmt{
 								log.NewRenameLabelFmt("foo", "bar"),
 								log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-							}),
-						},
-					},
-						5*time.Minute,
-						newUnwrapExpr("foo", ""),
-						nil),
-					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-				),
-				OpTypeTopK,
-				nil,
-				NewStringLabelFilter("10"),
-			),
-		},
-		{
-			in: `
-			sum by (foo,bar) (
-				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
-								) by (namespace,instance)
-					)
-					+
-					avg(
-						avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-							| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
-										) by (namespace,instance)
-							) by (foo,bar)
-					`,
-			exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{
-				VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: false,
-			},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
-									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
-							},
-						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"foo", "bar"}},
-					nil,
-				),
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
-									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
-							},
-						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
-					),
-					OpTypeAvg,
-					&Grouping{Groups: []string{"foo", "bar"}},
-					nil,
-				),
-			),
-		},
-		{
-			in: `
-			sum by (foo,bar) (
-				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
-								) by (namespace,instance)
-					)
-					+ ignoring (bar)
-					avg(
-						avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-							| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
-										) by (namespace,instance)
-							) by (foo)
-					`,
-			exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{ReturnBool: false, VectorMatching: &VectorMatching{Card: CardOneToOne, On: false, MatchingLabels: []string{"bar"}}},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
-									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
-							},
-						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"foo", "bar"}},
-					nil,
-				),
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
-									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
-							},
-						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
-					),
-					OpTypeAvg,
-					&Grouping{Groups: []string{"foo"}},
-					nil,
-				),
-			),
-		},
-		{
-			in: `
-			sum by (foo,bar) (
-				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
-								) by (namespace,instance)
-					)
-					+ on (foo)
-					avg(
-						avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
-							| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
-										) by (namespace,instance)
-							) by (foo)
-					`,
-			exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{ReturnBool: false, VectorMatching: &VectorMatching{Card: CardOneToOne, On: true, MatchingLabels: []string{"foo"}}},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
-									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
-							},
+							}),
 						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"foo", "bar"}},
-					nil,
+					},
+						5*time.Minute,
+						newUnwrapExpr("foo", ""),
+						nil),
+					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 				),
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
+				OpTypeSum,
+				&Grouping{Groups: []string{"foo", "bar"}},
+				nil,
+			),
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					newLogRange(&PipelineExpr{
+						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						MultiStages: MultiStageExpr{
+							newLineFilterExpr(labels.MatchEqual, "", "bar"),
+							newLabelParserExpr(OpParserTypeJSON, ""),
+							&LabelFilterExpr{
+								LabelFilterer: log.NewOrLabelFilter(
+									log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+									log.NewAndLabelFilter(
+										log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+										log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
 									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
+								),
 							},
+							newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+							newLabelFmtExpr([]log.LabelFmt{
+								log.NewRenameLabelFmt("foo", "bar"),
+								log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+							}),
 						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
-					),
-					OpTypeAvg,
-					&Grouping{Groups: []string{"foo"}},
-					nil,
+					},
+						5*time.Minute,
+						newUnwrapExpr("foo", ""),
+						nil),
+					OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
 				),
+				OpTypeAvg,
+				&Grouping{Groups: []string{"foo"}},
+				nil,
 			),
-		},
-		{
-			in: `
+		),
+	},
+	{
+		in: `
 			sum by (foo,bar) (
 				quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
 					| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]
@@ -2501,137 +2500,137 @@ func TestParse(t *testing.T) {
 										) by (namespace,instance)
 							) by (foo)
 					`,
-			exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{ReturnBool: false, VectorMatching: &VectorMatching{Card: CardManyToOne, Include: []string{"foo"}, On: false, MatchingLabels: []string{"bar"}}},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
+		exp: mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{ReturnBool: false, VectorMatching: &VectorMatching{Card: CardManyToOne, Include: []string{"foo"}, On: false, MatchingLabels: []string{"bar"}}},
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					newLogRange(&PipelineExpr{
+						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						MultiStages: MultiStageExpr{
+							newLineFilterExpr(labels.MatchEqual, "", "bar"),
+							newLabelParserExpr(OpParserTypeJSON, ""),
+							&LabelFilterExpr{
+								LabelFilterer: log.NewOrLabelFilter(
+									log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+									log.NewAndLabelFilter(
+										log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+										log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
 									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
+								),
 							},
+							newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+							newLabelFmtExpr([]log.LabelFmt{
+								log.NewRenameLabelFmt("foo", "bar"),
+								log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+							}),
 						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"foo", "bar"}},
-					nil,
+					},
+						5*time.Minute,
+						newUnwrapExpr("foo", ""),
+						nil),
+					OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 				),
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						newLogRange(&PipelineExpr{
-							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							MultiStages: MultiStageExpr{
-								newLineFilterExpr(labels.MatchEqual, "", "bar"),
-								newLabelParserExpr(OpParserTypeJSON, ""),
-								&LabelFilterExpr{
-									LabelFilterer: log.NewOrLabelFilter(
-										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-										log.NewAndLabelFilter(
-											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-										),
+				OpTypeSum,
+				&Grouping{Groups: []string{"foo", "bar"}},
+				nil,
+			),
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					newLogRange(&PipelineExpr{
+						Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						MultiStages: MultiStageExpr{
+							newLineFilterExpr(labels.MatchEqual, "", "bar"),
+							newLabelParserExpr(OpParserTypeJSON, ""),
+							&LabelFilterExpr{
+								LabelFilterer: log.NewOrLabelFilter(
+									log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+									log.NewAndLabelFilter(
+										log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+										log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
 									),
-								},
-								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-								newLabelFmtExpr([]log.LabelFmt{
-									log.NewRenameLabelFmt("foo", "bar"),
-									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-								}),
+								),
 							},
+							newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+							newLabelFmtExpr([]log.LabelFmt{
+								log.NewRenameLabelFmt("foo", "bar"),
+								log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+							}),
 						},
-							5*time.Minute,
-							newUnwrapExpr("foo", ""),
-							nil),
-						OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
-					),
-					OpTypeAvg,
-					&Grouping{Groups: []string{"foo"}},
-					nil,
+					},
+						5*time.Minute,
+						newUnwrapExpr("foo", ""),
+						nil),
+					OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
 				),
+				OpTypeAvg,
+				&Grouping{Groups: []string{"foo"}},
+				nil,
 			),
-		},
-		{
-			in: `
+		),
+	},
+	{
+		in: `
 			sum by (app,machine) (count_over_time({app="foo"}[1m])) > bool on () group_right (app) sum by (app) (count_over_time({app="foo"}[1m]))
 					`,
-			exp: mustNewBinOpExpr(OpTypeGT, &BinOpOptions{ReturnBool: true, VectorMatching: &VectorMatching{Card: CardOneToMany, Include: []string{"app"}, On: true, MatchingLabels: nil}},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						&LogRange{
-							Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							Interval: 1 * time.Minute,
-						},
-						OpRangeTypeCount, nil, nil,
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"app", "machine"}},
-					nil,
+		exp: mustNewBinOpExpr(OpTypeGT, &BinOpOptions{ReturnBool: true, VectorMatching: &VectorMatching{Card: CardOneToMany, Include: []string{"app"}, On: true, MatchingLabels: nil}},
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					&LogRange{
+						Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						Interval: 1 * time.Minute,
+					},
+					OpRangeTypeCount, nil, nil,
 				),
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						&LogRange{
-							Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							Interval: 1 * time.Minute,
-						},
-						OpRangeTypeCount, nil, nil,
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"app"}},
-					nil,
+				OpTypeSum,
+				&Grouping{Groups: []string{"app", "machine"}},
+				nil,
+			),
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					&LogRange{
+						Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						Interval: 1 * time.Minute,
+					},
+					OpRangeTypeCount, nil, nil,
 				),
+				OpTypeSum,
+				&Grouping{Groups: []string{"app"}},
+				nil,
 			),
-		},
-		{
-			in: `
+		),
+	},
+	{
+		in: `
 			sum by (app,machine) (count_over_time({app="foo"}[1m])) > bool on () group_right sum by (app) (count_over_time({app="foo"}[1m]))
 					`,
-			exp: mustNewBinOpExpr(OpTypeGT, &BinOpOptions{ReturnBool: true, VectorMatching: &VectorMatching{Card: CardOneToMany, Include: nil, On: true, MatchingLabels: nil}},
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						&LogRange{
-							Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							Interval: 1 * time.Minute,
-						},
-						OpRangeTypeCount, nil, nil,
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"app", "machine"}},
-					nil,
+		exp: mustNewBinOpExpr(OpTypeGT, &BinOpOptions{ReturnBool: true, VectorMatching: &VectorMatching{Card: CardOneToMany, Include: nil, On: true, MatchingLabels: nil}},
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					&LogRange{
+						Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						Interval: 1 * time.Minute,
+					},
+					OpRangeTypeCount, nil, nil,
 				),
-				mustNewVectorAggregationExpr(
-					newRangeAggregationExpr(
-						&LogRange{
-							Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-							Interval: 1 * time.Minute,
-						},
-						OpRangeTypeCount, nil, nil,
-					),
-					OpTypeSum,
-					&Grouping{Groups: []string{"app"}},
-					nil,
+				OpTypeSum,
+				&Grouping{Groups: []string{"app", "machine"}},
+				nil,
+			),
+			mustNewVectorAggregationExpr(
+				newRangeAggregationExpr(
+					&LogRange{
+						Left:     newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+						Interval: 1 * time.Minute,
+					},
+					OpRangeTypeCount, nil, nil,
 				),
+				OpTypeSum,
+				&Grouping{Groups: []string{"app"}},
+				nil,
 			),
-		},
-		{
-			in: `
+		),
+	},
+	{
+		in: `
 			label_replace(
 				sum by (foo,bar) (
 					quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
@@ -2649,194 +2648,207 @@ func TestParse(t *testing.T) {
 				"svc",
 				"(.*)"
 				)`,
-			exp: mustNewLabelReplaceExpr(
-				mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: false},
-					mustNewVectorAggregationExpr(
-						newRangeAggregationExpr(
-							newLogRange(&PipelineExpr{
-								Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-								MultiStages: MultiStageExpr{
-									newLineFilterExpr(labels.MatchEqual, "", "bar"),
-									newLabelParserExpr(OpParserTypeJSON, ""),
-									&LabelFilterExpr{
-										LabelFilterer: log.NewOrLabelFilter(
-											log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-											log.NewAndLabelFilter(
-												log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-												log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-											),
+		exp: mustNewLabelReplaceExpr(
+			mustNewBinOpExpr(OpTypeAdd, &BinOpOptions{VectorMatching: &VectorMatching{Card: CardOneToOne}, ReturnBool: false},
+				mustNewVectorAggregationExpr(
+					newRangeAggregationExpr(
+						newLogRange(&PipelineExpr{
+							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+							MultiStages: MultiStageExpr{
+								newLineFilterExpr(labels.MatchEqual, "", "bar"),
+								newLabelParserExpr(OpParserTypeJSON, ""),
+								&LabelFilterExpr{
+									LabelFilterer: log.NewOrLabelFilter(
+										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+										log.NewAndLabelFilter(
+											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
 										),
-									},
-									newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-									newLabelFmtExpr([]log.LabelFmt{
-										log.NewRenameLabelFmt("foo", "bar"),
-										log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-									}),
+									),
 								},
+								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+								newLabelFmtExpr([]log.LabelFmt{
+									log.NewRenameLabelFmt("foo", "bar"),
+									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+								}),
 							},
-								5*time.Minute,
-								newUnwrapExpr("foo", ""),
-								nil),
-							OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
-						),
-						OpTypeSum,
-						&Grouping{Groups: []string{"foo", "bar"}},
-						nil,
+						},
+							5*time.Minute,
+							newUnwrapExpr("foo", ""),
+							nil),
+						OpRangeTypeQuantile, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"),
 					),
-					mustNewVectorAggregationExpr(
-						newRangeAggregationExpr(
-							newLogRange(&PipelineExpr{
-								Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-								MultiStages: MultiStageExpr{
-									newLineFilterExpr(labels.MatchEqual, "", "bar"),
-									newLabelParserExpr(OpParserTypeJSON, ""),
-									&LabelFilterExpr{
-										LabelFilterer: log.NewOrLabelFilter(
-											log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
-											log.NewAndLabelFilter(
-												log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
-												log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
-											),
+					OpTypeSum,
+					&Grouping{Groups: []string{"foo", "bar"}},
+					nil,
+				),
+				mustNewVectorAggregationExpr(
+					newRangeAggregationExpr(
+						newLogRange(&PipelineExpr{
+							Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+							MultiStages: MultiStageExpr{
+								newLineFilterExpr(labels.MatchEqual, "", "bar"),
+								newLabelParserExpr(OpParserTypeJSON, ""),
+								&LabelFilterExpr{
+									LabelFilterer: log.NewOrLabelFilter(
+										log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+										log.NewAndLabelFilter(
+											log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0),
+											log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0),
 										),
-									},
-									newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
-									newLabelFmtExpr([]log.LabelFmt{
-										log.NewRenameLabelFmt("foo", "bar"),
-										log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
-									}),
+									),
 								},
+								newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+								newLabelFmtExpr([]log.LabelFmt{
+									log.NewRenameLabelFmt("foo", "bar"),
+									log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+								}),
 							},
-								5*time.Minute,
-								newUnwrapExpr("foo", ""),
-								nil),
-							OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
-						),
-						OpTypeAvg,
-						&Grouping{Groups: []string{"foo", "bar"}},
-						nil,
+						},
+							5*time.Minute,
+							newUnwrapExpr("foo", ""),
+							nil),
+						OpRangeTypeAvg, &Grouping{Without: false, Groups: []string{"namespace", "instance"}}, nil,
 					),
+					OpTypeAvg,
+					&Grouping{Groups: []string{"foo", "bar"}},
+					nil,
 				),
-				"foo", "$1", "svc", "(.*)",
 			),
-		},
-		{
-			// ensure binary ops with two literals are reduced recursively
-			in:  `1 + 1 + 1`,
-			exp: &LiteralExpr{Val: 3},
-		},
-		{
-			// ensure binary ops with two literals are reduced when comparisons are used
-			in:  `1 == 1`,
-			exp: &LiteralExpr{Val: 1},
-		},
-		{
-			// ensure binary ops with two literals are reduced when comparisons are used
-			in:  `1 != 1`,
-			exp: &LiteralExpr{Val: 0},
-		},
-		{
-			// ensure binary ops with two literals are reduced when comparisons are used
-			in:  `1 > 1`,
-			exp: &LiteralExpr{Val: 0},
-		},
-		{
-			// ensure binary ops with two literals are reduced when comparisons are used
-			in:  `1 >= 1`,
-			exp: &LiteralExpr{Val: 1},
-		},
-		{
-			// ensure binary ops with two literals are reduced when comparisons are used
-			in:  `1 < 1`,
-			exp: &LiteralExpr{Val: 0},
-		},
-		{
-			// ensure binary ops with two literals are reduced when comparisons are used
-			in:  `1 <= 1`,
-			exp: &LiteralExpr{Val: 1},
-		},
-		{
-			// ensure binary ops with two literals are reduced recursively when comparisons are used
-			in:  `1 >= 1 > 1`,
-			exp: &LiteralExpr{Val: 0},
-		},
-		{
-			in:  `{foo="bar"} + {foo="bar"}`,
-			err: logqlmodel.NewParseError(`unexpected type for left leg of binary operation (+): *syntax.MatchersExpr`, 0, 0),
-		},
-		{
-			in:  `sum(count_over_time({foo="bar"}[5m])) by (foo) - {foo="bar"}`,
-			err: logqlmodel.NewParseError(`unexpected type for right leg of binary operation (-): *syntax.MatchersExpr`, 0, 0),
-		},
-		{
-			in:  `{foo="bar"} / sum(count_over_time({foo="bar"}[5m])) by (foo)`,
-			err: logqlmodel.NewParseError(`unexpected type for left leg of binary operation (/): *syntax.MatchersExpr`, 0, 0),
-		},
-		{
-			in:  `sum(count_over_time({foo="bar"}[5m])) by (foo) or 1`,
-			err: logqlmodel.NewParseError(`unexpected literal for right leg of logical/set binary operation (or): 1.000000`, 0, 0),
-		},
-		{
-			in:  `1 unless sum(count_over_time({foo="bar"}[5m])) by (foo)`,
-			err: logqlmodel.NewParseError(`unexpected literal for left leg of logical/set binary operation (unless): 1.000000`, 0, 0),
-		},
-		{
-			in:  `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 or 1`,
-			err: logqlmodel.NewParseError(`unexpected literal for right leg of logical/set binary operation (or): 1.000000`, 0, 0),
-		},
-		{
-			in: `count_over_time({ foo ="bar" }[12m]) > count_over_time({ foo = "bar" }[12m])`,
-			exp: &BinOpExpr{
-				Op: OpTypeGT,
-				Opts: &BinOpOptions{
-					ReturnBool:     false,
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
-				},
-				SampleExpr: &RangeAggregationExpr{
-					Left: &LogRange{
-						Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-						Interval: 12 * time.Minute,
-					},
-					Operation: "count_over_time",
+			"foo", "$1", "svc", "(.*)",
+		),
+	},
+	{
+		// ensure binary ops with two literals are reduced recursively
+		in:  `1 + 1 + 1`,
+		exp: &LiteralExpr{Val: 3},
+	},
+	{
+		// ensure binary ops with two literals are reduced when comparisons are used
+		in:  `1 == 1`,
+		exp: &LiteralExpr{Val: 1},
+	},
+	{
+		// ensure binary ops with two literals are reduced when comparisons are used
+		in:  `1 != 1`,
+		exp: &LiteralExpr{Val: 0},
+	},
+	{
+		// ensure binary ops with two literals are reduced when comparisons are used
+		in:  `1 > 1`,
+		exp: &LiteralExpr{Val: 0},
+	},
+	{
+		// ensure binary ops with two literals are reduced when comparisons are used
+		in:  `1 >= 1`,
+		exp: &LiteralExpr{Val: 1},
+	},
+	{
+		// ensure binary ops with two literals are reduced when comparisons are used
+		in:  `1 < 1`,
+		exp: &LiteralExpr{Val: 0},
+	},
+	{
+		// ensure binary ops with two literals are reduced when comparisons are used
+		in:  `1 <= 1`,
+		exp: &LiteralExpr{Val: 1},
+	},
+	{
+		// ensure binary ops with two literals are reduced recursively when comparisons are used
+		in:  `1 >= 1 > 1`,
+		exp: &LiteralExpr{Val: 0},
+	},
+	{
+		in:  `{foo="bar"} + {foo="bar"}`,
+		err: logqlmodel.NewParseError(`unexpected type for left leg of binary operation (+): *syntax.MatchersExpr`, 0, 0),
+	},
+	{
+		in:  `sum(count_over_time({foo="bar"}[5m])) by (foo) - {foo="bar"}`,
+		err: logqlmodel.NewParseError(`unexpected type for right leg of binary operation (-): *syntax.MatchersExpr`, 0, 0),
+	},
+	{
+		in:  `{foo="bar"} / sum(count_over_time({foo="bar"}[5m])) by (foo)`,
+		err: logqlmodel.NewParseError(`unexpected type for left leg of binary operation (/): *syntax.MatchersExpr`, 0, 0),
+	},
+	{
+		in:  `sum(count_over_time({foo="bar"}[5m])) by (foo) or 1`,
+		err: logqlmodel.NewParseError(`unexpected literal for right leg of logical/set binary operation (or): 1.000000`, 0, 0),
+	},
+	{
+		in:  `1 unless sum(count_over_time({foo="bar"}[5m])) by (foo)`,
+		err: logqlmodel.NewParseError(`unexpected literal for left leg of logical/set binary operation (unless): 1.000000`, 0, 0),
+	},
+	{
+		in:  `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 or 1`,
+		err: logqlmodel.NewParseError(`unexpected literal for right leg of logical/set binary operation (or): 1.000000`, 0, 0),
+	},
+	{
+		in: `count_over_time({ foo ="bar" }[12m]) > count_over_time({ foo = "bar" }[12m])`,
+		exp: &BinOpExpr{
+			Op: OpTypeGT,
+			Opts: &BinOpOptions{
+				ReturnBool:     false,
+				VectorMatching: &VectorMatching{Card: CardOneToOne},
+			},
+			SampleExpr: &RangeAggregationExpr{
+				Left: &LogRange{
+					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+					Interval: 12 * time.Minute,
 				},
-				RHS: &RangeAggregationExpr{
-					Left: &LogRange{
-						Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-						Interval: 12 * time.Minute,
-					},
-					Operation: "count_over_time",
+				Operation: "count_over_time",
+			},
+			RHS: &RangeAggregationExpr{
+				Left: &LogRange{
+					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+					Interval: 12 * time.Minute,
 				},
+				Operation: "count_over_time",
 			},
 		},
-		{
-			in: `count_over_time({ foo = "bar" }[12m]) > 1`,
-			exp: &BinOpExpr{
-				Op: OpTypeGT,
-				Opts: &BinOpOptions{
-					ReturnBool:     false,
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
+	},
+	{
+		in: `count_over_time({ foo = "bar" }[12m]) > 1`,
+		exp: &BinOpExpr{
+			Op: OpTypeGT,
+			Opts: &BinOpOptions{
+				ReturnBool:     false,
+				VectorMatching: &VectorMatching{Card: CardOneToOne},
+			},
+			SampleExpr: &RangeAggregationExpr{
+				Left: &LogRange{
+					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+					Interval: 12 * time.Minute,
 				},
-				SampleExpr: &RangeAggregationExpr{
-					Left: &LogRange{
-						Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-						Interval: 12 * time.Minute,
-					},
-					Operation: "count_over_time",
+				Operation: "count_over_time",
+			},
+			RHS: &LiteralExpr{Val: 1},
+		},
+	},
+	{
+		// cannot compare metric & log queries
+		in:  `count_over_time({ foo = "bar" }[12m]) > { foo = "bar" }`,
+		err: logqlmodel.NewParseError("unexpected type for right leg of binary operation (>): *syntax.MatchersExpr", 0, 0),
+	},
+	{
+		in: `count_over_time({ foo = "bar" }[12m]) or count_over_time({ foo = "bar" }[12m]) > 1`,
+		exp: &BinOpExpr{
+			Op: OpTypeOr,
+			Opts: &BinOpOptions{
+				ReturnBool:     false,
+				VectorMatching: &VectorMatching{},
+			},
+			SampleExpr: &RangeAggregationExpr{
+				Left: &LogRange{
+					Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+					Interval: 12 * time.Minute,
 				},
-				RHS: &LiteralExpr{Val: 1},
+				Operation: "count_over_time",
 			},
-		},
-		{
-			// cannot compare metric & log queries
-			in:  `count_over_time({ foo = "bar" }[12m]) > { foo = "bar" }`,
-			err: logqlmodel.NewParseError("unexpected type for right leg of binary operation (>): *syntax.MatchersExpr", 0, 0),
-		},
-		{
-			in: `count_over_time({ foo = "bar" }[12m]) or count_over_time({ foo = "bar" }[12m]) > 1`,
-			exp: &BinOpExpr{
-				Op: OpTypeOr,
+			RHS: &BinOpExpr{
+				Op: OpTypeGT,
 				Opts: &BinOpOptions{
 					ReturnBool:     false,
-					VectorMatching: &VectorMatching{},
+					VectorMatching: &VectorMatching{Card: CardOneToOne},
 				},
 				SampleExpr: &RangeAggregationExpr{
 					Left: &LogRange{
@@ -2845,301 +2857,291 @@ func TestParse(t *testing.T) {
 					},
 					Operation: "count_over_time",
 				},
-				RHS: &BinOpExpr{
-					Op: OpTypeGT,
-					Opts: &BinOpOptions{
-						ReturnBool:     false,
-						VectorMatching: &VectorMatching{Card: CardOneToOne},
-					},
-					SampleExpr: &RangeAggregationExpr{
-						Left: &LogRange{
-							Left:     &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
-							Interval: 12 * time.Minute,
-						},
-						Operation: "count_over_time",
-					},
-					RHS: &LiteralExpr{Val: 1},
-				},
-			},
-		},
-		{
-			// test associativity
-			in:  `1 > 1 < 1`,
-			exp: &LiteralExpr{Val: 1},
-		},
-		{
-			// bool modifiers are reduced-away between two literal legs
-			in:  `1 > 1 > bool 1`,
-			exp: &LiteralExpr{Val: 0},
-		},
-		{
-			// cannot lead with bool modifier
-			in:  `bool 1 > 1 > bool 1`,
-			err: logqlmodel.NewParseError("syntax error: unexpected bool", 1, 1),
-		},
-		{
-			in:  `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m]) by (foo)`,
-			err: logqlmodel.NewParseError("grouping not allowed for sum_over_time aggregation", 0, 0),
-		},
-		{
-			in:  `sum_over_time(50,{namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
-			err: logqlmodel.NewParseError("parameter 50 not supported for operation sum_over_time", 0, 0),
-		},
-		{
-			in:  `quantile_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
-			err: logqlmodel.NewParseError("parameter required for operation quantile_over_time", 0, 0),
-		},
-		{
-			in:  `quantile_over_time(foo,{namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
-			err: logqlmodel.NewParseError("syntax error: unexpected IDENTIFIER, expecting NUMBER or { or (", 1, 20),
-		},
-		{
-			in:  `vector(abc)`,
-			err: logqlmodel.NewParseError("syntax error: unexpected IDENTIFIER, expecting NUMBER", 1, 8),
-		},
-		{
-			in:  `vector(1)`,
-			exp: &VectorExpr{Val: 1, err: nil},
-		},
-		{
-			in:  `label_replace(vector(0), "foo", "bar", "", "")`,
-			exp: mustNewLabelReplaceExpr(&VectorExpr{Val: 0, err: nil}, "foo", "bar", "", ""),
-		},
-		{
-			in: `sum(vector(0))`,
-			exp: &VectorAggregationExpr{
-				Left:      &VectorExpr{Val: 0, err: nil},
-				Grouping:  &Grouping{},
-				Params:    0,
-				Operation: "sum",
+				RHS: &LiteralExpr{Val: 1},
 			},
 		},
-		{
-			in: `{app="foo"}
+	},
+	{
+		// test associativity
+		in:  `1 > 1 < 1`,
+		exp: &LiteralExpr{Val: 1},
+	},
+	{
+		// bool modifiers are reduced away between two literal legs
+		in:  `1 > 1 > bool 1`,
+		exp: &LiteralExpr{Val: 0},
+	},
+	{
+		// cannot lead with bool modifier
+		in:  `bool 1 > 1 > bool 1`,
+		err: logqlmodel.NewParseError("syntax error: unexpected bool", 1, 1),
+	},
+	{
+		in:  `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m]) by (foo)`,
+		err: logqlmodel.NewParseError("grouping not allowed for sum_over_time aggregation", 0, 0),
+	},
+	{
+		in:  `sum_over_time(50,{namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
+		err: logqlmodel.NewParseError("parameter 50 not supported for operation sum_over_time", 0, 0),
+	},
+	{
+		in:  `quantile_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
+		err: logqlmodel.NewParseError("parameter required for operation quantile_over_time", 0, 0),
+	},
+	{
+		in:  `quantile_over_time(foo,{namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`,
+		err: logqlmodel.NewParseError("syntax error: unexpected IDENTIFIER, expecting NUMBER or { or (", 1, 20),
+	},
+	{
+		in:  `vector(abc)`,
+		err: logqlmodel.NewParseError("syntax error: unexpected IDENTIFIER, expecting NUMBER", 1, 8),
+	},
+	{
+		in:  `vector(1)`,
+		exp: &VectorExpr{Val: 1, err: nil},
+	},
+	{
+		in:  `label_replace(vector(0), "foo", "bar", "", "")`,
+		exp: mustNewLabelReplaceExpr(&VectorExpr{Val: 0, err: nil}, "foo", "bar", "", ""),
+	},
+	{
+		in: `sum(vector(0))`,
+		exp: &VectorAggregationExpr{
+			Left:      &VectorExpr{Val: 0, err: nil},
+			Grouping:  &Grouping{},
+			Params:    0,
+			Operation: "sum",
+		},
+	},
+	{
+		in: `{app="foo"}
 					# |= "bar"
 					| json`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLabelParserExpr(OpParserTypeJSON, ""),
-				},
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLabelParserExpr(OpParserTypeJSON, ""),
 			},
 		},
-		{
-			in: `{app="foo"}
+	},
+	{
+		in: `{app="foo"}
 					#
 					|= "bar"
 					| json`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "bar"),
-					newLabelParserExpr(OpParserTypeJSON, ""),
-				},
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "bar"),
+				newLabelParserExpr(OpParserTypeJSON, ""),
 			},
 		},
-		{
-			in:  `{app="foo"} # |= "bar" | json`,
-			exp: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-		},
-		{
-			in: `{app="foo"} | json #`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLabelParserExpr(OpParserTypeJSON, ""),
-				},
+	},
+	{
+		in:  `{app="foo"} # |= "bar" | json`,
+		exp: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+	},
+	{
+		in: `{app="foo"} | json #`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLabelParserExpr(OpParserTypeJSON, ""),
 			},
 		},
-		{
-			in:  `#{app="foo"} | json`,
-			err: logqlmodel.NewParseError("syntax error: unexpected $end", 1, 20),
-		},
-		{
-			in:  `{app="#"}`,
-			exp: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "#"}}),
-		},
-		{
-			in: `{app="foo"} |= "#"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLineFilterExpr(labels.MatchEqual, "", "#"),
-				},
+	},
+	{
+		in:  `#{app="foo"} | json`,
+		err: logqlmodel.NewParseError("syntax error: unexpected $end", 1, 20),
+	},
+	{
+		in:  `{app="#"}`,
+		exp: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "#"}}),
+	},
+	{
+		in: `{app="foo"} |= "#"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLineFilterExpr(labels.MatchEqual, "", "#"),
 			},
 		},
-		{
-			in: `{app="foo"} | bar="#"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					&LabelFilterExpr{
-						LabelFilterer: log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "bar", "#")),
-					},
+	},
+	{
+		in: `{app="foo"} | bar="#"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				&LabelFilterExpr{
+					LabelFilterer: log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "bar", "#")),
 				},
 			},
 		},
-		{
-			in: `{app="foo"} | json bob="top.sub[\"index\"]"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newJSONExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("bob", `top.sub["index"]`),
-					}),
-				},
+	},
+	{
+		in: `{app="foo"} | json bob="top.sub[\"index\"]"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newJSONExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("bob", `top.sub["index"]`),
+				}),
 			},
 		},
-		{
-			in: `{app="foo"} | json bob="top.params[0]"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newJSONExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("bob", `top.params[0]`),
-					}),
-				},
+	},
+	{
+		in: `{app="foo"} | json bob="top.params[0]"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newJSONExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("bob", `top.params[0]`),
+				}),
 			},
 		},
-		{
-			in: `{app="foo"} | json response_code="response.code", api_key="request.headers[\"X-API-KEY\"]"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newJSONExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("response_code", `response.code`),
-						log.NewLabelExtractionExpr("api_key", `request.headers["X-API-KEY"]`),
-					}),
-				},
+	},
+	{
+		in: `{app="foo"} | json response_code="response.code", api_key="request.headers[\"X-API-KEY\"]"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newJSONExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("response_code", `response.code`),
+					log.NewLabelExtractionExpr("api_key", `request.headers["X-API-KEY"]`),
+				}),
 			},
 		},
-		{
-			in: `{app="foo"} | json response_code, api_key="request.headers[\"X-API-KEY\"]", layer7_something_specific="layer7_something_specific"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newJSONExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("response_code", `response_code`),
-						log.NewLabelExtractionExpr("api_key", `request.headers["X-API-KEY"]`),
-						log.NewLabelExtractionExpr("layer7_something_specific", `layer7_something_specific`),
-					}),
-				},
+	},
+	{
+		in: `{app="foo"} | json response_code, api_key="request.headers[\"X-API-KEY\"]", layer7_something_specific="layer7_something_specific"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newJSONExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("response_code", `response_code`),
+					log.NewLabelExtractionExpr("api_key", `request.headers["X-API-KEY"]`),
+					log.NewLabelExtractionExpr("layer7_something_specific", `layer7_something_specific`),
+				}),
 			},
 		},
-		{
-			in: `count_over_time({ foo ="bar" } | json layer7_something_specific="layer7_something_specific" [12m])`,
-			exp: &RangeAggregationExpr{
-				Left: &LogRange{
-					Left: &PipelineExpr{
-						MultiStages: MultiStageExpr{
-							newJSONExpressionParser([]log.LabelExtractionExpr{
-								log.NewLabelExtractionExpr("layer7_something_specific", `layer7_something_specific`),
-							}),
-						},
-						Left: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
+	},
+	{
+		in: `count_over_time({ foo ="bar" } | json layer7_something_specific="layer7_something_specific" [12m])`,
+		exp: &RangeAggregationExpr{
+			Left: &LogRange{
+				Left: &PipelineExpr{
+					MultiStages: MultiStageExpr{
+						newJSONExpressionParser([]log.LabelExtractionExpr{
+							log.NewLabelExtractionExpr("layer7_something_specific", `layer7_something_specific`),
+						}),
 					},
-					Interval: 12 * time.Minute,
+					Left: &MatchersExpr{Mts: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}},
 				},
-				Operation: "count_over_time",
+				Interval: 12 * time.Minute,
 			},
+			Operation: "count_over_time",
 		},
-		{
-			// binop always includes vector matching. Default is `without ()`,
-			// the zero value.
-			in: `
+	},
+	{
+		// binop always includes vector matching. Default is `without ()`,
+		// the zero value.
+		in: `
 			sum(count_over_time({foo="bar"}[5m])) or vector(1)
 			`,
-			exp: mustNewBinOpExpr(
-				OpTypeOr,
-				&BinOpOptions{
-					VectorMatching: &VectorMatching{Card: CardOneToOne},
-				},
-				mustNewVectorAggregationExpr(newRangeAggregationExpr(
-					&LogRange{
-						Left: &MatchersExpr{
-							Mts: []*labels.Matcher{
-								mustNewMatcher(labels.MatchEqual, "foo", "bar"),
-							},
+		exp: mustNewBinOpExpr(
+			OpTypeOr,
+			&BinOpOptions{
+				VectorMatching: &VectorMatching{Card: CardOneToOne},
+			},
+			mustNewVectorAggregationExpr(newRangeAggregationExpr(
+				&LogRange{
+					Left: &MatchersExpr{
+						Mts: []*labels.Matcher{
+							mustNewMatcher(labels.MatchEqual, "foo", "bar"),
 						},
-						Interval: 5 * time.Minute,
-					}, OpRangeTypeCount, nil, nil),
-					"sum",
-					&Grouping{},
-					nil,
-				),
-				NewVectorExpr("1"),
+					},
+					Interval: 5 * time.Minute,
+				}, OpRangeTypeCount, nil, nil),
+				"sum",
+				&Grouping{},
+				nil,
 			),
-		},
-		{
-			in: `{app="foo"} | logfmt message="msg"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLogfmtExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("message", `msg`),
-					}, nil),
-				},
+			NewVectorExpr("1"),
+		),
+	},
+	{
+		in: `{app="foo"} | logfmt message="msg"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLogfmtExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("message", `msg`),
+				}, nil),
 			},
 		},
-		{
-			in: `{app="foo"} | logfmt msg`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLogfmtExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("msg", `msg`),
-					}, nil),
-				},
+	},
+	{
+		in: `{app="foo"} | logfmt msg`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLogfmtExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("msg", `msg`),
+				}, nil),
 			},
 		},
-		{
-			in: `{app="foo"} | logfmt --strict msg`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLogfmtExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("msg", `msg`),
-					}, []string{OpStrict}),
-				},
+	},
+	{
+		in: `{app="foo"} | logfmt --strict msg`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLogfmtExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("msg", `msg`),
+				}, []string{OpStrict}),
 			},
 		},
-		{
-			in: `{app="foo"} | logfmt --keep-empty msg, err `,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLogfmtExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("msg", `msg`),
-						log.NewLabelExtractionExpr("err", `err`),
-					}, []string{OpKeepEmpty}),
-				},
+	},
+	{
+		in: `{app="foo"} | logfmt --keep-empty msg, err `,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLogfmtExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("msg", `msg`),
+					log.NewLabelExtractionExpr("err", `err`),
+				}, []string{OpKeepEmpty}),
 			},
 		},
-		{
-			in: `{app="foo"} | logfmt msg, err="error"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLogfmtExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("msg", `msg`),
-						log.NewLabelExtractionExpr("err", `error`),
-					}, nil),
-				},
+	},
+	{
+		in: `{app="foo"} | logfmt msg, err="error"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLogfmtExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("msg", `msg`),
+					log.NewLabelExtractionExpr("err", `error`),
+				}, nil),
 			},
 		},
-		{
-			in: `{app="foo"} | logfmt --strict --keep-empty msg="message", apiKey="api_key"`,
-			exp: &PipelineExpr{
-				Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
-				MultiStages: MultiStageExpr{
-					newLogfmtExpressionParser([]log.LabelExtractionExpr{
-						log.NewLabelExtractionExpr("msg", `message`),
-						log.NewLabelExtractionExpr("apiKey", `api_key`),
-					}, []string{OpStrict, OpKeepEmpty}),
-				},
+	},
+	{
+		in: `{app="foo"} | logfmt --strict --keep-empty msg="message", apiKey="api_key"`,
+		exp: &PipelineExpr{
+			Left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+			MultiStages: MultiStageExpr{
+				newLogfmtExpressionParser([]log.LabelExtractionExpr{
+					log.NewLabelExtractionExpr("msg", `message`),
+					log.NewLabelExtractionExpr("apiKey", `api_key`),
+				}, []string{OpStrict, OpKeepEmpty}),
 			},
 		},
-	} {
+	},
+}
+
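+// TestParse parses every expression in ParseTestCases and asserts that the
+// resulting AST (or parse error) matches the expected value.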
+func TestParse(t *testing.T) {
+	for _, tc := range ParseTestCases {
 		t.Run(tc.in, func(t *testing.T) {
 			ast, err := ParseExpr(tc.in)
 			require.Equal(t, tc.err, err)
diff --git a/pkg/logql/syntax/serialize.go b/pkg/logql/syntax/serialize.go
new file mode 100644
index 0000000000000..2d7a1d786fda7
--- /dev/null
+++ b/pkg/logql/syntax/serialize.go
@@ -0,0 +1,923 @@
+package syntax
+
+import (
+	"fmt"
+	"io"
+	"time"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/prometheus/prometheus/model/labels"
+
+	"github.com/grafana/loki/pkg/logql/log"
+)
+
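+// JSONSerializer writes a LogQL expression AST as JSON to the wrapped
+// jsoniter stream. It implements RootVisitor, so an expression serializes
+// itself via Accept.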
+type JSONSerializer struct {
+	*jsoniter.Stream
+}
+
+func NewJSONSerializer(s *jsoniter.Stream) *JSONSerializer {
+	return &JSONSerializer{
+		Stream: s,
+	}
+}
+
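+// EncodeJSON writes the JSON representation of the expression to w,
+// using a pooled jsoniter stream.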
+func EncodeJSON(e Expr, w io.Writer) error {
+	s := jsoniter.ConfigFastest.BorrowStream(w)
+	defer jsoniter.ConfigFastest.ReturnStream(s)
+	v := NewJSONSerializer(s)
+	e.Accept(v)
+	return s.Flush()
+}
+
+// Field names used as JSON object keys in the serialized expression tree.
+const (
+	Bin                 = "bin"
+	Binary              = "binary"
+	Bytes               = "bytes"
+	And                 = "and"
+	Card                = "cardinality"
+	Dst                 = "dst"
+	Duration            = "duration"
+	Groups              = "groups"
+	GroupingField       = "grouping"
+	Include             = "include"
+	Identifier          = "identifier"
+	Inner               = "inner"
+	IntervalNanos       = "interval_nanos"
+	IPField             = "ip"
+	Label               = "label"
+	LabelReplace        = "label_replace"
+	LHS                 = "lhs"
+	Literal             = "literal"
+	LogSelector         = "log_selector"
+	Name                = "name"
+	Numeric             = "numeric"
+	MatchingLabels      = "matching_labels"
+	On                  = "on"
+	Op                  = "operation"
+	Options             = "options"
+	OffsetNanos         = "offset_nanos"
+	Params              = "params"
+	Pattern             = "pattern"
+	PostFilterers       = "post_filterers"
+	Range               = "range"
+	RangeAgg            = "range_agg"
+	Raw                 = "raw"
+	RegexField          = "regex"
+	Replacement         = "replacement"
+	ReturnBool          = "return_bool"
+	RHS                 = "rhs"
+	Src                 = "src"
+	StringField         = "string"
+	Type                = "type"
+	Unwrap              = "unwrap"
+	Value               = "value"
+	Vector              = "vector"
+	VectorAgg           = "vector_agg"
+	VectorMatchingField = "vector_matching"
+	Without             = "without"
+)
+
+func DecodeJSON(raw string) (Expr, error) {
+	iter := jsoniter.ParseString(jsoniter.ConfigFastest, raw)
+
+	key := iter.ReadObject()
+	switch key {
+	case Bin:
+		return decodeBinOp(iter)
+	case VectorAgg:
+		return decodeVectorAgg(iter)
+	case RangeAgg:
+		return decodeRangeAgg(iter)
+	case Literal:
+		return decodeLiteral(iter)
+	case Vector:
+		return decodeVector(iter)
+	case LabelReplace:
+		return decodeLabelReplace(iter)
+	case LogSelector:
+		return decodeLogSelector(iter)
+	default:
+		return nil, fmt.Errorf("unknown expression type: %s", key)
+	}
+}
+
+var _ RootVisitor = &JSONSerializer{}
+
+func (v *JSONSerializer) VisitBinOp(e *BinOpExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Bin)
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Op)
+	v.WriteString(e.Op)
+
+	v.WriteMore()
+	v.WriteObjectField(LHS)
+	e.SampleExpr.Accept(v)
+
+	v.WriteMore()
+	v.WriteObjectField(RHS)
+	e.RHS.Accept(v)
+
+	if e.Opts != nil {
+		v.WriteMore()
+		v.WriteObjectField(Options)
+		v.WriteObjectStart()
+
+		v.WriteObjectField(ReturnBool)
+		v.WriteBool(e.Opts.ReturnBool)
+
+		if e.Opts.VectorMatching != nil {
+			v.WriteMore()
+			v.WriteObjectField(VectorMatchingField)
+			encodeVectorMatching(v.Stream, e.Opts.VectorMatching)
+		}
+
+		v.WriteObjectEnd()
+		v.Flush()
+
+	}
+
+	v.WriteObjectEnd()
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitVectorAggregation(e *VectorAggregationExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(VectorAgg)
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Params)
+	v.WriteInt(e.Params)
+
+	v.WriteMore()
+	v.WriteObjectField(Op)
+	v.WriteString(e.Operation)
+
+	if e.Grouping != nil {
+		v.WriteMore()
+		v.WriteObjectField(GroupingField)
+		encodeGrouping(v.Stream, e.Grouping)
+	}
+
+	v.WriteMore()
+	v.WriteObjectField(Inner)
+	e.Left.Accept(v)
+
+	v.WriteObjectEnd()
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitRangeAggregation(e *RangeAggregationExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(RangeAgg)
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Op)
+	v.WriteString(e.Operation)
+
+	if e.Grouping != nil {
+		v.WriteMore()
+		v.WriteObjectField(GroupingField)
+		encodeGrouping(v.Stream, e.Grouping)
+	}
+
+	if e.Params != nil {
+		v.WriteMore()
+		v.WriteObjectField(Params)
+		v.WriteFloat64(*e.Params)
+	}
+
+	v.WriteMore()
+	v.WriteObjectField(Range)
+	v.VisitLogRange(e.Left)
+	v.WriteObjectEnd()
+
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitLogRange(e *LogRange) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(IntervalNanos)
+	v.WriteInt64(int64(e.Interval))
+	v.WriteMore()
+	v.WriteObjectField(OffsetNanos)
+	v.WriteInt64(int64(e.Offset))
+
+	// Serialize the log selector pipeline as a string.
+	v.WriteMore()
+	v.WriteObjectField(LogSelector)
+	encodeLogSelector(v.Stream, e.Left)
+
+	if e.Unwrap != nil {
+		v.WriteMore()
+		v.WriteObjectField(Unwrap)
+		encodeUnwrap(v.Stream, e.Unwrap)
+	}
+
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitLabelReplace(e *LabelReplaceExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(LabelReplace)
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Inner)
+	e.Left.Accept(v)
+
+	v.WriteMore()
+	v.WriteObjectField(Dst)
+	v.WriteString(e.Dst)
+
+	v.WriteMore()
+	v.WriteObjectField(Src)
+	v.WriteString(e.Src)
+
+	v.WriteMore()
+	v.WriteObjectField(Replacement)
+	v.WriteString(e.Replacement)
+
+	v.WriteMore()
+	v.WriteObjectField(RegexField)
+	v.WriteString(e.Regex)
+
+	v.WriteObjectEnd()
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitLiteral(e *LiteralExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Literal)
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Value)
+	v.WriteFloat64(e.Val)
+
+	v.WriteObjectEnd()
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitVector(e *VectorExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Vector)
+	v.WriteObjectStart()
+
+	v.WriteObjectField(Value)
+	v.WriteFloat64(e.Val)
+
+	v.WriteObjectEnd()
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitMatchers(e *MatchersExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(LogSelector)
+	encodeLogSelector(v.Stream, e)
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+func (v *JSONSerializer) VisitPipeline(e *PipelineExpr) {
+	v.WriteObjectStart()
+
+	v.WriteObjectField(LogSelector)
+	encodeLogSelector(v.Stream, e)
+	v.WriteObjectEnd()
+	v.Flush()
+}
+
+// The StageExpr visitors below are no-ops because the whole pipeline is
+// serialized as a single string.
+func (*JSONSerializer) VisitDecolorize(*DecolorizeExpr)                     {}
+func (*JSONSerializer) VisitDropLabels(*DropLabelsExpr)                     {}
+func (*JSONSerializer) VisitJSONExpressionParser(*JSONExpressionParser)     {}
+func (*JSONSerializer) VisitKeepLabel(*KeepLabelsExpr)                      {}
+func (*JSONSerializer) VisitLabelFilter(*LabelFilterExpr)                   {}
+func (*JSONSerializer) VisitLabelFmt(*LabelFmtExpr)                         {}
+func (*JSONSerializer) VisitLabelParser(*LabelParserExpr)                   {}
+func (*JSONSerializer) VisitLineFilter(*LineFilterExpr)                     {}
+func (*JSONSerializer) VisitLineFmt(*LineFmtExpr)                           {}
+func (*JSONSerializer) VisitLogfmtExpressionParser(*LogfmtExpressionParser) {}
+func (*JSONSerializer) VisitLogfmtParser(*LogfmtParserExpr)                 {}
+
+func encodeGrouping(s *jsoniter.Stream, g *Grouping) {
+	s.WriteObjectStart()
+	s.WriteObjectField(Without)
+	s.WriteBool(g.Without)
+
+	s.WriteMore()
+	s.WriteObjectField(Groups)
+	s.WriteArrayStart()
+	for i, group := range g.Groups {
+		if i > 0 {
+			s.WriteMore()
+		}
+		s.WriteString(group)
+	}
+	s.WriteArrayEnd()
+	s.WriteObjectEnd()
+}
+
+func decodeGrouping(iter *jsoniter.Iterator) (*Grouping, error) {
+	g := &Grouping{}
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Without:
+			g.Without = iter.ReadBool()
+		case Groups:
+			iter.ReadArrayCB(func(iter *jsoniter.Iterator) bool {
+				g.Groups = append(g.Groups, iter.ReadString())
+				return true
+			})
+		}
+	}
+
+	return g, nil
+}
+
+func encodeUnwrap(s *jsoniter.Stream, u *UnwrapExpr) {
+	s.WriteObjectStart()
+	s.WriteObjectField(Identifier)
+	s.WriteString(u.Identifier)
+
+	s.WriteMore()
+	s.WriteObjectField(Op)
+	s.WriteString(u.Operation)
+
+	s.WriteMore()
+	s.WriteObjectField(PostFilterers)
+	s.WriteArrayStart()
+	for i, filter := range u.PostFilters {
+		if i > 0 {
+			s.WriteMore()
+		}
+		encodeLabelFilter(s, filter)
+	}
+	s.WriteArrayEnd()
+
+	s.WriteObjectEnd()
+}
+
+func decodeUnwrap(iter *jsoniter.Iterator) *UnwrapExpr {
+	e := &UnwrapExpr{}
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Identifier:
+			e.Identifier = iter.ReadString()
+		case Op:
+			e.Operation = iter.ReadString()
+		case PostFilterers:
+			iter.ReadArrayCB(func(i *jsoniter.Iterator) bool {
+				e.PostFilters = append(e.PostFilters, decodeLabelFilter(i))
+				return true
+			})
+		}
+	}
+
+	return e
+}
+
+func encodeLabelFilter(s *jsoniter.Stream, filter log.LabelFilterer) {
+	switch concrete := filter.(type) {
+	case *log.BinaryLabelFilter:
+		s.WriteObjectStart()
+		s.WriteObjectField(Binary)
+
+		s.WriteObjectStart()
+		s.WriteObjectField(LHS)
+		encodeLabelFilter(s, concrete.Left)
+
+		s.WriteMore()
+		s.WriteObjectField(RHS)
+		encodeLabelFilter(s, concrete.Right)
+
+		s.WriteMore()
+		s.WriteObjectField(And)
+		s.WriteBool(concrete.And)
+
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	case log.NoopLabelFilter:
+		return
+	case *log.BytesLabelFilter:
+		s.WriteObjectStart()
+		s.WriteObjectField(Bytes)
+
+		s.WriteObjectStart()
+		s.WriteObjectField(Name)
+		s.WriteString(concrete.Name)
+
+		s.WriteMore()
+		s.WriteObjectField(Value)
+		s.WriteUint64(concrete.Value)
+
+		s.WriteMore()
+		s.WriteObjectField(Type)
+		s.WriteInt(int(concrete.Type))
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	case *log.DurationLabelFilter:
+		s.WriteObjectStart()
+		s.WriteObjectField(Duration)
+
+		s.WriteObjectStart()
+		s.WriteObjectField(Name)
+		s.WriteString(concrete.Name)
+
+		s.WriteMore()
+		s.WriteObjectField(Value)
+		s.WriteInt64(int64(concrete.Value))
+
+		s.WriteMore()
+		s.WriteObjectField(Type)
+		s.WriteInt(int(concrete.Type))
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	case *log.NumericLabelFilter:
+		s.WriteObjectStart()
+		s.WriteObjectField(Numeric)
+
+		s.WriteObjectStart()
+		s.WriteObjectField(Name)
+		s.WriteString(concrete.Name)
+
+		s.WriteMore()
+		s.WriteObjectField(Value)
+		s.WriteFloat64(concrete.Value)
+
+		s.WriteMore()
+		s.WriteObjectField(Type)
+		s.WriteInt(int(concrete.Type))
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	case *log.StringLabelFilter:
+		s.WriteObjectStart()
+		s.WriteObjectField(StringField)
+
+		s.WriteObjectStart()
+		if concrete.Matcher != nil {
+			s.WriteObjectField(Name)
+			s.WriteString(concrete.Name)
+
+			s.WriteMore()
+			s.WriteObjectField(Value)
+			s.WriteString(concrete.Value)
+
+			s.WriteMore()
+			s.WriteObjectField(Type)
+			s.WriteInt(int(concrete.Type))
+		}
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	case *log.LineFilterLabelFilter:
+		// Line filter label filters are encoded as string filters as
+		// well. See log.NewStringLabelFilter.
+		s.WriteObjectStart()
+		s.WriteObjectField(StringField)
+
+		s.WriteObjectStart()
+		if concrete.Matcher != nil {
+			s.WriteObjectField(Name)
+			s.WriteString(concrete.Name)
+
+			s.WriteMore()
+			s.WriteObjectField(Value)
+			s.WriteString(concrete.Value)
+
+			s.WriteMore()
+			s.WriteObjectField(Type)
+			s.WriteInt(int(concrete.Type))
+		}
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	case *log.IPLabelFilter:
+		s.WriteObjectStart()
+		s.WriteObjectField(IPField)
+
+		s.WriteObjectStart()
+		s.WriteObjectField(Type)
+		s.WriteInt(int(concrete.Ty))
+
+		s.WriteMore()
+		s.WriteObjectField(Label)
+		s.WriteString(concrete.Label)
+
+		s.WriteMore()
+		s.WriteObjectField(Pattern)
+		s.WriteString(concrete.Pattern)
+
+		s.WriteObjectEnd()
+
+		s.WriteObjectEnd()
+	}
+}
+
+func decodeLabelFilter(iter *jsoniter.Iterator) log.LabelFilterer {
+	var filter log.LabelFilterer
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Binary:
+			var left, right log.LabelFilterer
+			var and bool
+			for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
+				switch k {
+				case And:
+					and = iter.ReadBool()
+				case LHS:
+					left = decodeLabelFilter(iter)
+				case RHS:
+					right = decodeLabelFilter(iter)
+				}
+			}
+
+			filter = &log.BinaryLabelFilter{
+				And:   and,
+				Left:  left,
+				Right: right,
+			}
+
+		case Bytes:
+			var name string
+			var b uint64
+			var t log.LabelFilterType
+			for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
+				switch k {
+				case Name:
+					name = iter.ReadString()
+				case Value:
+					b = iter.ReadUint64()
+				case Type:
+					t = log.LabelFilterType(iter.ReadInt())
+				}
+			}
+			filter = log.NewBytesLabelFilter(t, name, b)
+		case Duration:
+			var name string
+			var duration time.Duration
+			var t log.LabelFilterType
+			for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
+				switch k {
+				case Name:
+					name = iter.ReadString()
+				case Value:
+					duration = time.Duration(iter.ReadInt64())
+				case Type:
+					t = log.LabelFilterType(iter.ReadInt())
+				}
+			}
+
+			filter = log.NewDurationLabelFilter(t, name, duration)
+		case Numeric:
+			var name string
+			var value float64
+			var t log.LabelFilterType
+			for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
+				switch k {
+				case Name:
+					name = iter.ReadString()
+				case Value:
+					value = iter.ReadFloat64()
+				case Type:
+					t = log.LabelFilterType(iter.ReadInt())
+				}
+			}
+
+			filter = log.NewNumericLabelFilter(t, name, value)
+		case StringField:
+
+			var name string
+			var value string
+			var t labels.MatchType
+			for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
+				switch k {
+				case Name:
+					name = iter.ReadString()
+				case Value:
+					value = iter.ReadString()
+				case Type:
+					t = labels.MatchType(iter.ReadInt())
+				}
+			}
+
+			var matcher *labels.Matcher
+			if name != "" && value != "" {
+				matcher = labels.MustNewMatcher(t, name, value)
+			}
+
+			filter = log.NewStringLabelFilter(matcher)
+
+		case IPField:
+			var label string
+			var pattern string
+			var t log.LabelFilterType
+			for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
+				switch k {
+				case Pattern:
+					pattern = iter.ReadString()
+				case Label:
+					label = iter.ReadString()
+				case Type:
+					t = log.LabelFilterType(iter.ReadInt())
+				}
+			}
+			filter = log.NewIPLabelFilter(pattern, label, t)
+		}
+	}
+
+	return filter
+}
+
+func encodeLogSelector(s *jsoniter.Stream, e LogSelectorExpr) {
+	s.WriteObjectStart()
+	s.WriteObjectField(Raw)
+
+	s.WriteString(e.String())
+
+	s.WriteObjectEnd()
+	s.Flush()
+}
+
+func decodeLogSelector(iter *jsoniter.Iterator) (LogSelectorExpr, error) {
+	var e LogSelectorExpr
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Raw:
+			raw := iter.ReadString()
+			expr, err := ParseExpr(raw)
+			if err != nil {
+				return nil, err
+			}
+
+			var ok bool
+			e, ok = expr.(LogSelectorExpr)
+
+			if !ok {
+				return nil, fmt.Errorf("unexpected expression type: want(LogSelectorExpr), got(%T)", expr)
+			}
+		}
+	}
+
+	return e, nil
+}
+
+func decodeSample(iter *jsoniter.Iterator) (SampleExpr, error) {
+	var expr SampleExpr
+	var err error
+	for key := iter.ReadObject(); key != ""; key = iter.ReadObject() {
+		switch key {
+		case Bin:
+			expr, err = decodeBinOp(iter)
+		case VectorAgg:
+			expr, err = decodeVectorAgg(iter)
+		case RangeAgg:
+			expr, err = decodeRangeAgg(iter)
+		case Literal:
+			expr, err = decodeLiteral(iter)
+		case Vector:
+			expr, err = decodeVector(iter)
+		case LabelReplace:
+			expr, err = decodeLabelReplace(iter)
+		default:
+			return nil, fmt.Errorf("unknown sample expression type: %s", key)
+		}
+	}
+	return expr, err
+}
+
+func decodeBinOp(iter *jsoniter.Iterator) (*BinOpExpr, error) {
+	expr := &BinOpExpr{}
+	var err error
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Op:
+			expr.Op = iter.ReadString()
+		case RHS:
+			expr.RHS, err = decodeSample(iter)
+		case LHS:
+			expr.SampleExpr, err = decodeSample(iter)
+		case Options:
+			expr.Opts = decodeBinOpOptions(iter)
+		}
+	}
+
+	return expr, err
+}
+func decodeBinOpOptions(iter *jsoniter.Iterator) *BinOpOptions {
+	opts := &BinOpOptions{}
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case ReturnBool:
+			opts.ReturnBool = iter.ReadBool()
+		case VectorMatchingField:
+			opts.VectorMatching = decodeVectorMatching(iter)
+		}
+	}
+
+	return opts
+}
+
+func encodeVectorMatching(s *jsoniter.Stream, vm *VectorMatching) {
+	s.WriteObjectStart()
+
+	s.WriteObjectField(Include)
+	s.WriteArrayStart()
+	for i, l := range vm.Include {
+		if i > 0 {
+			s.WriteMore()
+		}
+		s.WriteString(l)
+	}
+	s.WriteArrayEnd()
+
+	s.WriteMore()
+	s.WriteObjectField(On)
+	s.WriteBool(vm.On)
+
+	s.WriteMore()
+	s.WriteObjectField(Card)
+	s.WriteInt(int(vm.Card))
+
+	s.WriteMore()
+	s.WriteObjectField(MatchingLabels)
+	s.WriteArrayStart()
+	for i, l := range vm.MatchingLabels {
+		if i > 0 {
+			s.WriteMore()
+		}
+		s.WriteString(l)
+	}
+	s.WriteArrayEnd()
+
+	s.WriteObjectEnd()
+}
+
+func decodeVectorMatching(iter *jsoniter.Iterator) *VectorMatching {
+	vm := &VectorMatching{}
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Include:
+			iter.ReadArrayCB(func(i *jsoniter.Iterator) bool {
+				vm.Include = append(vm.Include, i.ReadString())
+				return true
+			})
+		case On:
+			vm.On = iter.ReadBool()
+		case Card:
+			vm.Card = VectorMatchCardinality(iter.ReadInt())
+		case MatchingLabels:
+			iter.ReadArrayCB(func(i *jsoniter.Iterator) bool {
+				vm.MatchingLabels = append(vm.MatchingLabels, i.ReadString())
+				return true
+			})
+		}
+	}
+	return vm
+}
+
+func decodeVectorAgg(iter *jsoniter.Iterator) (*VectorAggregationExpr, error) {
+	expr := &VectorAggregationExpr{}
+	var err error
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Op:
+			expr.Operation = iter.ReadString()
+		case Params:
+			expr.Params = iter.ReadInt()
+		case GroupingField:
+			expr.Grouping, err = decodeGrouping(iter)
+		case Inner:
+			expr.Left, err = decodeSample(iter)
+		}
+	}
+
+	return expr, err
+}
+
+func decodeRangeAgg(iter *jsoniter.Iterator) (*RangeAggregationExpr, error) {
+	expr := &RangeAggregationExpr{}
+	var err error
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Op:
+			expr.Operation = iter.ReadString()
+		case Params:
+			tmp := iter.ReadFloat64()
+			expr.Params = &tmp
+		case Range:
+			expr.Left, err = decodeLogRange(iter)
+		case GroupingField:
+			expr.Grouping, err = decodeGrouping(iter)
+		}
+	}
+
+	return expr, err
+}
+
+func decodeLogRange(iter *jsoniter.Iterator) (*LogRange, error) {
+	expr := &LogRange{}
+	var err error
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case LogSelector:
+			expr.Left, err = decodeLogSelector(iter)
+		case IntervalNanos:
+			expr.Interval = time.Duration(iter.ReadInt64())
+		case OffsetNanos:
+			expr.Offset = time.Duration(iter.ReadInt64())
+		case Unwrap:
+			expr.Unwrap = decodeUnwrap(iter)
+		}
+	}
+
+	return expr, err
+}
+
+func decodeLabelReplace(iter *jsoniter.Iterator) (*LabelReplaceExpr, error) {
+	var err error
+	var left SampleExpr
+	var dst, src, replacement, regex string
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Inner:
+			left, err = decodeSample(iter)
+			if err != nil {
+				return nil, err
+			}
+		case Dst:
+			dst = iter.ReadString()
+		case Src:
+			src = iter.ReadString()
+		case Replacement:
+			replacement = iter.ReadString()
+		case RegexField:
+			regex = iter.ReadString()
+		}
+	}
+
+	return mustNewLabelReplaceExpr(left, dst, replacement, src, regex), nil
+}
+
+func decodeLiteral(iter *jsoniter.Iterator) (*LiteralExpr, error) {
+	expr := &LiteralExpr{}
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Value:
+			expr.Val = iter.ReadFloat64()
+		}
+	}
+
+	return expr, nil
+}
+
+func decodeVector(iter *jsoniter.Iterator) (*VectorExpr, error) {
+	expr := &VectorExpr{}
+
+	for f := iter.ReadObject(); f != ""; f = iter.ReadObject() {
+		switch f {
+		case Value:
+			expr.Val = iter.ReadFloat64()
+		}
+	}
+
+	return expr, nil
+}
+
+func decodeMatchers(iter *jsoniter.Iterator) (LogSelectorExpr, error) {
+	return decodeLogSelector(iter)
+}
+
+func decodePipeline(iter *jsoniter.Iterator) (LogSelectorExpr, error) {
+	return decodeLogSelector(iter)
+}
diff --git a/pkg/logql/syntax/serialize_test.go b/pkg/logql/syntax/serialize_test.go
new file mode 100644
index 0000000000000..846e3988b852b
--- /dev/null
+++ b/pkg/logql/syntax/serialize_test.go
@@ -0,0 +1,96 @@
+package syntax
+
+import (
+	"bytes"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestJSONSerializationRoundTrip(t *testing.T) {
+	tests := map[string]struct {
+		query string
+	}{
+		"simple matchers": {
+			query: `{env="prod", app=~"loki.*"}`,
+		},
+		"simple aggregation": {
+			query: `count_over_time({env="prod", app=~"loki.*"}[5m])`,
+		},
+		"simple aggregation with unwrap": {
+			query: `sum_over_time({env="prod", app=~"loki.*"} | unwrap bytes[5m])`,
+		},
+		"bin op": {
+			query: `(count_over_time({env="prod", app=~"loki.*"}[5m]) >= 0)`,
+		},
+		"label filter": {
+			query: `{app="foo"} |= "bar" | json | ( latency>=250ms or ( status_code<500 , status_code>200 ) )`,
+		},
+		"regexp": {
+			query: `{env="prod", app=~"loki.*"} |~ ".*foo.*"`,
+		},
+		"vector matching": {
+			query: `(sum by (cluster)(rate({foo="bar"}[5m])) / ignoring (cluster)  count(rate({foo="bar"}[5m])))`,
+		},
+		"sum over or vector": {
+			query: `(sum(count_over_time({foo="bar"}[5m])) or vector(1.000000))`,
+		},
+		"label replace": {
+			query: `label_replace(vector(0.000000),"foo","bar","","")`,
+		},
+		"filters with bytes": {
+			query: `{app="foo"} |= "bar" | json | ( status_code <500 or ( status_code>200 , size>=2.5KiB ) )`,
+		},
+		"post filter": {
+			query: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+				| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo
+				| __error__ !~".+"[5m]) by (namespace,instance)`,
+		},
+		"multiple post filters": {
+			query: `rate({app="foo"} | json | unwrap foo | latency >= 250ms or bytes > 42B or ( status_code < 500 and status_code > 200) or source = ip("") and user = "me" [1m])`,
+		},
+	}
+
+	for name, test := range tests {
+		t.Run(name, func(t *testing.T) {
+
+			expr, err := ParseExpr(test.query)
+			require.NoError(t, err)
+
+			var buf bytes.Buffer
+			err = EncodeJSON(expr, &buf)
+			require.NoError(t, err)
+
+			t.Log(buf.String())
+
+			actual, err := DecodeJSON(buf.String())
+			require.NoError(t, err)
+
+			require.Equal(t, expr.Pretty(0), actual.Pretty(0))
+		})
+	}
+}
+func TestJSONSerializationParseTestCases(t *testing.T) {
+	for _, tc := range ParseTestCases {
+		if tc.err == nil {
+			t.Run(tc.in, func(t *testing.T) {
+				ast, err := ParseExpr(tc.in)
+				require.NoError(t, err)
+				if strings.Contains(tc.in, "KiB") {
+					t.Skipf("Byte roundtrip conversion is broken. '%s' vs '%s'", tc.in, ast.String())
+				}
+
+				var buf bytes.Buffer
+				err = EncodeJSON(ast, &buf)
+				require.NoError(t, err)
+				actual, err := DecodeJSON(buf.String())
+				require.NoError(t, err)
+
+				t.Log(buf.String())
+
+				require.Equal(t, tc.exp, actual)
+			})
+		}
+	}
+}
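Reviewer note: a minimal, standalone sketch of how the codec added above can be used end to end. The query, the printed output, and the `main` wrapper are illustrative only and are not part of this change; only `ParseExpr`, `EncodeJSON`, and `DecodeJSON` come from the patch.

package main

import (
	"bytes"
	"fmt"

	"github.com/grafana/loki/pkg/logql/syntax"
)

func main() {
	expr, err := syntax.ParseExpr(`sum(rate({app="foo"}[5m]))`)
	if err != nil {
		panic(err)
	}

	// Encode the AST to its JSON representation.
	var buf bytes.Buffer
	if err := syntax.EncodeJSON(expr, &buf); err != nil {
		panic(err)
	}
	fmt.Println(buf.String())

	// Decode it back into an AST and re-render the query string.
	decoded, err := syntax.DecodeJSON(buf.String())
	if err != nil {
		panic(err)
	}
	fmt.Println(decoded.String())
}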
diff --git a/pkg/logql/syntax/walk.go b/pkg/logql/syntax/walk.go
index 291ec8b31036f..c528c9ca63437 100644
--- a/pkg/logql/syntax/walk.go
+++ b/pkg/logql/syntax/walk.go
@@ -1,5 +1,7 @@
 package syntax
 
+import "fmt"
+
 type WalkFn = func(e Expr)
 
 func walkAll(f WalkFn, xs ...Walkable) {
@@ -11,3 +13,120 @@ func walkAll(f WalkFn, xs ...Walkable) {
 type Walkable interface {
 	Walk(f WalkFn)
 }
+
+type AcceptVisitor interface {
+	Accept(RootVisitor)
+}
+
+type RootVisitor interface {
+	SampleExprVisitor
+	LogSelectorExprVisitor
+	StageExprVisitor
+
+	VisitLogRange(*LogRange)
+}
+
+type SampleExprVisitor interface {
+	VisitBinOp(*BinOpExpr)
+	VisitVectorAggregation(*VectorAggregationExpr)
+	VisitRangeAggregation(*RangeAggregationExpr)
+	VisitLabelReplace(*LabelReplaceExpr)
+	VisitLiteral(*LiteralExpr)
+	VisitVector(*VectorExpr)
+}
+
+type LogSelectorExprVisitor interface {
+	VisitMatchers(*MatchersExpr)
+	VisitPipeline(*PipelineExpr)
+	VisitLiteral(*LiteralExpr)
+	VisitVector(*VectorExpr)
+}
+
+type StageExprVisitor interface {
+	VisitDecolorize(*DecolorizeExpr)
+	VisitDropLabels(*DropLabelsExpr)
+	VisitJSONExpressionParser(*JSONExpressionParser)
+	VisitKeepLabel(*KeepLabelsExpr)
+	VisitLabelFilter(*LabelFilterExpr)
+	VisitLabelFmt(*LabelFmtExpr)
+	VisitLabelParser(*LabelParserExpr)
+	VisitLineFilter(*LineFilterExpr)
+	VisitLineFmt(*LineFmtExpr)
+	VisitLogfmtExpressionParser(*LogfmtExpressionParser)
+	VisitLogfmtParser(*LogfmtParserExpr)
+}
+
+func Dispatch(root Expr, v RootVisitor) error {
+	switch e := root.(type) {
+	case SampleExpr:
+		DispatchSampleExpr(e, v)
+	case LogSelectorExpr:
+		DispatchLogSelectorExpr(e, v)
+	case StageExpr:
+		DispatchStageExpr(e, v)
+	case *LogRange:
+		v.VisitLogRange(e)
+	default:
+		return fmt.Errorf("unexpected root expression type: got (%T)", e)
+	}
+
+	return nil
+}
+
+func DispatchSampleExpr(expr SampleExpr, v SampleExprVisitor) {
+	switch e := expr.(type) {
+	case *BinOpExpr:
+		v.VisitBinOp(e)
+	case *VectorAggregationExpr:
+		v.VisitVectorAggregation(e)
+	case *RangeAggregationExpr:
+		v.VisitRangeAggregation(e)
+	case *LabelReplaceExpr:
+		v.VisitLabelReplace(e)
+	case *LiteralExpr:
+		v.VisitLiteral(e)
+	case *VectorExpr:
+		v.VisitVector(e)
+	}
+}
+
+func DispatchLogSelectorExpr(expr LogSelectorExpr, v LogSelectorExprVisitor) {
+	switch e := expr.(type) {
+	case *PipelineExpr:
+		v.VisitPipeline(e)
+	case *MatchersExpr:
+		v.VisitMatchers(e)
+	case *VectorExpr:
+		v.VisitVector(e)
+	case *LiteralExpr:
+		v.VisitLiteral(e)
+	}
+}
+
+func DispatchStageExpr(expr StageExpr, v StageExprVisitor) {
+	switch e := expr.(type) {
+	case *DecolorizeExpr:
+		v.VisitDecolorize(e)
+	case *DropLabelsExpr:
+		v.VisitDropLabels(e)
+	case *JSONExpressionParser:
+		v.VisitJSONExpressionParser(e)
+	case *KeepLabelsExpr:
+		v.VisitKeepLabel(e)
+	case *LabelFilterExpr:
+		v.VisitLabelFilter(e)
+	case *LabelFmtExpr:
+		v.VisitLabelFmt(e)
+	case *LabelParserExpr:
+		v.VisitLabelParser(e)
+	case *LineFilterExpr:
+		v.VisitLineFilter(e)
+	case *LineFmtExpr:
+		v.VisitLineFmt(e)
+	case *LogfmtExpressionParser:
+		v.VisitLogfmtExpressionParser(e)
+	case *LogfmtParserExpr:
+		v.VisitLogfmtParser(e)
+	}
+
+}
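Reviewer note: a sketch of how the new dispatch helpers can drive a custom visitor. The `opCollector` type, the query, and the `main` wrapper are illustrative assumptions; only `DispatchSampleExpr`, the visitor interfaces, and the expression types come from this patch.

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql/syntax"
)

// opCollector records the operations found in a sample expression tree.
type opCollector struct {
	ops []string
}

func (c *opCollector) VisitBinOp(e *syntax.BinOpExpr) {
	c.ops = append(c.ops, e.Op)
	syntax.DispatchSampleExpr(e.SampleExpr, c)
	syntax.DispatchSampleExpr(e.RHS, c)
}

func (c *opCollector) VisitVectorAggregation(e *syntax.VectorAggregationExpr) {
	c.ops = append(c.ops, e.Operation)
	syntax.DispatchSampleExpr(e.Left, c)
}

func (c *opCollector) VisitRangeAggregation(e *syntax.RangeAggregationExpr) {
	c.ops = append(c.ops, e.Operation)
}

func (c *opCollector) VisitLabelReplace(e *syntax.LabelReplaceExpr) {
	syntax.DispatchSampleExpr(e.Left, c)
}

func (c *opCollector) VisitLiteral(*syntax.LiteralExpr) {}
func (c *opCollector) VisitVector(*syntax.VectorExpr)   {}

func main() {
	expr, err := syntax.ParseExpr(`sum by (app) (rate({app="foo"}[5m]))`)
	if err != nil {
		panic(err)
	}

	sample, ok := expr.(syntax.SampleExpr)
	if !ok {
		panic("not a sample expression")
	}

	c := &opCollector{}
	syntax.DispatchSampleExpr(sample, c)
	fmt.Println(c.ops) // for the query above this should print: [sum rate]
}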
diff --git a/pkg/ruler/base/client_pool.go b/pkg/ruler/base/client_pool.go
index ca2a3ac2d45f0..4a66fc935107e 100644
--- a/pkg/ruler/base/client_pool.go
+++ b/pkg/ruler/base/client_pool.go
@@ -48,15 +48,16 @@ func newRulerClientPool(clientCfg grpcclient.Config, logger log.Logger, reg prom
 	})
 
 	return &rulerClientsPool{
-		client.NewPool("ruler", poolCfg, nil, newRulerClientFactory(clientCfg, reg), clientsCount, logger),
+		client.NewPool("ruler", poolCfg, nil, newRulerClientFactory(clientCfg, reg, metricsNamespace), clientsCount, logger),
 	}
 }
 
-func newRulerClientFactory(clientCfg grpcclient.Config, reg prometheus.Registerer) client.PoolFactory {
+func newRulerClientFactory(clientCfg grpcclient.Config, reg prometheus.Registerer, metricsNamespace string) client.PoolFactory {
 	requestDuration := promauto.With(reg).NewHistogramVec(prometheus.HistogramOpts{
-		Name:    "cortex_ruler_client_request_duration_seconds",
-		Help:    "Time spent executing requests to the ruler.",
-		Buckets: prometheus.ExponentialBuckets(0.008, 4, 7),
+		Namespace: metricsNamespace,
+		Name:      "ruler_client_request_duration_seconds",
+		Help:      "Time spent executing requests to the ruler.",
+		Buckets:   prometheus.ExponentialBuckets(0.008, 4, 7),
 	}, []string{"operation", "status_code"})
 
 	return client.PoolAddrFunc(func(addr string) (client.PoolClient, error) {
@@ -64,11 +65,12 @@ func newRulerClientFactory(clientCfg grpcclient.Config, reg prometheus.Registere
 	})
 }
 
-func newRulerPoolClient(clientCfg grpcclient.Config, reg prometheus.Registerer) func(addr string) (client.PoolClient, error) {
+func newRulerPoolClient(clientCfg grpcclient.Config, reg prometheus.Registerer, metricsNamespace string) func(addr string) (client.PoolClient, error) {
 	requestDuration := promauto.With(reg).NewHistogramVec(prometheus.HistogramOpts{
-		Name:    "cortex_ruler_client_request_duration_seconds",
-		Help:    "Time spent executing requests to the ruler.",
-		Buckets: prometheus.ExponentialBuckets(0.008, 4, 7),
+		Namespace: metricsNamespace,
+		Name:      "ruler_client_request_duration_seconds",
+		Help:      "Time spent executing requests to the ruler.",
+		Buckets:   prometheus.ExponentialBuckets(0.008, 4, 7),
 	}, []string{"operation", "status_code"})
 
 	return func(addr string) (client.PoolClient, error) {
diff --git a/pkg/ruler/base/client_pool_test.go b/pkg/ruler/base/client_pool_test.go
index 3e296cc116c17..05fc23290033c 100644
--- a/pkg/ruler/base/client_pool_test.go
+++ b/pkg/ruler/base/client_pool_test.go
@@ -13,6 +13,8 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"google.golang.org/grpc"
+
+	"github.com/grafana/loki/pkg/util/constants"
 )
 
 func Test_newRulerClientFactory(t *testing.T) {
@@ -36,7 +38,7 @@ func Test_newRulerClientFactory(t *testing.T) {
 	flagext.DefaultValues(&cfg)
 
 	reg := prometheus.NewPedanticRegistry()
-	factory := newRulerPoolClient(cfg, reg)
+	factory := newRulerPoolClient(cfg, reg, constants.Loki)
 
 	for i := 0; i < 2; i++ {
 		client, err := factory(listener.Addr().String())
@@ -54,7 +56,7 @@ func Test_newRulerClientFactory(t *testing.T) {
 	require.NoError(t, err)
 
 	assert.Len(t, metrics, 1)
-	assert.Equal(t, "cortex_ruler_client_request_duration_seconds", metrics[0].GetName())
+	assert.Equal(t, "loki_ruler_client_request_duration_seconds", metrics[0].GetName())
 	assert.Equal(t, dto.MetricType_HISTOGRAM, metrics[0].GetType())
 	assert.Len(t, metrics[0].GetMetric(), 1)
 	assert.Equal(t, uint64(2), metrics[0].GetMetric()[0].GetHistogram().GetSampleCount())
diff --git a/pkg/scheduler/scheduler.go b/pkg/scheduler/scheduler.go
index b5ff71b9ce188..305d47b17e571 100644
--- a/pkg/scheduler/scheduler.go
+++ b/pkg/scheduler/scheduler.go
@@ -333,7 +333,7 @@ func (s *Scheduler) enqueueRequest(frontendContext context.Context, frontendAddr
 	// information, since that is a long-running request.
 	tracer := opentracing.GlobalTracer()
 	parentSpanContext, err := lokigrpc.GetParentSpanForRequest(tracer, msg)
-	if err != nil {
+	if err != nil && err != opentracing.ErrSpanContextNotFound {
 		return err
 	}
 
diff --git a/pkg/util/httpgrpc/carrier.go b/pkg/util/httpgrpc/carrier.go
index b52b51ae352b2..ab1753ef6c271 100644
--- a/pkg/util/httpgrpc/carrier.go
+++ b/pkg/util/httpgrpc/carrier.go
@@ -39,11 +39,7 @@ func GetParentSpanForHTTPRequest(tracer opentracing.Tracer, req *weaveworks_http
 	}
 
 	carrier := (*HeadersCarrier)(req)
-	extracted, err := tracer.Extract(opentracing.HTTPHeaders, carrier)
-	if err == opentracing.ErrSpanContextNotFound {
-		err = nil
-	}
-	return extracted, err
+	return tracer.Extract(opentracing.HTTPHeaders, carrier)
 }
 
 func GetParentSpanForQueryRequest(tracer opentracing.Tracer, req *queryrange.QueryRequest) (opentracing.SpanContext, error) {
@@ -52,11 +48,7 @@ func GetParentSpanForQueryRequest(tracer opentracing.Tracer, req *queryrange.Que
 	}
 
 	carrier := opentracing.TextMapCarrier(req.Metadata)
-	extracted, err := tracer.Extract(opentracing.TextMap, carrier)
-	if err == opentracing.ErrSpanContextNotFound {
-		err = nil
-	}
-	return extracted, err
+	return tracer.Extract(opentracing.TextMap, carrier)
 }
 
 func GetParentSpanForRequest(tracer opentracing.Tracer, req Request) (opentracing.SpanContext, error) {
diff --git a/production/docker/config/loki.yaml b/production/docker/config/loki.yaml
index 7d7346cfc63b7..e6a2f5fe31d84 100644
--- a/production/docker/config/loki.yaml
+++ b/production/docker/config/loki.yaml
@@ -89,7 +89,6 @@ schema_config:
 
 limits_config:
   max_cache_freshness_per_query: '10m'
-  enforce_metric_name: false
   reject_old_samples: true
   reject_old_samples_max_age: 30m
   ingestion_rate_mb: 10
diff --git a/production/helm/loki/values.yaml b/production/helm/loki/values.yaml
index 92b7069af39f2..de6048aecc712 100644
--- a/production/helm/loki/values.yaml
+++ b/production/helm/loki/values.yaml
@@ -254,7 +254,6 @@ loki:
     grpc_listen_port: 9095
   # -- Limits config
   limits_config:
-    enforce_metric_name: false
     reject_old_samples: true
     reject_old_samples_max_age: 168h
     max_cache_freshness_per_query: 10m
diff --git a/production/ksonnet/loki-simple-scalable/example/main.jsonnet b/production/ksonnet/loki-simple-scalable/example/main.jsonnet
index ebf7b5cf288a8..66a0d185f44d0 100644
--- a/production/ksonnet/loki-simple-scalable/example/main.jsonnet
+++ b/production/ksonnet/loki-simple-scalable/example/main.jsonnet
@@ -38,7 +38,6 @@ loki {
         },
       },
       limits_config: {
-        enforce_metric_name: false,
         reject_old_samples_max_age: '168h',  //1 week
         max_global_streams_per_user: 60000,
         ingestion_rate_mb: 75,
diff --git a/production/ksonnet/loki/config.libsonnet b/production/ksonnet/loki/config.libsonnet
index 8450e524fd1ee..20cd6ad1fe419 100644
--- a/production/ksonnet/loki/config.libsonnet
+++ b/production/ksonnet/loki/config.libsonnet
@@ -208,7 +208,6 @@
         query_ingesters_within: '2h',  // twice the max-chunk age (1h default) for safety buffer
       },
       limits_config: {
-        enforce_metric_name: false,
         // align middleware parallelism with shard factor to optimize one-legged sharded queries.
         max_query_parallelism: if $._config.queryFrontend.sharded_queries_enabled then
           // For a sharding factor of 16 (default), this is 256, or enough for 16 sharded queries.
diff --git a/production/nomad/loki-distributed/config.yml b/production/nomad/loki-distributed/config.yml
index 2391ff1afed0b..48fc8e166c688 100644
--- a/production/nomad/loki-distributed/config.yml
+++ b/production/nomad/loki-distributed/config.yml
@@ -122,6 +122,5 @@ ruler:
     dir: {{ env "NOMAD_ALLOC_DIR" }}/data/ruler
 
 limits_config:
-  enforce_metric_name: false
   reject_old_samples: true
   reject_old_samples_max_age: 168h
diff --git a/production/nomad/loki-simple/config.yml b/production/nomad/loki-simple/config.yml
index d0883b2dfa6ae..79b1d39d57a92 100644
--- a/production/nomad/loki-simple/config.yml
+++ b/production/nomad/loki-simple/config.yml
@@ -50,7 +50,6 @@ storage_config:
     s3forcepathstyle: true
 
 limits_config:
-  enforce_metric_name: false
   reject_old_samples: true
   reject_old_samples_max_age: 168h
 
diff --git a/production/nomad/loki/config.yml b/production/nomad/loki/config.yml
index 1f1e24701925a..ceeda7d2e49ef 100644
--- a/production/nomad/loki/config.yml
+++ b/production/nomad/loki/config.yml
@@ -50,7 +50,6 @@ storage_config:
     s3forcepathstyle: true
 
 limits_config:
-  enforce_metric_name: false
   reject_old_samples: true
   reject_old_samples_max_age: 168h