diff --git a/CHANGELOG.md b/CHANGELOG.md
index fae8686d1e45d..fe36b57da000a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
### Added
- Add fingerprint ingest processor ([#13724](https://github.com/opensearch-project/OpenSearch/pull/13724))
- [Remote Store] Rate limiter for remote store low priority uploads ([#14374](https://github.com/opensearch-project/OpenSearch/pull/14374/))
+- Apply the date histogram rewrite optimization to range aggregation ([#13865](https://github.com/opensearch-project/OpenSearch/pull/13865))
### Dependencies
- Bump `org.gradle.test-retry` from 1.5.8 to 1.5.9 ([#13442](https://github.com/opensearch-project/OpenSearch/pull/13442))
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
index 400d867296e5f..3115dce6c10a5 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
@@ -35,6 +35,7 @@
import com.fasterxml.jackson.core.JsonParseException;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
@@ -165,7 +166,7 @@ public ScaledFloatFieldMapper build(BuilderContext context) {
public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.getSettings()));
- public static final class ScaledFloatFieldType extends SimpleMappedFieldType {
+ public static final class ScaledFloatFieldType extends SimpleMappedFieldType implements NumericPointEncoder {
private final double scalingFactor;
private final Double nullValue;
@@ -188,6 +189,21 @@ public ScaledFloatFieldType(String name, double scalingFactor) {
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ assert value instanceof Double;
+ double doubleValue = (Double) value;
+ byte[] point = new byte[Long.BYTES];
+ if (doubleValue == Double.POSITIVE_INFINITY) {
+ LongPoint.encodeDimension(Long.MAX_VALUE, point, 0);
+ } else if (doubleValue == Double.NEGATIVE_INFINITY) {
+ LongPoint.encodeDimension(Long.MIN_VALUE, point, 0);
+ } else {
+ LongPoint.encodeDimension(Math.round(scale(value)), point, 0);
+ }
+ return point;
+ }
+
public double getScalingFactor() {
return scalingFactor;
}
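
The infinity clamping above is what keeps open-ended range endpoints encodable: after multiplying by the scaling factor, ±Infinity would not round to a usable long, so the endpoints pin to `Long.MIN_VALUE`/`Long.MAX_VALUE` instead. A minimal, self-contained sketch of the underlying encoding for the finite case (the class and variable names here are illustrative, not part of the change):

```java
import org.apache.lucene.document.LongPoint;

// Minimal sketch (not the mapper itself) of what encodePoint does for a
// finite scaled_float value: multiply by the scaling factor, round to a
// long, then write the 8-byte sortable point form used by the BKD tree.
public class ScaledFloatPointSketch {
    public static void main(String[] args) {
        double scalingFactor = 100.0; // same factor as the test mapping below
        double value = 1.53;

        byte[] point = new byte[Long.BYTES];
        LongPoint.encodeDimension(Math.round(value * scalingFactor), point, 0);

        // Round-trip: decode and unscale to verify the encoding.
        long stored = LongPoint.decodeDimension(point, 0);
        System.out.println(stored);                 // 153
        System.out.println(stored / scalingFactor); // 1.53
    }
}
```

Run against lucene-core, this prints 153 and 1.53, showing the scaled value round-trips through the same 8-byte point form the field indexes.
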
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml
index 7d887d56ae8fe..2fd926276d0b4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml
@@ -14,6 +14,9 @@ setup:
date:
type: date
format: epoch_second
+ scaled_field:
+ type: scaled_float
+ scaling_factor: 100
- do:
cluster.health:
@@ -528,3 +531,139 @@ setup:
- is_false: aggregations.unsigned_long_range.buckets.2.to
- match: { aggregations.unsigned_long_range.buckets.2.doc_count: 0 }
+
+---
+"Double range profiler shows filter rewrite info":
+ - skip:
+ version: " - 2.99.99"
+ reason: debug info for filter rewrite added in 3.0.0 (to be backported to 2.15.0)
+
+ - do:
+ indices.create:
+ index: test_profile
+ body:
+ settings:
+ number_of_replicas: 0
+ refresh_interval: -1
+ mappings:
+ properties:
+ ip:
+ type: ip
+ double:
+ type: double
+ date:
+ type: date
+ format: epoch_second
+
+ - do:
+ bulk:
+ index: test_profile
+ refresh: true
+ body:
+ - '{"index": {}}'
+ - '{"double" : 42}'
+ - '{"index": {}}'
+ - '{"double" : 100}'
+ - '{"index": {}}'
+ - '{"double" : 50}'
+
+ - do:
+ search:
+ index: test_profile
+ body:
+ size: 0
+ profile: true
+ aggs:
+ double_range:
+ range:
+ field: double
+ ranges:
+ - to: 50
+ - from: 50
+ to: 150
+ - from: 150
+
+ - length: { aggregations.double_range.buckets: 3 }
+
+ - match: { aggregations.double_range.buckets.0.key: "*-50.0" }
+ - is_false: aggregations.double_range.buckets.0.from
+ - match: { aggregations.double_range.buckets.0.to: 50.0 }
+ - match: { aggregations.double_range.buckets.0.doc_count: 1 }
+ - match: { aggregations.double_range.buckets.1.key: "50.0-150.0" }
+ - match: { aggregations.double_range.buckets.1.from: 50.0 }
+ - match: { aggregations.double_range.buckets.1.to: 150.0 }
+ - match: { aggregations.double_range.buckets.1.doc_count: 2 }
+ - match: { aggregations.double_range.buckets.2.key: "150.0-*" }
+ - match: { aggregations.double_range.buckets.2.from: 150.0 }
+ - is_false: aggregations.double_range.buckets.2.to
+ - match: { aggregations.double_range.buckets.2.doc_count: 0 }
+
+ - match: { profile.shards.0.aggregations.0.debug.optimized_segments: 1 }
+ - match: { profile.shards.0.aggregations.0.debug.unoptimized_segments: 0 }
+ - match: { profile.shards.0.aggregations.0.debug.leaf_visited: 1 }
+ - match: { profile.shards.0.aggregations.0.debug.inner_visited: 0 }
+
+---
+"Scaled Float Range Aggregation":
+ - do:
+ index:
+ index: test
+ id: 1
+ body: { "scaled_field": 1 }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: { "scaled_field": 1.53 }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ body: { "scaled_field": -2.1 }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ body: { "scaled_field": 1.53 }
+
+ - do:
+ indices.refresh: { }
+
+ - do:
+ search:
+ index: test
+ body:
+ size: 0
+ aggs:
+ my_range:
+ range:
+ field: scaled_field
+ ranges:
+ - to: 0
+ - from: 0
+ to: 1
+ - from: 1
+ to: 1.5
+ - from: 1.5
+
+ - length: { aggregations.my_range.buckets: 4 }
+
+ - match: { aggregations.my_range.buckets.0.key: "*-0.0" }
+ - is_false: aggregations.my_range.buckets.0.from
+ - match: { aggregations.my_range.buckets.0.to: 0.0 }
+ - match: { aggregations.my_range.buckets.0.doc_count: 1 }
+ - match: { aggregations.my_range.buckets.1.key: "0.0-1.0" }
+ - match: { aggregations.my_range.buckets.1.from: 0.0 }
+ - match: { aggregations.my_range.buckets.1.to: 1.0 }
+ - match: { aggregations.my_range.buckets.1.doc_count: 0 }
+ - match: { aggregations.my_range.buckets.2.key: "1.0-1.5" }
+ - match: { aggregations.my_range.buckets.2.from: 1.0 }
+ - match: { aggregations.my_range.buckets.2.to: 1.5 }
+ - match: { aggregations.my_range.buckets.2.doc_count: 1 }
+ - match: { aggregations.my_range.buckets.3.key: "1.5-*" }
+ - match: { aggregations.my_range.buckets.3.from: 1.5 }
+ - is_false: aggregations.my_range.buckets.3.to
+ - match: { aggregations.my_range.buckets.3.doc_count: 2 }
diff --git a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java
index b7ee3bb8ca3e3..cf8703209fb37 100644
--- a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java
@@ -348,7 +348,7 @@ public DateFieldMapper build(BuilderContext context) {
*
* @opensearch.internal
*/
- public static final class DateFieldType extends MappedFieldType {
+ public static final class DateFieldType extends MappedFieldType implements NumericPointEncoder {
protected final DateFormatter dateTimeFormatter;
protected final DateMathParser dateMathParser;
protected final Resolution resolution;
@@ -549,6 +549,13 @@ public static long parseToLong(
return resolution.convert(dateParser.parse(BytesRefs.toString(value), now, roundUp, zone));
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Long.BYTES];
+ LongPoint.encodeDimension(value.longValue(), point, 0);
+ return point;
+ }
+
@Override
public Query distanceFeatureQuery(Object origin, String pivot, float boost, QueryShardContext context) {
failIfNotIndexedAndNoDocValues();
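
`DateFieldType`'s `encodePoint` is intentionally thin: parsing and resolution conversion (millis vs. nanos) happen before the call, so only the long-to-bytes step remains. A small sketch under that assumption, taking millisecond resolution as the example (`DatePointSketch` is illustrative):

```java
import java.time.Instant;
import org.apache.lucene.document.LongPoint;

// Sketch: by the time encodePoint runs, a date has already been parsed and
// resolution-converted to a long (epoch millis here), so encoding reduces to
// the same 8-byte long point form the date field's BKD index stores.
public class DatePointSketch {
    public static void main(String[] args) {
        long millis = Instant.parse("2024-06-01T00:00:00Z").toEpochMilli();
        byte[] point = new byte[Long.BYTES];
        LongPoint.encodeDimension(millis, point, 0);
        System.out.println(LongPoint.decodeDimension(point, 0) == millis); // true
    }
}
```
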
diff --git a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java
index eb3a99b0e0388..25e5f9970795f 100644
--- a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java
@@ -171,7 +171,7 @@ public NumberFieldMapper build(BuilderContext context) {
*
* @opensearch.internal
*/
- public enum NumberType {
+ public enum NumberType implements NumericPointEncoder {
HALF_FLOAT("half_float", NumericType.HALF_FLOAT) {
@Override
public Float parse(Object value, boolean coerce) {
@@ -194,6 +194,13 @@ public Number parsePoint(byte[] value) {
return HalfFloatPoint.decodeDimension(value, 0);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[HalfFloatPoint.BYTES];
+ HalfFloatPoint.encodeDimension(value.floatValue(), point, 0);
+ return point;
+ }
+
@Override
public Float parse(XContentParser parser, boolean coerce) throws IOException {
float parsed = parser.floatValue(coerce);
@@ -331,6 +338,13 @@ public Number parsePoint(byte[] value) {
return FloatPoint.decodeDimension(value, 0);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Float.BYTES];
+ FloatPoint.encodeDimension(value.floatValue(), point, 0);
+ return point;
+ }
+
@Override
public Float parse(XContentParser parser, boolean coerce) throws IOException {
float parsed = parser.floatValue(coerce);
@@ -457,6 +471,13 @@ public Number parsePoint(byte[] value) {
return DoublePoint.decodeDimension(value, 0);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Double.BYTES];
+ DoublePoint.encodeDimension(value.doubleValue(), point, 0);
+ return point;
+ }
+
@Override
public Double parse(XContentParser parser, boolean coerce) throws IOException {
double parsed = parser.doubleValue(coerce);
@@ -582,6 +603,13 @@ public Number parsePoint(byte[] value) {
return INTEGER.parsePoint(value).byteValue();
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Integer.BYTES];
+ IntPoint.encodeDimension(value.intValue(), point, 0);
+ return point;
+ }
+
@Override
public Short parse(XContentParser parser, boolean coerce) throws IOException {
int value = parser.intValue(coerce);
@@ -654,6 +682,13 @@ public Number parsePoint(byte[] value) {
return INTEGER.parsePoint(value).shortValue();
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Integer.BYTES];
+ IntPoint.encodeDimension(value.intValue(), point, 0);
+ return point;
+ }
+
@Override
public Short parse(XContentParser parser, boolean coerce) throws IOException {
return parser.shortValue(coerce);
@@ -722,6 +757,13 @@ public Number parsePoint(byte[] value) {
return IntPoint.decodeDimension(value, 0);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Integer.BYTES];
+ IntPoint.encodeDimension(value.intValue(), point, 0);
+ return point;
+ }
+
@Override
public Integer parse(XContentParser parser, boolean coerce) throws IOException {
return parser.intValue(coerce);
@@ -868,6 +910,13 @@ public Number parsePoint(byte[] value) {
return LongPoint.decodeDimension(value, 0);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[Long.BYTES];
+ LongPoint.encodeDimension(value.longValue(), point, 0);
+ return point;
+ }
+
@Override
public Long parse(XContentParser parser, boolean coerce) throws IOException {
return parser.longValue(coerce);
@@ -988,6 +1037,13 @@ public Number parsePoint(byte[] value) {
return BigIntegerPoint.decodeDimension(value, 0);
}
+ @Override
+ public byte[] encodePoint(Number value) {
+ byte[] point = new byte[BigIntegerPoint.BYTES];
+ BigIntegerPoint.encodeDimension(objectToUnsignedLong(value, false, true), point, 0);
+ return point;
+ }
+
@Override
public BigInteger parse(XContentParser parser, boolean coerce) throws IOException {
return parser.bigIntegerValue(coerce);
@@ -1215,16 +1271,30 @@ public static long objectToLong(Object value, boolean coerce) {
return Numbers.toLong(stringValue, coerce);
}
+ public static BigInteger objectToUnsignedLong(Object value, boolean coerce) {
+ return objectToUnsignedLong(value, coerce, false);
+ }
+
/**
- * Converts and Object to a {@code long} by checking it against known
+ * Converts an Object to a {@code BigInteger} by checking it against known
* types and checking its range.
+ *
+     * @param lenientBound if true, use MIN or MAX if the value is out of bounds
*/
- public static BigInteger objectToUnsignedLong(Object value, boolean coerce) {
+ public static BigInteger objectToUnsignedLong(Object value, boolean coerce, boolean lenientBound) {
if (value instanceof Long) {
return Numbers.toUnsignedBigInteger(((Long) value).longValue());
}
double doubleValue = objectToDouble(value);
+ if (lenientBound) {
+ if (doubleValue < Numbers.MIN_UNSIGNED_LONG_VALUE.doubleValue()) {
+ return Numbers.MIN_UNSIGNED_LONG_VALUE;
+ }
+ if (doubleValue > Numbers.MAX_UNSIGNED_LONG_VALUE.doubleValue()) {
+ return Numbers.MAX_UNSIGNED_LONG_VALUE;
+ }
+ }
if (doubleValue < Numbers.MIN_UNSIGNED_LONG_VALUE.doubleValue()
|| doubleValue > Numbers.MAX_UNSIGNED_LONG_VALUE.doubleValue()) {
throw new IllegalArgumentException("Value [" + value + "] is out of range for an unsigned long");
@@ -1349,7 +1419,7 @@ public static Query unsignedLongRangeQuery(
*
* @opensearch.internal
*/
- public static class NumberFieldType extends SimpleMappedFieldType {
+ public static class NumberFieldType extends SimpleMappedFieldType implements NumericPointEncoder {
private final NumberType type;
private final boolean coerce;
@@ -1394,6 +1464,10 @@ public String typeName() {
return type.name;
}
+ public NumberType numberType() {
+ return type;
+ }
+
public NumericType numericType() {
return type.numericType();
}
@@ -1501,6 +1575,11 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
public Number parsePoint(byte[] value) {
return type.parsePoint(value);
}
+
+ @Override
+ public byte[] encodePoint(Number value) {
+ return type.encodePoint(value);
+ }
}
private final NumberType type;
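
The `lenientBound` flag added above exists for the unsigned_long `encodePoint` path, which passes `true` so that out-of-range endpoints, such as the implicit bounds of an open-ended range, clamp to 0 and 2^64-1 instead of throwing. A self-contained sketch of that clamping, with a deliberately simplified final conversion:

```java
import java.math.BigDecimal;
import java.math.BigInteger;

// Illustrative re-statement of the lenientBound branch: clamp before the
// range check so open-ended endpoints become the extreme unsigned-long
// values rather than an IllegalArgumentException.
public class LenientUnsignedLongSketch {
    static final BigInteger MIN_UNSIGNED = BigInteger.ZERO;
    static final BigInteger MAX_UNSIGNED = new BigInteger("18446744073709551615"); // 2^64 - 1

    static BigInteger toUnsignedLong(double value, boolean lenientBound) {
        if (lenientBound) {
            if (value < MIN_UNSIGNED.doubleValue()) return MIN_UNSIGNED;
            if (value > MAX_UNSIGNED.doubleValue()) return MAX_UNSIGNED;
        }
        if (value < MIN_UNSIGNED.doubleValue() || value > MAX_UNSIGNED.doubleValue()) {
            throw new IllegalArgumentException("Value [" + value + "] is out of range for an unsigned long");
        }
        return new BigDecimal(value).toBigInteger(); // simplified vs. the real conversion
    }

    public static void main(String[] args) {
        System.out.println(toUnsignedLong(Double.NEGATIVE_INFINITY, true)); // 0
        System.out.println(toUnsignedLong(Double.POSITIVE_INFINITY, true)); // 18446744073709551615
        System.out.println(toUnsignedLong(42.0, false));                    // 42
    }
}
```
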
diff --git a/server/src/main/java/org/opensearch/index/mapper/NumericPointEncoder.java b/server/src/main/java/org/opensearch/index/mapper/NumericPointEncoder.java
new file mode 100644
index 0000000000000..be746a5526594
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/mapper/NumericPointEncoder.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.mapper;
+
+/**
+ * Interface for encoding a point value
+ */
+public interface NumericPointEncoder {
+ byte[] encodePoint(Number value);
+}
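
This one-method contract is all the range-rewrite code needs from a field type: a `Number` in, the sortable point bytes out. A sketch of how a caller can stay agnostic to the concrete field type; `ToyLongEncoder` is a stand-in for the real `DateFieldType`, `NumberFieldType`, and `ScaledFloatFieldType` implementations, and the local interface copy just keeps the sketch compilable on its own:

```java
import java.util.Arrays;
import org.apache.lucene.document.LongPoint;

// Local copy of the one-method contract so the sketch is self-contained.
interface NumericPointEncoder {
    byte[] encodePoint(Number value);
}

// Stand-in for the real field types that implement the interface.
class ToyLongEncoder implements NumericPointEncoder {
    @Override
    public byte[] encodePoint(Number value) {
        byte[] point = new byte[Long.BYTES];
        LongPoint.encodeDimension(value.longValue(), point, 0);
        return point;
    }
}

public class NumericPointEncoderSketch {
    public static void main(String[] args) {
        NumericPointEncoder encoder = new ToyLongEncoder();
        byte[] lower = encoder.encodePoint(50L);
        byte[] upper = encoder.encodePoint(150L);
        // Sortable encoding: unsigned byte-wise comparison agrees with value
        // order, which is what the BKD tree traversal in the rewrite relies on.
        System.out.println(Arrays.compareUnsigned(lower, upper) < 0); // true
    }
}
```
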
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java
index c8ce39a52f869..2ab003fb94e33 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java
@@ -10,6 +10,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
@@ -32,24 +33,26 @@
import org.opensearch.index.mapper.DateFieldMapper;
import org.opensearch.index.mapper.DocCountFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
+import org.opensearch.index.mapper.NumericPointEncoder;
import org.opensearch.index.query.DateRangeIncludingNowQuery;
import org.opensearch.search.aggregations.bucket.composite.CompositeAggregator;
import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig;
import org.opensearch.search.aggregations.bucket.composite.RoundingValuesSource;
import org.opensearch.search.aggregations.bucket.histogram.LongBounds;
+import org.opensearch.search.aggregations.bucket.range.RangeAggregator.Range;
+import org.opensearch.search.aggregations.support.ValuesSource;
+import org.opensearch.search.aggregations.support.ValuesSourceConfig;
import org.opensearch.search.internal.SearchContext;
import java.io.IOException;
-import java.util.Arrays;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.OptionalLong;
import java.util.function.BiConsumer;
-import java.util.function.BiFunction;
import java.util.function.Function;
+import static org.opensearch.index.mapper.NumberFieldMapper.NumberType.LONG;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
/**
@@ -187,8 +190,8 @@ public static class FastFilterContext {
private AggregationType aggregationType;
private final SearchContext context;
- private String fieldName;
- private long[][] ranges;
+ private MappedFieldType fieldType;
+ private Ranges ranges;
// debug info related fields
public int leaf;
@@ -196,12 +199,13 @@ public static class FastFilterContext {
public int segments;
public int optimizedSegments;
- public void setFieldName(String fieldName) {
- this.fieldName = fieldName;
+ public FastFilterContext(SearchContext context) {
+ this.context = context;
}
- public FastFilterContext(SearchContext context) {
+ public FastFilterContext(SearchContext context, AggregationType aggregationType) {
this.context = context;
+ this.aggregationType = aggregationType;
}
public AggregationType getAggregationType() {
@@ -221,23 +225,87 @@ public boolean isRewriteable(final Object parent, final int subAggLength) {
return rewriteable;
}
- public void buildRanges() throws IOException {
+ public void buildRanges(MappedFieldType fieldType) throws IOException {
assert ranges == null : "Ranges should only be built once at shard level, but they are already built";
- this.ranges = this.aggregationType.buildRanges(context);
+ this.fieldType = fieldType;
+ this.ranges = this.aggregationType.buildRanges(context, fieldType);
if (ranges != null) {
logger.debug("Ranges built for shard {}", context.indexShard().shardId());
rangesBuiltAtShardLevel = true;
}
}
- public long[][] buildRanges(LeafReaderContext leaf) throws IOException {
- long[][] ranges = this.aggregationType.buildRanges(leaf, context);
+ private Ranges buildRanges(LeafReaderContext leaf) throws IOException {
+ Ranges ranges = this.aggregationType.buildRanges(leaf, context, fieldType);
if (ranges != null) {
logger.debug("Ranges built for shard {} segment {}", context.indexShard().shardId(), leaf.ord);
}
return ranges;
}
+ /**
+ * Try to populate the bucket doc counts for aggregation
+ *
+ * Usage: invoked at segment level — in getLeafCollector of aggregator
+ *
+ * @param bucketOrd bucket ordinal producer
+     * @param incrementDocCount consumes the doc_count results for a given ordinal
+ */
+ public boolean tryFastFilterAggregation(
+ final LeafReaderContext ctx,
+        final BiConsumer<Long, Long> incrementDocCount,
+        final Function<Object, Long> bucketOrd
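
The two functional parameters carry the whole contract between the helper and the aggregator: the helper walks the point tree and reports per-bucket document counts, while the aggregator supplies the key-to-ordinal mapping and the count accumulator. A self-contained sketch of that plumbing, with all names illustrative:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Function;

// The helper never touches per-document collection: it only needs a way to
// turn a bucket key into an ordinal (bucketOrd) and a way to add a doc count
// to that ordinal (incrementDocCount).
public class TryFastFilterSketch {
    public static void main(String[] args) {
        Map<Long, Long> docCounts = new HashMap<>();
        Function<Object, Long> bucketOrd = key -> (Long) key; // identity mapping for the sketch
        BiConsumer<Long, Long> incrementDocCount = (ord, count) -> docCounts.merge(ord, count, Long::sum);

        // Pretend the BKD traversal counted 2 docs in range bucket 0 and 1 doc in bucket 1.
        incrementDocCount.accept(bucketOrd.apply(0L), 2L);
        incrementDocCount.accept(bucketOrd.apply(1L), 1L);

        System.out.println(docCounts); // {0=2, 1=1}
    }
}
```
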