Skip to content

Commit

Permalink
Disable sub-aggregations on the composite date histogram filter-rewrite optimization
Browse files Browse the repository at this point in the history
Signed-off-by: Finn Carroll <[email protected]>
  • Loading branch information
finnegancarroll committed Jul 23, 2024
1 parent 942c4e7 commit 2a69ed3
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,7 @@ public final class CompositeAggregator extends BucketsAggregator {

@Override
public boolean canOptimize() {
if (parent != null || subAggregators.length != 0) return false;
if (canOptimize(sourceConfigs)) {
this.valuesSource = (RoundingValuesSource) sourceConfigs[0].valuesSource();
if (rawAfterKey != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,20 @@

package org.opensearch.search.optimization.filterrewrite;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.PointValues;
import org.opensearch.index.mapper.DateFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.search.aggregations.LeafBucketCollector;
import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig;
import org.opensearch.search.aggregations.bucket.composite.RoundingValuesSource;

import java.io.IOException;
import java.util.List;
import java.util.function.BiConsumer;

import static org.opensearch.search.optimization.filterrewrite.TreeTraversal.multiRangesTraverse;

/**
* For composite aggregation to do optimization when it only has a single date histogram source
*/
Expand All @@ -33,4 +42,17 @@ private boolean canOptimize(boolean missing, boolean hasScript, MappedFieldType
}
return false;
}

/**
 * Attempts the filter-rewrite optimization by traversing the BKD point tree directly.
 * <p>
 * For every range that matches, the range's lower bound is decoded from its packed
 * byte representation, converted from nanoseconds to milliseconds, mapped to a bucket
 * ordinal, and the bucket's doc count is incremented by the number of matching docs.
 * Debug information gathered during the traversal is recorded on the optimization
 * context. The {@code sub} collector is intentionally unused here — this path only
 * runs when there are no sub-aggregators.
 *
 * @param values            point values of the date field for this segment
 * @param incrementDocCount callback adding a doc count to a bucket ordinal
 * @param sub               sub-aggregation collector (unused on this path)
 * @throws IOException on failure reading the point tree
 */
@Override
public final void tryOptimize(PointValues values, BiConsumer<Long, Long> incrementDocCount, final LeafBucketCollector sub) throws IOException {
    final DateFieldMapper.DateFieldType dateFieldType = getFieldType();

    // Invoked once per matched range with the range index and the matching doc IDs.
    final BiConsumer<Integer, List<Integer>> onRangeMatch = (rangeIndex, matchingDocs) -> {
        // Decode the packed lower bound of this range back into a long timestamp.
        long lowerBound = LongPoint.decodeDimension(optimizationContext.getRanges().lowers[rangeIndex], 0);
        // Point values are stored in nanos; bucket keys are in millis.
        long bucketKey = dateFieldType.convertNanosToMillis(lowerBound);
        long bucketOrd = getBucketOrd(bucketOrdProducer().apply(bucketKey));
        incrementDocCount.accept(bucketOrd, (long) matchingDocs.size());
    };

    optimizationContext.consumeDebugInfo(multiRangesTraverse(values.getPointTree(), optimizationContext.getRanges(), onRangeMatch, getSize()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ protected long[] processHardBounds(long[] bounds, LongBounds hardBounds) {
return bounds;
}

private DateFieldMapper.DateFieldType getFieldType() {
protected DateFieldMapper.DateFieldType getFieldType() {
assert fieldType instanceof DateFieldMapper.DateFieldType;
return (DateFieldMapper.DateFieldType) fieldType;
}
Expand All @@ -125,7 +125,7 @@ protected int getSize() {
}

@Override
public final void tryOptimize(PointValues values, BiConsumer<Long, Long> incrementDocCount, final LeafBucketCollector sub) throws IOException {
public void tryOptimize(PointValues values, BiConsumer<Long, Long> incrementDocCount, final LeafBucketCollector sub) throws IOException {
int size = getSize();

DateFieldMapper.DateFieldType fieldType = getFieldType();
Expand All @@ -147,7 +147,7 @@ public final void tryOptimize(PointValues values, BiConsumer<Long, Long> increme
optimizationContext.consumeDebugInfo(multiRangesTraverse(values.getPointTree(), optimizationContext.getRanges(), collectRangeIDs, size));
}

private static long getBucketOrd(long bucketOrd) {
protected static long getBucketOrd(long bucketOrd) {
if (bucketOrd < 0) { // already seen
bucketOrd = -1 - bucketOrd;
}
Expand Down

0 comments on commit 2a69ed3

Please sign in to comment.