Skip to content

Commit

Permalink
add test
Browse files Browse the repository at this point in the history
Signed-off-by: bowenlan-amzn <[email protected]>
  • Loading branch information
bowenlan-amzn committed Feb 9, 2024
1 parent 2baa3fc commit 70b9aad
Show file tree
Hide file tree
Showing 3 changed files with 143 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,115 @@

package org.opensearch.search.aggregations.bucket;

public class FilterRewriteIT {
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.opensearch.search.aggregations.bucket.histogram.Histogram;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
import static org.opensearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;

@OpenSearchIntegTestCase.SuiteScopeTestCase
public class FilterRewriteIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {

    // Simulate a segment-level match-all: every indexed doc carries match=true,
    // so this term query matches all docs in every segment.
    private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true);
    // Expected date-histogram bucket key -> doc count, populated once during suite setup.
    private static final Map<String, Long> expected = new HashMap<>();

    public FilterRewriteIT(Settings dynamicSettings) {
        super(dynamicSettings);
    }

    /** Run the suite once with concurrent segment search disabled and once enabled. */
    @ParametersFactory
    public static Collection<Object[]> parameters() {
        return Arrays.asList(
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
            new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
        );
    }

    @Override
    protected void setupSuiteScopeCluster() throws Exception {
        assertAcked(client().admin().indices().prepareCreate("idx").get());

        // Index docs iteration by iteration; each indexRandom(forceRefresh=true)
        // call flushes, so each iteration's docs tend to land in their own segment.
        // The test then checks aggregation results are correct across segments.
        final int segmentCount = randomIntBetween(2, 10);
        final Set<Long> longTerms = new HashSet<>();

        for (int i = 0; i < segmentCount; i++) {
            final List<IndexRequestBuilder> indexRequests = new ArrayList<>();

            // Draw a long term not used by a previous iteration.
            long longTerm;
            do {
                longTerm = randomInt(segmentCount * 2);
            } while (!longTerms.add(longTerm));
            ZonedDateTime time = ZonedDateTime.of(2024, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC);
            String dateTerm = DateFormatter.forPattern("yyyy-MM-dd").format(time);

            final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20);
            for (int j = 0; j < frequency; j++) {
                indexRequests.add(
                    client().prepareIndex("idx")
                        .setSource(
                            jsonBuilder().startObject()
                                .field("date", dateTerm)
                                .field("match", true)
                                .endObject()
                        )
                );
            }
            // Distinct long terms can still collide on the same calendar day
            // (e.g. 0 and 20 both map to day 1 via % 20), and the aggregation
            // sums doc counts per bucket — so merge instead of put to avoid
            // losing the earlier iteration's frequency.
            expected.merge(dateTerm + "T00:00:00.000Z", (long) frequency, Long::sum);

            indexRandom(true, false, indexRequests);
        }

        logger.info("expected results={}", expected);

        ensureSearchable();
    }

    public void testMinDocCountOnDateHistogram() throws Exception {
        final SearchResponse allResponse = client().prepareSearch("idx")
            .setSize(0)
            .setQuery(QUERY)
            .addAggregation(
                dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0)
            )
            .get();

        final Histogram allHisto = allResponse.getAggregations().get("histo");
        logger.info("allHisto={}", allHisto);
        Map<String, Long> results = new HashMap<>();
        allHisto.getBuckets().forEach(bucket ->
            results.put(bucket.getKeyAsString(), bucket.getDocCount())
        );

        // Every expected bucket must be present with exactly the indexed doc count.
        for (Map.Entry<String, Long> entry : expected.entrySet()) {
            assertEquals(entry.getValue(), results.get(entry.getKey()));
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,19 @@

import com.carrotsearch.randomizedtesting.generators.RandomStrings;

import org.opensearch.action.admin.indices.segments.IndexShardSegments;
import org.opensearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.opensearch.action.admin.indices.segments.IndicesSegmentsRequest;
import org.opensearch.action.admin.indices.segments.ShardSegments;
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.engine.Segment;
import org.opensearch.index.fielddata.ScriptDocValues;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
Expand Down Expand Up @@ -433,6 +441,14 @@ private void testMinDocCountOnHistogram(BucketOrder order) throws Exception {
}

private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception {

IndicesSegmentsRequest segmentReq = new IndicesSegmentsRequest("idx");
IndicesSegmentResponse segmentRes = client().admin().indices().segments(segmentReq).get();
XContentBuilder builder = XContentFactory.jsonBuilder();
segmentRes.toXContent(builder, ToXContent.EMPTY_PARAMS);
String jsonString = builder.toString();
logger.info("segmentRes={}", jsonString);

final SearchResponse allResponse = client().prepareSearch("idx")
.setSize(0)
.setQuery(QUERY)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -424,6 +424,11 @@ public static boolean tryFastFilterAggregation(
if (!fastFilterContext.rewriteable) {
return false;
}
logger.debug(
"try fast filter on Shard {} segment {}",
fastFilterContext.context.indexShard().shardId(),
ctx.ord
);

NumericDocValues docCountValues = DocValues.getNumeric(ctx.reader(), DocCountFieldMapper.NAME);
if (docCountValues.nextDoc() != NO_MORE_DOCS) {
Expand All @@ -442,6 +447,11 @@ public static boolean tryFastFilterAggregation(
}
Weight[] filters = fastFilterContext.filters;
boolean filtersBuiltAtSegmentLevel = false;
logger.debug(
"Shard {} segment {} functionally match all documents",
fastFilterContext.context.indexShard().shardId(),
ctx.ord
);
if (filters == null) {
logger.debug(
"Shard {} segment {} functionally match all documents. Build the fast filter",
Expand Down Expand Up @@ -499,6 +509,12 @@ public static boolean tryFastFilterAggregation(

/**
 * Determines whether the query functionally matches all live documents in the
 * given segment, by comparing the weight's cheap match count against the
 * segment's live doc count.
 *
 * @param ctx the search context providing the searcher and top-level query
 * @param leafCtx the segment (leaf reader context) to check
 * @return {@code true} if the weight reports a match count equal to the
 *         segment's {@code numDocs}; {@code false} otherwise
 * @throws IOException if index access fails while counting
 */
private static boolean segmentMatchAll(SearchContext ctx, LeafReaderContext leafCtx) throws IOException {
    Weight weight = ctx.searcher().createWeight(ctx.query(), ScoreMode.COMPLETE_NO_SCORES, 1f);
    if (weight == null) {
        return false;
    }
    // Weight.count may return -1 when the count cannot be computed cheaply;
    // -1 never equals numDocs, so that case correctly yields false.
    final int count = weight.count(leafCtx);
    final int numDocs = leafCtx.reader().numDocs();
    logger.debug("Shard {} segment {} has {} count and {} num docs", ctx.indexShard().shardId(), leafCtx.ord, count, numDocs);
    // Reuse the values computed above: the original re-invoked weight.count()
    // and re-checked weight != null, both redundant after the early return.
    return count == numDocs;
}
}

0 comments on commit 70b9aad

Please sign in to comment.