Skip to content

Commit

Permalink
Updating terms query to use TermsInSetQuery
Browse files Browse the repository at this point in the history
Signed-off-by: Harsha Vamsi Kalluri <[email protected]>
  • Loading branch information
harshavamsi committed Mar 20, 2024
1 parent 958957b commit ead6713
Show file tree
Hide file tree
Showing 16 changed files with 267 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,13 @@ public void onRemoval(RemovalNotification<K, V> notification) {
.setValueType(builder.cacheConfig.getValueType())
.setSettings(builder.cacheConfig.getSettings())
.setWeigher(builder.cacheConfig.getWeigher())
<<<<<<< HEAD
.setMaxSizeInBytes(builder.cacheConfig.getMaxSizeInBytes())
.setExpireAfterAccess(builder.cacheConfig.getExpireAfterAccess())
=======
.setMaxSizeInBytes(builder.cacheConfig.getMaxSizeInBytes()) // TODO: Part of a workaround for an issue in TSC. Overall fix
// coming soon
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
.build(),
builder.cacheType,
builder.cacheFactories
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,8 +132,17 @@ public void testComputeIfAbsentWithFactoryBasedCacheCreation() throws Exception
.setWeigher((k, v) -> keyValueSize)
.setRemovalListener(removalListener)
.setSettings(settings)
<<<<<<< HEAD
.setCachedResultParser(s -> new CachedQueryResult.PolicyValues(20_000_000L)) // Values will always appear to have taken
// 20_000_000 ns = 20 ms to compute
=======
.setCachedResultParser(new Function<String, CachedQueryResult.PolicyValues>() {
@Override
public CachedQueryResult.PolicyValues apply(String s) {
return new CachedQueryResult.PolicyValues(20_000_000L);
}
}) // Values will always appear to have taken 20_000_000 ns = 20 ms to compute
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
.build(),
CacheType.INDICES_REQUEST_CACHE,
Map.of(
Expand All @@ -152,10 +161,18 @@ public void testComputeIfAbsentWithFactoryBasedCacheCreation() throws Exception
LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader();
tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
}
<<<<<<< HEAD
// Verify on heap cache size.
assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count());
// Verify disk cache size.
assertEquals(numOfItems1 - onHeapCacheSize, tieredSpilloverCache.getDiskCache().count());
=======
tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add);
tieredSpilloverCache.getDiskCache().keys().forEach(diskTierKeys::add);

assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size());
assertEquals(tieredSpilloverCache.getDiskCache().count(), diskTierKeys.size());
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
}

public void testWithFactoryCreationWithOnHeapCacheNotPresent() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,15 @@
import org.opensearch.common.util.io.IOUtils;

import java.io.File;
<<<<<<< HEAD
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
=======
import java.nio.ByteBuffer;
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -557,6 +557,7 @@ public void testEhcacheKeyIteratorWithRemove() throws IOException {

}

<<<<<<< HEAD
public void testInvalidateAll() throws Exception {
Settings settings = Settings.builder().build();
MockRemovalListener<String, String> removalListener = new MockRemovalListener<>();
Expand Down Expand Up @@ -592,6 +593,8 @@ public void testInvalidateAll() throws Exception {
}
}

=======
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
public void testBasicGetAndPutBytesReference() throws Exception {
Settings settings = Settings.builder().build();
try (NodeEnvironment env = newNodeEnvironment(settings)) {
Expand Down Expand Up @@ -636,6 +639,7 @@ public void testBasicGetAndPutBytesReference() throws Exception {
}
}

<<<<<<< HEAD
public void testInvalidate() throws Exception {
Settings settings = Settings.builder().build();
MockRemovalListener<String, String> removalListener = new MockRemovalListener<>();
Expand Down Expand Up @@ -677,6 +681,8 @@ public void testInvalidate() throws Exception {
}
}

=======
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
private static String generateRandomString(int length) {
String characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
StringBuilder randomString = new StringBuilder(length);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,8 +124,13 @@ public void testHistogram() throws Exception {
assertEquals(1.0, histogramPointData.getMin(), 1.0);
}

<<<<<<< HEAD
public void testGauge() throws Exception {
String metricName = "test-gauge";
=======
public void testObservableGauge() throws Exception {
String metricName = "test-observable-gauge";
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
MetricsRegistry metricsRegistry = internalCluster().getInstance(MetricsRegistry.class);
InMemorySingletonMetricsExporter.INSTANCE.reset();
Tags tags = Tags.create().addTag("test", "integ-test");
Expand All @@ -137,7 +142,11 @@ public void testGauge() throws Exception {

InMemorySingletonMetricsExporter exporter = InMemorySingletonMetricsExporter.INSTANCE;

<<<<<<< HEAD
assertTrue(getMaxObservableGaugeValue(exporter, metricName) >= 2.0);
=======
assertEquals(2.0, getMaxObservableGaugeValue(exporter, metricName), 0.0);
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
gaugeCloseable.close();
double observableGaugeValueAfterStop = getMaxObservableGaugeValue(exporter, metricName);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
wait_for_completion: true
task_id: $taskId
- match: { task.action: "indices:admin/forcemerge" }
- match: { task.description: "Force-merge indices [test_index], maxSegments[1], onlyExpungeDeletes[false], flush[true]" }
- match: { task.description: "Force-merge indices [test_index], maxSegments[1], onlyExpungeDeletes[false], flush[true], primaryOnly[false]" }

---
"Force merge index with wait_for_completion after task description changed":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,17 @@

- match: { hits.total: 2 }

- do:
search:
rest_total_hits_as_int: true
index: test-iodvq
body:
query:
terms:
boolean: [true, false]

- match: { hits.total: 3 }

- do:
search:
rest_total_hits_as_int: true
Expand Down Expand Up @@ -665,13 +676,13 @@
- match: {hits.total: 1}

- do:
search:
rest_total_hits_as_int: true
index: test-index
body:
query:
term:
boolean: true
search:
rest_total_hits_as_int: true
index: test-index
body:
query:
term:
boolean: true

- match: { hits.total: 2 }

Expand Down Expand Up @@ -775,6 +786,17 @@

- match: { hits.total: 2 }

- do:
search:
rest_total_hits_as_int: true
index: test-index
body:
query:
terms:
boolean: [true, false]

- match: { hits.total: 3 }

- do:
search:
rest_total_hits_as_int: true
Expand Down Expand Up @@ -1235,6 +1257,17 @@

- match: { hits.total: 2 }

- do:
search:
rest_total_hits_as_int: true
index: test-doc-values
body:
query:
terms:
boolean: [true, false]

- match: { hits.total: 3 }

- do:
search:
rest_total_hits_as_int: true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -926,6 +926,33 @@ private void prepareIndices(String[] indices, int numberOfPrimaryShards, int num
private TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch prepareAndSendRequest(
String[] indices,
DiscoveryNode[] nodes
<<<<<<< HEAD
=======
) {
Map<ShardId, ShardAttributes> shardAttributesMap = null;
prepareIndices(indices, 1, 1);
shardAttributesMap = prepareRequestMap(indices, 1);
TransportNodesListShardStoreMetadataBatch.NodesStoreFilesMetadataBatch response;
return ActionTestUtils.executeBlocking(
internalCluster().getInstance(TransportNodesListShardStoreMetadataBatch.class),
new TransportNodesListShardStoreMetadataBatch.Request(shardAttributesMap, nodes)
);
}

// Asserts the success path of a per-shard store-files-metadata response:
// the fetch produced no exception, the returned metadata is non-empty,
// it belongs to the expected shard, and peer-recovery retention leases
// were populated.
// NOTE(review): this method sits inside an unresolved merge-conflict region
// (between "=======" and ">>>>>>> 7cebf44dd20") — the conflict must be
// resolved before this compiles.
private void assertNodeStoreFilesMetadataSuccessCase(
TransportNodesListShardStoreMetadataBatch.NodeStoreFilesMetadata nodeStoreFilesMetadata,
ShardId shardId
) {
// No per-shard fetch failure was recorded.
assertNull(nodeStoreFilesMetadata.getStoreFileFetchException());
TransportNodesListShardStoreMetadataHelper.StoreFilesMetadata storeFileMetadata = nodeStoreFilesMetadata.storeFilesMetadata();
// Metadata actually contains store files for the shard.
assertFalse(storeFileMetadata.isEmpty());
assertEquals(shardId, storeFileMetadata.shardId());
assertNotNull(storeFileMetadata.peerRecoveryRetentionLeases());
}

private void assertNodeGatewayStartedShardsHappyCase(
TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
) {
Map<ShardId, ShardAttributes> shardAttributesMap = null;
prepareIndices(indices, 1, 1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,11 @@ public ForceMergeRequest(StreamInput in) throws IOException {
maxNumSegments = in.readInt();
onlyExpungeDeletes = in.readBoolean();
flush = in.readBoolean();
<<<<<<< HEAD
if (in.getVersion().onOrAfter(Version.V_2_13_0)) {
=======
if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
primaryOnly = in.readBoolean();
}
if (in.getVersion().onOrAfter(FORCE_MERGE_UUID_VERSION)) {
Expand Down Expand Up @@ -219,7 +223,11 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeInt(maxNumSegments);
out.writeBoolean(onlyExpungeDeletes);
out.writeBoolean(flush);
<<<<<<< HEAD
if (out.getVersion().onOrAfter(Version.V_2_13_0)) {
=======
if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
out.writeBoolean(primaryOnly);
}
if (out.getVersion().onOrAfter(FORCE_MERGE_UUID_VERSION)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,11 @@ public BytesReferenceSerializer() {}

@Override
public byte[] serialize(BytesReference object) {
<<<<<<< HEAD
return BytesReference.toBytesWithoutCompact(object);
=======
return BytesReference.toBytes(object);
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -716,8 +716,11 @@ public void apply(Settings value, Settings current, Settings previous) {
IoBasedAdmissionControllerSettings.SEARCH_IO_USAGE_LIMIT,
IoBasedAdmissionControllerSettings.INDEXING_IO_USAGE_LIMIT,
IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING,
<<<<<<< HEAD
IndicesService.CLUSTER_REMOTE_STORE_PATH_PREFIX_TYPE_SETTING,

=======
>>>>>>> 7cebf44dd20 (Updating terms query to use TermsInSetQuery)
// Concurrent segment search settings
SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING,
SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING
Expand Down
Loading

0 comments on commit ead6713

Please sign in to comment.