Enable Fuzzy codec for doc id fields using a bloom filter
Signed-off-by: mgodwan <[email protected]>
mgodwan committed Jan 25, 2024
1 parent b4306b6 commit 6287359
Showing 21 changed files with 1,459 additions and 2 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -134,6 +134,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563))
- Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876))
- New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465))
- Enable Fuzzy codec for doc id fields using a bloom filter ([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022))

### Dependencies
- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
67 changes: 67 additions & 0 deletions FilterConstructionBenchmark.java
@@ -0,0 +1,67 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.benchmark.index.codec.fuzzy;

import org.apache.lucene.util.BytesRef;
import org.opensearch.common.UUIDs;
import org.opensearch.index.codec.fuzzy.FuzzySet;
import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
import org.opensearch.index.mapper.IdFieldMapper;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

@Fork(3)
@Warmup(iterations = 2)
@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class FilterConstructionBenchmark {

private List<BytesRef> items;

@Param({ "1000000", "10000000", "50000000" })
private int numIds;

@Param({ "0.0511", "0.1023", "0.2047" })
private double fpp;

private FuzzySetFactory fuzzySetFactory;
private String fieldName;

@Setup
public void setupIds() {
this.fieldName = IdFieldMapper.NAME;
this.items = IntStream.range(0, numIds).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
this.fuzzySetFactory = new FuzzySetFactory(Map.of(fieldName, parameters));
}

@Benchmark
public FuzzySet buildFilter() throws IOException {
return fuzzySetFactory.createFuzzySet(items.size(), fieldName, () -> items.iterator());
}
}
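The fpp values above effectively choose how many bits the filter spends per document id. Assuming the fuzzy set is backed by a classic Bloom filter, as the commit title suggests, the standard sizing formulas give a feel for the memory cost of each (numIds, fpp) combination in this benchmark; the sketch below is illustrative only and not part of the commit.

// Illustrative sketch (not part of this commit): approximate Bloom filter sizing
// for the parameter grid used in FilterConstructionBenchmark, assuming a classic
// Bloom filter backs the fuzzy set.
public final class BloomFilterSizing {
    public static void main(String[] args) {
        int[] numIdsValues = { 1_000_000, 10_000_000, 50_000_000 };
        double[] fppValues = { 0.0511, 0.1023, 0.2047 };
        for (int n : numIdsValues) {
            for (double p : fppValues) {
                double bits = -n * Math.log(p) / (Math.log(2) * Math.log(2)); // m = -n * ln(p) / (ln 2)^2
                long hashes = Math.round((bits / n) * Math.log(2));           // k = (m / n) * ln 2
                System.out.printf("n=%d fpp=%.4f -> ~%.1f MiB, k=%d hash functions%n", n, p, bits / 8 / (1 << 20), hashes);
            }
        }
    }
}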
80 changes: 80 additions & 0 deletions FilterLookupBenchmark.java
@@ -0,0 +1,80 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.benchmark.index.codec.fuzzy;

import org.apache.lucene.util.BytesRef;
import org.opensearch.common.UUIDs;
import org.opensearch.index.codec.fuzzy.FuzzySet;
import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
import org.opensearch.index.mapper.IdFieldMapper;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

@Fork(3)
@Warmup(iterations = 2)
@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class FilterLookupBenchmark {

@Param({ "50000000", "1000000" })
private int numItems;

@Param({ "1000000" })
private int searchKeyCount;

@Param({ "0.0511", "0.1023", "0.2047" })
private double fpp;

private FuzzySet fuzzySet;
private List<BytesRef> items;
private Random random = new Random();

@Setup
public void setupFilter() throws IOException {
String fieldName = IdFieldMapper.NAME;
items = IntStream.range(0, numItems).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
fuzzySet = new FuzzySetFactory(Map.of(fieldName, parameters)).createFuzzySet(numItems, fieldName, () -> items.iterator());
}

@Benchmark
public void contains_withExistingKeys(Blackhole blackhole) throws IOException {
for (int i = 0; i < searchKeyCount; i++) {
blackhole.consume(fuzzySet.contains(items.get(random.nextInt(items.size()))) == FuzzySet.Result.MAYBE);
}
}

@Benchmark
public void contains_withRandomKeys(Blackhole blackhole) throws IOException {
for (int i = 0; i < searchKeyCount; i++) {
blackhole.consume(fuzzySet.contains(new BytesRef(UUIDs.base64UUID())));
}
}
}
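In contains_withExistingKeys every lookup is expected to return FuzzySet.Result.MAYBE, since a Bloom-style filter admits no false negatives, while in contains_withRandomKeys the share of MAYBE results should roughly track the configured fpp. A small sanity-check helper along those lines, reusing the imports and setup of the benchmark above (illustrative, not part of the commit):

// Illustrative helper: estimate the observed false-positive rate of a built FuzzySet
// by probing it with ids that were never added. Assumes MAYBE is the only positive
// outcome, as the benchmark above implies.
static double observedFalsePositiveRate(FuzzySet fuzzySet, int trials) throws IOException {
    int falsePositives = 0;
    for (int i = 0; i < trials; i++) {
        if (fuzzySet.contains(new BytesRef(UUIDs.base64UUID())) == FuzzySet.Result.MAYBE) {
            falsePositives++;
        }
    }
    return (double) falsePositives / trials; // should stay close to the configured fpp
}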
@@ -40,10 +40,10 @@
import org.opensearch.common.Booleans;
import org.opensearch.common.io.Streams;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.codec.CodecService;
import org.opensearch.index.engine.EngineConfig;
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.rest.yaml.ObjectPath;

import java.io.IOException;
@@ -345,6 +345,92 @@ public void testIndexingWithSegRep() throws Exception {
}
}

public void testIndexingWithFuzzyFilterPostings() throws Exception {
if (UPGRADE_FROM_VERSION.onOrBefore(Version.V_2_11_1)) {
logger.info("--> Skip test for version {} where fuzzy filter postings format feature is not available", UPGRADE_FROM_VERSION);
return;
}
final String indexName = "test-index-fuzzy-set";
final int shardCount = 3;
final int replicaCount = 1;
logger.info("--> Case {}", CLUSTER_TYPE);
printClusterNodes();
logger.info("--> _cat/shards before test execution \n{}", EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/shards?v")).getEntity()));
switch (CLUSTER_TYPE) {
case OLD:
Settings.Builder settings = Settings.builder()
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), shardCount)
.put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), replicaCount)
.put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
.put(
EngineConfig.INDEX_CODEC_SETTING.getKey(),
randomFrom(new ArrayList<>(CODECS) {
{
add(CodecService.LUCENE_DEFAULT_CODEC);
}
})
)
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms");
createIndex(indexName, settings.build());
waitForClusterHealthWithNoShardMigration(indexName, "green");
bulk(indexName, "_OLD", 5);
break;
case MIXED:
waitForClusterHealthWithNoShardMigration(indexName, "yellow");
break;
case UPGRADED:
Settings.Builder settingsBuilder = Settings.builder()
.put(IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING.getKey(), true);
updateIndexSettings(indexName, settingsBuilder);
waitForClusterHealthWithNoShardMigration(indexName, "green");
break;
default:
throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]");
}

int expectedCount;
switch (CLUSTER_TYPE) {
case OLD:
expectedCount = 5;
break;
case MIXED:
if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) {
expectedCount = 5;
} else {
expectedCount = 10;
}
break;
case UPGRADED:
expectedCount = 15;
break;
default:
throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]");
}

waitForSearchableDocs(indexName, shardCount, replicaCount);
assertCount(indexName, expectedCount);

if (CLUSTER_TYPE != ClusterType.OLD) {
bulk(indexName, "_" + CLUSTER_TYPE, 5);
logger.info("--> Index one doc (to be deleted next) and verify doc count");
Request toBeDeleted = new Request("PUT", "/" + indexName + "/_doc/to_be_deleted");
toBeDeleted.addParameter("refresh", "true");
toBeDeleted.setJsonEntity("{\"f1\": \"delete-me\"}");
client().performRequest(toBeDeleted);
waitForSearchableDocs(indexName, shardCount, replicaCount);
assertCount(indexName, expectedCount + 6);

logger.info("--> Delete previously added doc and verify doc count");
Request delete = new Request("DELETE", "/" + indexName + "/_doc/to_be_deleted");
delete.addParameter("refresh", "true");
client().performRequest(delete);
waitForSearchableDocs(indexName, shardCount, replicaCount);
assertCount(indexName, expectedCount + 5);

//forceMergeAndVerify(indexName, shardCount * (1 + replicaCount));
}
}

public void testAutoIdWithOpTypeCreate() throws IOException {
final String indexName = "auto_id_and_op_type_create_index";
StringBuilder b = new StringBuilder();
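Since index.doc_id_fuzzy_set.enabled is registered as a dynamic setting, it can also be switched on for an existing index through the settings API, which is what the UPGRADED branch of testIndexingWithFuzzyFilterPostings does via updateIndexSettings. A low-level REST client sketch of the same operation (index name and probability value are illustrative placeholders, not part of the commit):

// Sketch: enable the doc-id fuzzy set on an existing index from a REST test context.
Request updateSettings = new Request("PUT", "/test-index-fuzzy-set/_settings");
updateSettings.setJsonEntity(
    "{ \"index.doc_id_fuzzy_set.enabled\": true, \"index.doc_id_fuzzy_set.false_positive_probability\": 0.1023 }"
);
client().performRequest(updateSettings);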
@@ -230,6 +230,9 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING,
IndexMetadata.INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING,

IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING,
IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING,

// validate that built-in similarities don't get redefined
Setting.groupSetting("index.similarity.", (s) -> {
Map<String, Settings> groups = s.getAsGroups();
52 changes: 52 additions & 0 deletions server/src/main/java/org/opensearch/index/IndexSettings.java
@@ -66,6 +66,7 @@

import static org.opensearch.Version.V_2_7_0;
import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY;
import static org.opensearch.index.codec.fuzzy.FuzzySetParameters.DEFAULT_FALSE_POSITIVE_PROBABILITY;
import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING;
import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING;
import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING;
@@ -658,6 +659,22 @@ public static IndexMergePolicy fromString(String text) {
Property.Dynamic
);

public static final Setting<Boolean> INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING = Setting.boolSetting(
"index.doc_id_fuzzy_set.enabled",
false,
Property.IndexScope,
Property.Dynamic
);

public static final Setting<Double> INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING = Setting.doubleSetting(
"index.doc_id_fuzzy_set.false_positive_probability",
DEFAULT_FALSE_POSITIVE_PROBABILITY,
0.01,
0.50,
Property.IndexScope,
Property.Dynamic
);

public static final TimeValue DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL = new TimeValue(650, TimeUnit.MILLISECONDS);
public static final TimeValue MINIMUM_REMOTE_TRANSLOG_BUFFER_INTERVAL = TimeValue.ZERO;
public static final Setting<TimeValue> INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING = Setting.timeSetting(
@@ -787,6 +804,16 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) {
*/
private volatile UnaryOperator<MergePolicy> mergeOnFlushPolicy;

/**
* Is fuzzy set enabled for doc id
*/
private volatile boolean enableFuzzySetForDocId;

/**
* False positive probability to use while creating fuzzy set.
*/
private volatile double docIdFuzzySetFalsePositiveProbability;

/**
* Returns the default search fields for this index.
*/
@@ -926,6 +953,10 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
* Now this sortField (IndexSort) is stored in SegmentInfo and we need to maintain backward compatibility for them.
*/
widenIndexSortType = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).before(V_2_7_0);

enableFuzzySetForDocId = scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING);
docIdFuzzySetFalsePositiveProbability = scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING);

scopedSettings.addSettingsUpdateConsumer(
TieredMergePolicyProvider.INDEX_COMPOUND_FORMAT_SETTING,
tieredMergePolicyProvider::setNoCFSRatio
@@ -1032,6 +1063,11 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti
this::setRemoteTranslogUploadBufferInterval
);
scopedSettings.addSettingsUpdateConsumer(INDEX_REMOTE_TRANSLOG_KEEP_EXTRA_GEN_SETTING, this::setRemoteTranslogKeepExtraGen);
scopedSettings.addSettingsUpdateConsumer(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING, this::setEnableFuzzySetForDocId);
scopedSettings.addSettingsUpdateConsumer(
INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING,
this::setDocIdFuzzySetFalsePositiveProbability
);
}

private void setSearchIdleAfter(TimeValue searchIdleAfter) {
@@ -1801,4 +1837,20 @@ public void setDefaultSearchPipeline(String defaultSearchPipeline) {
public boolean shouldWidenIndexSortType() {
return this.widenIndexSortType;
}

public boolean isEnableFuzzySetForDocId() {
return enableFuzzySetForDocId;
}

public void setEnableFuzzySetForDocId(boolean enableFuzzySetForDocId) {
this.enableFuzzySetForDocId = enableFuzzySetForDocId;
}

public double getDocIdFuzzySetFalsePositiveProbability() {
return docIdFuzzySetFalsePositiveProbability;
}

public void setDocIdFuzzySetFalsePositiveProbability(double docIdFuzzySetFalsePositiveProbability) {
this.docIdFuzzySetFalsePositiveProbability = docIdFuzzySetFalsePositiveProbability;
}

}
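Taken together, the two settings let an index opt in at creation time as well. A minimal sketch using the constants added above (the index name, shard count, and the 0.1023 value are illustrative; the probability must fall within the validated range of 0.01 to 0.50):

// Illustrative sketch: opting an index into the doc-id fuzzy set at creation time.
Settings indexSettings = Settings.builder()
    .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 3)
    .put(IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING.getKey(), true)
    .put(IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING.getKey(), 0.1023)
    .build();
createIndex("my-index", indexSettings); // createIndex helper as used in the upgrade test above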