Skip to content

Commit

Permalink
Merge branch 'opensearch-project:main' into dynamically-add-SearchRequestOperationsListener
Browse files Browse the repository at this point in the history
  • Loading branch information
ansjcy authored Jan 2, 2024
2 parents d3e8c10 + 63f4f13 commit b2deddf
Show file tree
Hide file tree
Showing 39 changed files with 733 additions and 214 deletions.
33 changes: 20 additions & 13 deletions CHANGELOG.md

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions buildSrc/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ dependencies {
api 'org.apache.commons:commons-compress:1.25.0'
api 'org.apache.ant:ant:1.10.14'
api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0'
api 'com.netflix.nebula:nebula-publishing-plugin:20.3.0'
api 'com.netflix.nebula:nebula-publishing-plugin:21.0.0'
api 'com.netflix.nebula:gradle-info-plugin:12.1.6'
api 'org.apache.rat:apache-rat:0.15'
api 'commons-io:commons-io:2.15.1'
Expand All @@ -115,7 +115,7 @@ dependencies {
api 'org.jdom:jdom2:2.0.6.1'
api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}"
api 'de.thetaphi:forbiddenapis:3.6'
api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.5'
api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6'
api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}"
api 'org.apache.maven:maven-model:3.9.6'
api 'com.networknt:json-schema-validator:1.0.86'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ repositories {
}

dependencies {
implementation "org.apache.logging.log4j:log4j-core:2.22.0"
implementation "org.apache.logging.log4j:log4j-core:2.22.1"
}

["0.0.1", "0.0.2"].forEach { v ->
Expand Down
39 changes: 0 additions & 39 deletions libs/core/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -36,45 +36,6 @@ base {
archivesName = 'opensearch-core'
}

// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs
if (!isEclipse) {
sourceSets {
java11 {
java {
srcDirs = ['src/main/java11']
}
}
}

configurations {
java11Compile.extendsFrom(compile)
}

dependencies {
java11Implementation sourceSets.main.output
}

compileJava11Java {
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
}

forbiddenApisJava11 {
if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) {
targetCompatibility = JavaVersion.VERSION_11
}
replaceSignatureFiles 'jdk-signatures'
}

jar {
metaInf {
into 'versions/11'
from sourceSets.java11.output
}
manifest.attributes('Multi-Release': 'true')
}
}

dependencies {
api project(':libs:opensearch-common')

Expand Down
4 changes: 2 additions & 2 deletions modules/ingest-geoip/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,9 @@ opensearchplugin {
}

dependencies {
api('com.maxmind.geoip2:geoip2:4.1.0')
api('com.maxmind.geoip2:geoip2:4.2.0')
// geoip2 dependencies:
api('com.maxmind.db:maxmind-db:3.0.0')
api('com.maxmind.db:maxmind-db:3.1.0')
api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}")

Expand Down
1 change: 0 additions & 1 deletion modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1

This file was deleted.

1 change: 1 addition & 0 deletions modules/ingest-geoip/licenses/geoip2-4.2.0.jar.sha1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
78ff932dc13ac41dd1f0fd9e7405a7f4ad815ce0
1 change: 0 additions & 1 deletion modules/ingest-geoip/licenses/maxmind-db-3.0.0.jar.sha1

This file was deleted.

1 change: 1 addition & 0 deletions modules/ingest-geoip/licenses/maxmind-db-3.1.0.jar.sha1
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2008992ab45d61c7b28a18678b5df82272529da3
2 changes: 1 addition & 1 deletion plugins/ingest-attachment/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ dependencies {
runtimeOnly "com.google.guava:guava:${versions.guava}"
// Other dependencies
api 'org.tukaani:xz:1.9'
api 'commons-io:commons-io:2.15.0'
api 'commons-io:commons-io:2.15.1'
api "org.slf4j:slf4j-api:${versions.slf4j}"

// character set detection
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
f11560da189ab563a5c8e351941415430e9304ea
4 changes: 2 additions & 2 deletions plugins/repository-hdfs/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -70,13 +70,13 @@ dependencies {
api 'com.google.code.gson:gson:2.10.1'
runtimeOnly "com.google.guava:guava:${versions.guava}"
api "commons-logging:commons-logging:${versions.commonslogging}"
api 'commons-cli:commons-cli:1.5.0'
api 'commons-cli:commons-cli:1.6.0'
api "commons-codec:commons-codec:${versions.commonscodec}"
api 'commons-collections:commons-collections:3.2.2'
api "org.apache.commons:commons-compress:${versions.commonscompress}"
api 'org.apache.commons:commons-configuration2:2.9.0'
api 'commons-io:commons-io:2.14.0'
api 'org.apache.commons:commons-lang3:3.13.0'
api 'org.apache.commons:commons-lang3:3.14.0'
implementation 'com.google.re2j:re2j:1.7'
api 'javax.servlet:servlet-api:2.5'
api "org.slf4j:slf4j-api:${versions.slf4j}"
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
38166a23afb5bd5520f739b87b3be87f7f0fb96d

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
1ed471194b02f2c6cb734a0cd6f6f107c673afae
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
# number_of_shards for the target index.

- skip:
version: " - 2.99.99"
reason: "only available in 3.0+"
version: " - 2.4.99"
reason: "max_shard_size was introduced in 2.5.0"
features: allowed_warnings

- do:
Expand Down
39 changes: 0 additions & 39 deletions server/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -57,45 +57,6 @@ sourceSets {
}
}
}
// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs
if (!isEclipse) {
sourceSets {
java11 {
java {
srcDirs = ['src/main/java11']
}
}
}

configurations {
java11Implementation.extendsFrom(api)
}

dependencies {
java11Implementation sourceSets.main.output
}

compileJava11Java {
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
}

tasks.named('forbiddenApisJava11').configure {
doFirst {
if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) {
targetCompatibility = JavaVersion.VERSION_11
}
}
}

jar {
metaInf {
into 'versions/11'
from sourceSets.java11.output
}
manifest.attributes('Multi-Release': 'true')
}
}

dependencies {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,26 @@

import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.opensearch.action.admin.indices.shrink.ResizeType;
import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.index.Index;
import org.opensearch.index.IndexModule;
import org.opensearch.indices.IndicesService;
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.OpenSearchIntegTestCase;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;

import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE;
import static org.opensearch.indices.IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING;
import static org.opensearch.indices.IndicesService.CLUSTER_SETTING_REPLICATION_TYPE;
import static org.hamcrest.Matchers.hasSize;

@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
public class SegmentReplicationClusterSettingIT extends OpenSearchIntegTestCase {
Expand All @@ -29,6 +39,9 @@ public class SegmentReplicationClusterSettingIT extends OpenSearchIntegTestCase
protected static final int SHARD_COUNT = 1;
protected static final int REPLICA_COUNT = 1;

protected static final String REPLICATION_MISMATCH_VALIDATION_ERROR =
"Validation Failed: 1: index setting [index.replication.type] is not allowed to be set as [cluster.index.restrict.replication.type=true];";

@Override
public Settings indexSettings() {
return Settings.builder()
Expand All @@ -44,14 +57,6 @@ protected boolean addMockInternalEngine() {
return false;
}

@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(CLUSTER_SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
.build();
}

public void testIndexReplicationSettingOverridesSegRepClusterSetting() throws Exception {
Settings settings = Settings.builder().put(CLUSTER_SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build();
final String ANOTHER_INDEX = "test-index";
Expand Down Expand Up @@ -123,4 +128,125 @@ public void testIndexReplicationSettingOverridesDocRepClusterSetting() throws Ex
assertEquals(indicesService.indexService(anotherIndex).getIndexSettings().isSegRepEnabled(), false);
}

/**
 * Verifies that the create-index API rejects an index-level replication type that
 * conflicts with the cluster-level one when the
 * {@code cluster.index.restrict.replication.type} setting is enabled.
 */
public void testReplicationTypesOverrideNotAllowed_IndexAPI() {
// Generate mutually exclusive replication strategies at cluster and index level
List<ReplicationType> replicationStrategies = getRandomReplicationTypesAsList();
ReplicationType clusterLevelReplication = replicationStrategies.get(0);
ReplicationType indexLevelReplication = replicationStrategies.get(1);
// Cluster-wide replication type plus the restriction flag that forbids per-index overrides.
Settings nodeSettings = Settings.builder()
.put(CLUSTER_SETTING_REPLICATION_TYPE, clusterLevelReplication)
.put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true)
.build();
internalCluster().startClusterManagerOnlyNode(nodeSettings);
internalCluster().startDataOnlyNode(nodeSettings);
// Index-level setting deliberately conflicts with the cluster-level one, so creation must fail
// with the exact validation message defined in REPLICATION_MISMATCH_VALIDATION_ERROR.
Settings indexSettings = Settings.builder().put(indexSettings()).put(SETTING_REPLICATION_TYPE, indexLevelReplication).build();
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> createIndex(INDEX_NAME, indexSettings));
assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage());
}

/**
 * Verifies that a replication type supplied via an index template is also rejected
 * when it conflicts with the cluster-level replication type and
 * {@code cluster.index.restrict.replication.type} is enabled.
 */
public void testReplicationTypesOverrideNotAllowed_WithTemplates() {
// Generate mutually exclusive replication strategies at cluster and index level
List<ReplicationType> replicationStrategies = getRandomReplicationTypesAsList();
ReplicationType clusterLevelReplication = replicationStrategies.get(0);
ReplicationType templateReplicationType = replicationStrategies.get(1);
// Cluster-wide replication type plus the restriction flag that forbids per-index overrides.
Settings nodeSettings = Settings.builder()
.put(CLUSTER_SETTING_REPLICATION_TYPE, clusterLevelReplication)
.put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true)
.build();
internalCluster().startClusterManagerOnlyNode(nodeSettings);
internalCluster().startDataOnlyNode(nodeSettings);
internalCluster().startDataOnlyNode(nodeSettings);
logger.info(
"--> Create index with template replication {} and cluster level replication {}",
templateReplicationType,
clusterLevelReplication
);
// Create index template
// The template carries the conflicting replication type; it matches "test-idx*".
// NOTE(review): INDEX_NAME must match that pattern for the template to apply — confirm its value.
client().admin()
.indices()
.preparePutTemplate("template_1")
.setPatterns(Collections.singletonList("test-idx*"))
.setSettings(Settings.builder().put(indexSettings()).put(SETTING_REPLICATION_TYPE, templateReplicationType).build())
.setOrder(0)
.get();

// Sanity-check that exactly one template is registered before creating the index.
GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get();
assertThat(response.getIndexTemplates(), hasSize(1));
// Index creation picks up the template's conflicting setting and must fail validation.
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> createIndex(INDEX_NAME));
assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage());
}

/**
 * Verifies that a resize action (SPLIT / SHRINK / CLONE, chosen at random) cannot set a
 * target-index replication type that conflicts with the cluster-level replication type
 * while {@code cluster.index.restrict.replication.type} is enabled.
 */
public void testReplicationTypesOverrideNotAllowed_WithResizeAction() {
// Generate mutually exclusive replication strategies at cluster and index level
List<ReplicationType> replicationStrategies = getRandomReplicationTypesAsList();
ReplicationType clusterLevelReplication = replicationStrategies.get(0);
ReplicationType indexLevelReplication = replicationStrategies.get(1);
// Cluster-wide replication type plus the restriction flag that forbids per-index overrides.
Settings nodeSettings = Settings.builder()
.put(CLUSTER_SETTING_REPLICATION_TYPE, clusterLevelReplication)
.put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true)
.build();
internalCluster().startClusterManagerOnlyNode(nodeSettings);
internalCluster().startDataOnlyNode(nodeSettings);
internalCluster().startDataOnlyNode(nodeSettings);
// NOTE(review): this message says "index level replication" but the source index below is
// created with clusterLevelReplication; the conflicting type is applied via the resize
// request instead — consider rewording the log message.
logger.info(
"--> Create index with index level replication {} and cluster level replication {}",
indexLevelReplication,
clusterLevelReplication
);

// Define resize action and target shard count.
// SPLIT doubles shards, SHRINK reduces to SHARD_COUNT, CLONE keeps the count unchanged.
List<Tuple<ResizeType, Integer>> resizeActionsList = new ArrayList<>();
final int initialShardCount = 2;
resizeActionsList.add(new Tuple<>(ResizeType.SPLIT, 2 * initialShardCount));
resizeActionsList.add(new Tuple<>(ResizeType.SHRINK, SHARD_COUNT));
resizeActionsList.add(new Tuple<>(ResizeType.CLONE, initialShardCount));

// Pick one resize action at random; each iteration of the test exercises a single action.
Tuple<ResizeType, Integer> resizeActionTuple = resizeActionsList.get(random().nextInt(resizeActionsList.size()));
final String targetIndexName = resizeActionTuple.v1().name().toLowerCase(Locale.ROOT) + "-target";

logger.info("--> Performing resize action {} with shard count {}", resizeActionTuple.v1(), resizeActionTuple.v2());

// Source index intentionally matches the cluster-level replication type so that only
// the resize request introduces the conflict.
Settings indexSettings = Settings.builder()
.put(indexSettings())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, initialShardCount)
.put(SETTING_REPLICATION_TYPE, clusterLevelReplication)
.build();
createIndex(INDEX_NAME, indexSettings);

// Block writes
// Resize operations require the source index to be write-blocked.
client().admin().indices().prepareUpdateSettings(INDEX_NAME).setSettings(Settings.builder().put("index.blocks.write", true)).get();
ensureGreen();

// Validate resize action fails
// The target settings request the conflicting replication type, which must be rejected
// with the exact validation message defined in REPLICATION_MISMATCH_VALIDATION_ERROR.
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> client().admin()
.indices()
.prepareResizeIndex(INDEX_NAME, targetIndexName)
.setResizeType(resizeActionTuple.v1())
.setSettings(
Settings.builder()
.put("index.number_of_replicas", 0)
.put("index.number_of_shards", resizeActionTuple.v2())
.putNull("index.blocks.write")
.put(SETTING_REPLICATION_TYPE, indexLevelReplication)
.build()
)
.get()
);
assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage());
}

/**
 * Returns both replication strategies (SEGMENT and DOCUMENT) in a random order.
 *
 * @return a two-element list containing both {@link ReplicationType} values, randomly ordered
 */
private List<ReplicationType> getRandomReplicationTypesAsList() {
    final List<ReplicationType> candidates = List.of(ReplicationType.SEGMENT, ReplicationType.DOCUMENT);
    // Choose one index at random; the other element (1 - chosen) goes second.
    final int chosen = random().nextInt(candidates.size());
    return List.of(candidates.get(chosen), candidates.get(1 - chosen));
}
}
Loading

0 comments on commit b2deddf

Please sign in to comment.