diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml
index 3697750dab97a..923c82028cd1b 100644
--- a/.github/workflows/links.yml
+++ b/.github/workflows/links.yml
@@ -13,7 +13,7 @@ jobs:
       - uses: actions/checkout@v4
       - name: lychee Link Checker
         id: lychee
-        uses: lycheeverse/lychee-action@v2.1.0
+        uses: lycheeverse/lychee-action@v2.2.0
         with:
           args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes
           fail: true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5029909a25fcf..45bc56b505eb3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,7 +21,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Support prefix list for remote repository attributes([#16271](https://github.com/opensearch-project/OpenSearch/pull/16271))
 - Add new configuration setting `synonym_analyzer`, to the `synonym` and `synonym_graph` filters, enabling the specification of a custom analyzer for reading the synonym file ([#16488](https://github.com/opensearch-project/OpenSearch/pull/16488)).
 - Add stats for remote publication failure and move download failure stats to remote methods([#16682](https://github.com/opensearch-project/OpenSearch/pull/16682/))
+- Update scripts now support the `java.lang.String.sha1()` and `java.lang.String.sha256()` methods ([#16923](https://github.com/opensearch-project/OpenSearch/pull/16923))
 - Added a precaution to handle extreme date values during sorting to prevent `arithmetic_exception: long overflow` ([#16812](https://github.com/opensearch-project/OpenSearch/pull/16812)).
+- Add search replica stats to segment replication stats API ([#16678](https://github.com/opensearch-project/OpenSearch/pull/16678))
+- Introduce a setting to disable download of full cluster state from remote on term mismatch ([#16798](https://github.com/opensearch-project/OpenSearch/pull/16798/))
+- Added ability to retrieve values from DocValues in a flat_object field ([#16802](https://github.com/opensearch-project/OpenSearch/pull/16802))
+- Introduce framework for auxiliary transports and an experimental gRPC transport plugin ([#16534](https://github.com/opensearch-project/OpenSearch/pull/16534))
 
 ### Dependencies
 - Bump `com.google.cloud:google-cloud-core-http` from 2.23.0 to 2.47.0 ([#16504](https://github.com/opensearch-project/OpenSearch/pull/16504))
@@ -30,18 +35,26 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `com.google.apis:google-api-services-compute` from v1-rev20240407-2.0.0 to v1-rev20241105-2.0.0 ([#16502](https://github.com/opensearch-project/OpenSearch/pull/16502), [#16548](https://github.com/opensearch-project/OpenSearch/pull/16548), [#16613](https://github.com/opensearch-project/OpenSearch/pull/16613))
 - Bump `com.azure:azure-storage-blob` from 12.23.0 to 12.28.1 ([#16501](https://github.com/opensearch-project/OpenSearch/pull/16501))
 - Bump `org.apache.hadoop:hadoop-minicluster` from 3.4.0 to 3.4.1 ([#16550](https://github.com/opensearch-project/OpenSearch/pull/16550))
-- Bump `org.apache.xmlbeans:xmlbeans` from 5.2.1 to 5.2.2 ([#16612](https://github.com/opensearch-project/OpenSearch/pull/16612))
+- Bump `org.apache.xmlbeans:xmlbeans` from 5.2.1 to 5.3.0 ([#16612](https://github.com/opensearch-project/OpenSearch/pull/16612), [#16854](https://github.com/opensearch-project/OpenSearch/pull/16854))
 - Bump `com.nimbusds:nimbus-jose-jwt` from 9.41.1 to 9.47 ([#16611](https://github.com/opensearch-project/OpenSearch/pull/16611), [#16807](https://github.com/opensearch-project/OpenSearch/pull/16807))
-- Bump `lycheeverse/lychee-action` from 2.0.2 to 2.1.0 ([#16610](https://github.com/opensearch-project/OpenSearch/pull/16610))
+- Bump `lycheeverse/lychee-action` from 2.0.2 to 2.2.0 ([#16610](https://github.com/opensearch-project/OpenSearch/pull/16610), [#16897](https://github.com/opensearch-project/OpenSearch/pull/16897))
 - Bump `me.champeau.gradle.japicmp` from 0.4.4 to 0.4.5 ([#16614](https://github.com/opensearch-project/OpenSearch/pull/16614))
 - Bump `mockito` from 5.14.1 to 5.14.2, `objenesis` from 3.2 to 3.3 and `bytebuddy` from 1.15.4 to 1.15.10 ([#16655](https://github.com/opensearch-project/OpenSearch/pull/16655))
 - Bump `Netty` from 4.1.114.Final to 4.1.115.Final ([#16661](https://github.com/opensearch-project/OpenSearch/pull/16661))
 - Bump `org.xerial.snappy:snappy-java` from 1.1.10.6 to 1.1.10.7 ([#16665](https://github.com/opensearch-project/OpenSearch/pull/16665))
 - Bump `codecov/codecov-action` from 4 to 5 ([#16667](https://github.com/opensearch-project/OpenSearch/pull/16667))
-- Bump `org.apache.logging.log4j:log4j-core` from 2.24.1 to 2.24.2 ([#16718](https://github.com/opensearch-project/OpenSearch/pull/16718))
+- Bump `org.apache.logging.log4j:log4j-core` from 2.24.1 to 2.24.3 ([#16718](https://github.com/opensearch-project/OpenSearch/pull/16718), [#16858](https://github.com/opensearch-project/OpenSearch/pull/16858))
 - Bump `jackson` from 2.17.2 to 2.18.2 ([#16733](https://github.com/opensearch-project/OpenSearch/pull/16733))
-- Bump `ch.qos.logback:logback-classic` from 1.2.13 to 1.5.12 ([#16716](https://github.com/opensearch-project/OpenSearch/pull/16716))
+- Bump `ch.qos.logback:logback-classic` from 1.2.13 to 1.5.15 ([#16716](https://github.com/opensearch-project/OpenSearch/pull/16716), [#16898](https://github.com/opensearch-project/OpenSearch/pull/16898))
 - Bump `com.azure:azure-identity` from 1.13.2 to 1.14.2 ([#16778](https://github.com/opensearch-project/OpenSearch/pull/16778))
+- Bump Apache Lucene from 9.12.0 to 9.12.1 ([#16846](https://github.com/opensearch-project/OpenSearch/pull/16846))
+- Bump `com.gradle.develocity` from 3.18.2 to 3.19 ([#16855](https://github.com/opensearch-project/OpenSearch/pull/16855))
+- Bump `org.jline:jline` from 3.27.1 to 3.28.0 ([#16857](https://github.com/opensearch-project/OpenSearch/pull/16857))
+- Bump `com.azure:azure-core` from 1.51.0 to 1.54.1 ([#16856](https://github.com/opensearch-project/OpenSearch/pull/16856))
+- Bump `com.nimbusds:oauth2-oidc-sdk` from 11.19.1 to 11.20.1 ([#16895](https://github.com/opensearch-project/OpenSearch/pull/16895))
+- Bump `com.netflix.nebula.ospackage-base` from 11.10.0 to 11.10.1 ([#16896](https://github.com/opensearch-project/OpenSearch/pull/16896))
+- Bump `com.microsoft.azure:msal4j` from 1.17.2 to 1.18.0 ([#16918](https://github.com/opensearch-project/OpenSearch/pull/16918))
+- Bump `org.apache.commons:commons-text` from 1.12.0 to 1.13.0 ([#16919](https://github.com/opensearch-project/OpenSearch/pull/16919))
 
 ### Changed
 - Indexed IP field supports `terms_query` with more than 1025 IP masks [#16391](https://github.com/opensearch-project/OpenSearch/pull/16391)
@@ -66,6 +79,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bound the size of cache in deprecation logger ([16702](https://github.com/opensearch-project/OpenSearch/issues/16702))
 - Ensure consistency of system flag on IndexMetadata after diff is applied ([#16644](https://github.com/opensearch-project/OpenSearch/pull/16644))
 - Skip remote-repositories validations for node-joins when RepositoriesService is not in sync with cluster-state ([#16763](https://github.com/opensearch-project/OpenSearch/pull/16763))
+- Fix _list/shards API failing when closed indices are present ([#16606](https://github.com/opensearch-project/OpenSearch/pull/16606))
+- Fix remote shards balance ([#15335](https://github.com/opensearch-project/OpenSearch/pull/15335))
 
 ### Security
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
index a74781ac44720..6842f0e541abe 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
@@ -229,8 +229,7 @@ public Set<File> getJarsToScan() {
     @TaskAction
     public void runThirdPartyAudit() throws IOException {
         Set<File> jars = getJarsToScan();
-
-        extractJars(jars);
+        Set<File> extractedJars = extractJars(jars);
 
         final String forbiddenApisOutput = runForbiddenAPIsCli();
 
@@ -248,7 +247,7 @@ public void runThirdPartyAudit() throws IOException {
 
         Set<String> jdkJarHellClasses = null;
         if (this.jarHellEnabled) {
-            jdkJarHellClasses = runJdkJarHellCheck();
+            jdkJarHellClasses = runJdkJarHellCheck(extractedJars);
         }
 
         if (missingClassExcludes != null) {
@@ -301,16 +300,26 @@ private void logForbiddenAPIsOutput(String forbiddenApisOutput) {
         getLogger().error("Forbidden APIs output:\n{}==end of forbidden APIs==", forbiddenApisOutput);
     }
 
-    private void extractJars(Set<File> jars) {
+    /**
+     * Extracts project jars to the build directory specified by getJarExpandDir.
+     * Handles multi-release jars by keeping the versions closest to the `targetCompatibility` version.
+     * @param jars jars to extract to the build dir
+     * @return File set of extracted jar directories
+     */
+    private Set<File> extractJars(Set<File> jars) {
+        Set<File> extractedJars = new TreeSet<>();
         File jarExpandDir = getJarExpandDir();
         // We need to clean up to make sure old dependencies don't linger
         getProject().delete(jarExpandDir);
 
         jars.forEach(jar -> {
+            String jarPrefix = jar.getName().replace(".jar", "");
+            File jarSubDir = new File(jarExpandDir, jarPrefix);
+            extractedJars.add(jarSubDir);
             FileTree jarFiles = getProject().zipTree(jar);
             getProject().copy(spec -> {
                 spec.from(jarFiles);
-                spec.into(jarExpandDir);
+                spec.into(jarSubDir);
                 // exclude classes from multi release jars
                 spec.exclude("META-INF/versions/**");
             });
@@ -329,7 +338,7 @@ private void extractJars(Set<File> jars) {
                 Integer.parseInt(targetCompatibility.get().getMajorVersion())
             ).forEach(majorVersion -> getProject().copy(spec -> {
                 spec.from(getProject().zipTree(jar));
-                spec.into(jarExpandDir);
+                spec.into(jarSubDir);
                 String metaInfPrefix = "META-INF/versions/" + majorVersion;
                 spec.include(metaInfPrefix + "/**");
                 // Drop the version specific prefix
@@ -337,6 +346,8 @@ private void extractJars(Set<File> jars) {
                 spec.setIncludeEmptyDirs(false);
             }));
         });
+
+        return extractedJars;
     }
 
     private void assertNoJarHell(Set<String> jdkJarHellClasses) {
@@ -398,7 +409,12 @@ private String runForbiddenAPIsCli() throws IOException {
         return forbiddenApisOutput;
     }
 
-    private Set<String> runJdkJarHellCheck() throws IOException {
+    /**
+     * Executes java with JDK_JAR_HELL_MAIN_CLASS against the provided jars, with OpenSearch core on the classpath.
+     * @param jars jars to scan for jarHell violations
+     * @return standard out of the jarHell process
+     */
+    private Set<String> runJdkJarHellCheck(Set<File> jars) throws IOException {
         ByteArrayOutputStream standardOut = new ByteArrayOutputStream();
         InjectedExecOps execOps = getProject().getObjects().newInstance(InjectedExecOps.class);
         ExecResult execResult = execOps.getExecOps().javaexec(spec -> {
@@ -407,9 +423,8 @@ private Set<String> runJdkJarHellCheck() throws IOException {
                 getRuntimeConfiguration(),
                 getProject().getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME)
             );
-
             spec.getMainClass().set(JDK_JAR_HELL_MAIN_CLASS);
-            spec.args(getJarExpandDir());
+            spec.args(jars);
            spec.setIgnoreExitValue(true);
            if (javaHome != null) {
                spec.setExecutable(javaHome + "/bin/java");
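The key change in this task is that each jar now expands into its own subdirectory (named after the jar), so the jar-hell check can attribute duplicate classes to specific jars instead of scanning one merged directory. A minimal standalone sketch of that layout, using plain `java.util.zip` instead of Gradle's copy specs (names are illustrative, and it skips the version-selection step the task performs for multi-release jars):

```java
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/** Illustration only: expand each jar into its own subdirectory, dropping multi-release variants. */
final class JarExtractionSketch {
    static Path extractToOwnDir(File jar, Path expandDir) throws IOException {
        // One subdirectory per jar, e.g. "log4j-core-2.24.3.jar" -> expandDir/log4j-core-2.24.3
        Path jarSubDir = expandDir.resolve(jar.getName().replace(".jar", ""));
        try (ZipFile zip = new ZipFile(jar)) {
            Enumeration<? extends ZipEntry> entries = zip.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                // Like the Gradle copy spec above, exclude "META-INF/versions/**" so only
                // the base class versions end up in the scan directory.
                if (entry.isDirectory() || entry.getName().startsWith("META-INF/versions/")) {
                    continue;
                }
                Path target = jarSubDir.resolve(entry.getName());
                Files.createDirectories(target.getParent());
                try (InputStream in = zip.getInputStream(entry)) {
                    Files.copy(in, target);
                }
            }
        }
        return jarSubDir; // collected by the caller, mirroring extractedJars in the task
    }
}
```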
diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
index 3db2a6e7c2733..83bec727b1502 100644
--- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
+++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
@@ -17,7 +17,7 @@ repositories {
 }
 
 dependencies {
-  implementation "org.apache.logging.log4j:log4j-core:2.24.2"
+  implementation "org.apache.logging.log4j:log4j-core:2.24.3"
 }
 
 ["0.0.1", "0.0.2"].forEach { v ->
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index 25af649bb4aed..e1fa4de5a0caa 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
  */
 
 plugins {
-  id "com.netflix.nebula.ospackage-base" version "11.10.0"
+  id "com.netflix.nebula.ospackage-base" version "11.10.1"
 }
 
 void addProcessFilesTask(String type, boolean jdk) {
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index be59e1d3a5ab6..f357fb248520c 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -1,6 +1,6 @@
 [versions]
 opensearch = "3.0.0"
-lucene = "9.12.0"
+lucene = "9.12.1"
 
 bundled_jdk_vendor = "adoptium"
 bundled_jdk = "23.0.1+11"
@@ -27,7 +27,7 @@ google_http_client = "1.44.1"
 google_auth = "1.29.0"
 tdigest = "3.3"
 hdrhistogram = "2.2.2"
-grpc = "1.68.0"
+grpc = "1.68.2"
 
 # when updating the JNA version, also update the version in buildSrc/build.gradle
 jna = "5.13.0"
diff --git a/gradle/missing-javadoc.gradle b/gradle/missing-javadoc.gradle
index 751da941d25dd..5a98a60e806ea 100644
--- a/gradle/missing-javadoc.gradle
+++ b/gradle/missing-javadoc.gradle
@@ -170,7 +170,6 @@ configure([
   project(":libs:opensearch-common"),
   project(":libs:opensearch-core"),
   project(":libs:opensearch-compress"),
-  project(":plugins:events-correlation-engine"),
   project(":server")
 ]) {
   project.tasks.withType(MissingJavadocTask) {
diff --git a/libs/core/licenses/lucene-core-9.12.0.jar.sha1 b/libs/core/licenses/lucene-core-9.12.0.jar.sha1
deleted file mode 100644
index e55f896dedb63..0000000000000
--- a/libs/core/licenses/lucene-core-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fdb055d569bb20bfce9618fe2b01c29bab7f290c
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.12.1.jar.sha1 b/libs/core/licenses/lucene-core-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..2521c91a81d64
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.12.1.jar.sha1
@@ -0,0 +1 @@
+91447c90c1180122142773b5baddaf8547124794
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index ec0a18dbbf882..dd804fcc6db70 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -112,9 +112,9 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_2_17_1 = new Version(2170199, org.apache.lucene.util.Version.LUCENE_9_11_1);
     public static final Version V_2_17_2 = new Version(2170299, org.apache.lucene.util.Version.LUCENE_9_11_1);
     public static final Version V_2_18_0 = new Version(2180099, org.apache.lucene.util.Version.LUCENE_9_12_0);
-    public static final Version V_2_18_1 = new Version(2180199, org.apache.lucene.util.Version.LUCENE_9_12_0);
-    public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_12_0);
-    public static final Version V_2_19_0 = new Version(2190099, org.apache.lucene.util.Version.LUCENE_9_12_0);
+    public static final Version V_2_18_1 = new Version(2180199, org.apache.lucene.util.Version.LUCENE_9_12_1);
+    public static final Version V_2_19_0 = new Version(2190099, org.apache.lucene.util.Version.LUCENE_9_12_1);
+    public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_12_1);
     public static final Version CURRENT = V_3_0_0;
 
     public static Version fromId(int id) {
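The numeric ids in these `Version` constants follow a positional encoding that can be read off the values themselves (e.g. `2180199` is 2.18.1 with build suffix 99). A small sketch of that decomposition, inferred from the constants above:

```java
/** Sketch: how the Version ids above decompose (inferred from, e.g., 2180199 == V_2_18_1). */
final class VersionIdSketch {
    static int id(int major, int minor, int revision, int build) {
        // major * 1_000_000 + minor * 10_000 + revision * 100 + build suffix
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(id(2, 18, 1, 99)); // 2180199 -> V_2_18_1
        System.out.println(id(2, 19, 0, 99)); // 2190099 -> V_2_19_0
        System.out.println(id(3, 0, 0, 99));  // 3000099 -> V_3_0_0
    }
}
```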
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1
deleted file mode 100644
index 476049a66cc08..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5ba843374a0aab3dfe0b11cb28b251844d85bf5b
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.12.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..9e0a5c2d7df21
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.12.1.jar.sha1
@@ -0,0 +1 @@
+667ee99f31c8e42eac70b0adcf8deb4232935430
\ No newline at end of file
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java
index 55dc23f665d2e..b3f6f7d0730fd 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java
@@ -66,6 +66,7 @@
 import org.opensearch.script.ScriptContext;
 import org.opensearch.script.ScriptEngine;
 import org.opensearch.script.ScriptService;
+import org.opensearch.script.UpdateScript;
 import org.opensearch.search.aggregations.pipeline.MovingFunctionScript;
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.watcher.ResourceWatcherService;
@@ -109,6 +110,11 @@ public final class PainlessModulePlugin extends Plugin implements ScriptPlugin,
         ingest.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.ingest.txt"));
         map.put(IngestScript.CONTEXT, ingest);
 
+        // Functions available to update scripts
+        List<Allowlist> update = new ArrayList<>(Allowlist.BASE_ALLOWLISTS);
+        update.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.update.txt"));
+        map.put(UpdateScript.CONTEXT, update);
+
         // Functions available to derived fields
         List<Allowlist> derived = new ArrayList<>(Allowlist.BASE_ALLOWLISTS);
         derived.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.derived.txt"));
diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt
new file mode 100644
index 0000000000000..144614b3862b0
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.update.txt
@@ -0,0 +1,14 @@
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+
+# This file contains an allowlist for the update scripts
+
+class java.lang.String {
+  String org.opensearch.painless.api.Augmentation sha1()
+  String org.opensearch.painless.api.Augmentation sha256()
+}
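This allowlist exposes the existing Painless `Augmentation` digest helpers to the update-script context. As a sanity check, the expected values asserted in the YAML tests below are plain hex-encoded digests of the UTF-8 string. A JDK-only sketch that reproduces them (assuming the augmentation hex-encodes `MessageDigest` output, which the test vectors bear out):

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

/** Reproduces the digest values asserted in the YAML tests in this patch. */
final class DigestSketch {
    static String hexDigest(String algorithm, String value) throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance(algorithm).digest(value.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            // lowercase hex, two characters per byte
            hex.append(Character.forDigit((b >> 4) & 0xF, 16)).append(Character.forDigit(b & 0xF, 16));
        }
        return hex.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(hexDigest("SHA-1", "bar"));    // 62cdb7020ff920e5aa642c3d4066950dd1f01f4d
        System.out.println(hexDigest("SHA-256", "bar"));  // fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9
        System.out.println(hexDigest("SHA-1", "foobar")); // 8843d7f92416211de9ebb963ff4ce28125932878
    }
}
```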
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
index cb118ed9d562f..e0f3068810ed8 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
@@ -123,3 +123,39 @@
   - match: { error.root_cause.0.type: "illegal_argument_exception" }
   - match: { error.type: "illegal_argument_exception" }
   - match: { error.reason: "Iterable object is self-referencing itself" }
+
+# update script supports java.lang.String.sha1() and java.lang.String.sha256() methods
+# related issue: https://github.com/opensearch-project/OpenSearch/issues/16423
+---
+"Update script supports sha1() and sha256() method for strings":
+  - skip:
+      version: " - 2.18.99"
+      reason: "introduced in 2.19.0"
+  - do:
+      index:
+        index: test_1
+        id: 1
+        body:
+          foo: bar
+
+  - do:
+      update:
+        index: test_1
+        id: 1
+        body:
+          script:
+            lang: painless
+            source: "ctx._source.foo_sha1 = ctx._source.foo.sha1();ctx._source.foo_sha256 = ctx._source.foo.sha256();"
+
+  - match: { _index: test_1 }
+  - match: { _id: "1" }
+  - match: { _version: 2 }
+
+  - do:
+      get:
+        index: test_1
+        id: 1
+
+  - match: { _source.foo: bar }
+  - match: { _source.foo_sha1: "62cdb7020ff920e5aa642c3d4066950dd1f01f4d" }
+  - match: { _source.foo_sha256: "fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9" }
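For orientation, the same update as the YAML test expressed from Java. This is a hedged sketch using request classes that appear elsewhere in this codebase; the client wiring is omitted and `test_1`/`foo` mirror the test fixture rather than any required naming:

```java
import org.opensearch.action.update.UpdateRequest;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptType;

import java.util.Collections;

/** Sketch: applying the sha1()/sha256() update script programmatically. */
final class UpdateScriptExample {
    static UpdateRequest shaUpdate() {
        Script script = new Script(
            ScriptType.INLINE,
            "painless",
            "ctx._source.foo_sha1 = ctx._source.foo.sha1();" + "ctx._source.foo_sha256 = ctx._source.foo.sha256();",
            Collections.emptyMap()
        );
        // client.update(request).actionGet() would execute this against a cluster
        return new UpdateRequest("test_1", "1").script(script);
    }
}
```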
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml
index 9c38b13bb1ff0..5c218aa00ca4f 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml
@@ -440,3 +440,41 @@
         lang: painless
         source: syntax errors are fun!
   - match: {error.reason: 'compile error'}
+
+# script in reindex supports java.lang.String.sha1() and java.lang.String.sha256() methods
+# related issue: https://github.com/opensearch-project/OpenSearch/issues/16423
+---
+"Script supports sha1() and sha256() method for strings":
+  - skip:
+      version: " - 2.18.99"
+      reason: "introduced in 2.19.0"
+  - do:
+      index:
+        index: twitter
+        id: 1
+        body: { "user": "foobar" }
+  - do:
+      indices.refresh: {}
+
+  - do:
+      reindex:
+        refresh: true
+        body:
+          source:
+            index: twitter
+          dest:
+            index: new_twitter
+          script:
+            lang: painless
+            source: ctx._source.user_sha1 = ctx._source.user.sha1();ctx._source.user_sha256 = ctx._source.user.sha256()
+  - match: {created: 1}
+  - match: {noops: 0}
+
+  - do:
+      get:
+        index: new_twitter
+        id: 1
+
+  - match: { _source.user: foobar }
+  - match: { _source.user_sha1: "8843d7f92416211de9ebb963ff4ce28125932878" }
+  - match: { _source.user_sha256: "c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2" }
diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml
index a8de49d812677..b52b1428e08bb 100644
--- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml
+++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml
@@ -432,3 +432,38 @@
         lang: painless
         source: syntax errors are fun!
   - match: {error.reason: 'compile error'}
+
+# script in update_by_query supports java.lang.String.sha1() and java.lang.String.sha256() methods
+# related issue: https://github.com/opensearch-project/OpenSearch/issues/16423
+---
+"Script supports sha1() and sha256() method for strings":
+  - skip:
+      version: " - 2.18.99"
+      reason: "introduced in 2.19.0"
+  - do:
+      index:
+        index: twitter
+        id: 1
+        body: { "user": "foobar" }
+  - do:
+      indices.refresh: {}
+
+  - do:
+      update_by_query:
+        index: twitter
+        refresh: true
+        body:
+          script:
+            lang: painless
+            source: ctx._source.user_sha1 = ctx._source.user.sha1();ctx._source.user_sha256 = ctx._source.user.sha256()
+  - match: {updated: 1}
+  - match: {noops: 0}
+
+  - do:
+      get:
+        index: twitter
+        id: 1
+
+  - match: { _source.user: foobar }
+  - match: { _source.user_sha1: "8843d7f92416211de9ebb963ff4ce28125932878" }
+  - match: { _source.user_sha256: "c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2" }
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1
deleted file mode 100644
index 31398b27708a3..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a9232b6a4882979118d3281b98dfdb6e0e1cb5ca
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..acb73de8b5dc9
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.1.jar.sha1
@@ -0,0 +1 @@
+abaef4767ad64289e62abdd4606bf6ed2ddea0fd
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1
deleted file mode 100644
index fa4c9d2d09d6e..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a3a6950ffc22e76a082e1b3cefb022b9f7870d29
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..916778086a6bd
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.1.jar.sha1
@@ -0,0 +1 @@
+635c41143b896f402589d29e33695dcfabae9cc5
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1
deleted file mode 100644
index 576b924286d2d..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e71f85b72ed3939039ba8897b28b065dd11918b9
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..9c057370df5d1
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.1.jar.sha1
@@ -0,0 +1 @@
+e265410a6a4d9cd23b2e9c73321e6bd307bc1422
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1
deleted file mode 100644
index c8c146bbd0d25..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6baa3ae7ab20d6e644cf0bedb271c50a44c0e259
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..30db9fc8d69e2
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.1.jar.sha1
@@ -0,0 +1 @@
+3787b8edc0cfad21998abc6aeb9d2cbf152b4b26
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1
deleted file mode 100644
index 54ea0b19f2a7b..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f183e1e8b1eaaa4dec444774a285bb8b66518522
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..96f8d70e6ee53
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.1.jar.sha1
@@ -0,0 +1 @@
+e935f600bf153c46f5725198ca9352c32025f274
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1
deleted file mode 100644
index 5442a40f5bba2..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b81a609934e65d12ab9d2d84bc2ea6f56a360e57
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..d6d5f1c2609ff
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.1.jar.sha1
@@ -0,0 +1 @@
+c4e1c94b1adbd1cb9dbdc0d3c2d2c33beabfc777
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1
deleted file mode 100644
index 60fd4015cfde0..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-bec069f286b45f20b743c81e84202369cd0467e7
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.1.jar.sha1
new file mode 100644
index 0000000000000..661f3062458e2
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.1.jar.sha1
@@ -0,0 +1 @@
+d8e4716dab6d829e7b37a8b185cbd242650aeb9e
\ No newline at end of file
diff --git a/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1
deleted file mode 100644
index bf45716c5b8ce..0000000000000
--- a/plugins/discovery-gce/licenses/grpc-api-1.68.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9a9f25c58d8d5b0fcf37ae889a50fec87e34ac08
\ No newline at end of file
diff --git a/plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1
new file mode 100644
index 0000000000000..1844172dec982
--- /dev/null
+++ b/plugins/discovery-gce/licenses/grpc-api-1.68.2.jar.sha1
@@ -0,0 +1 @@
+a257a5dd25dda1c97a99b56d5b9c1e56c12ae554
\ No newline at end of file
diff --git a/plugins/events-correlation-engine/build.gradle b/plugins/events-correlation-engine/build.gradle
deleted file mode 100644
index c3eff30012b1d..0000000000000
--- a/plugins/events-correlation-engine/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-apply plugin: 'opensearch.java-rest-test'
-apply plugin: 'opensearch.internal-cluster-test'
-
-opensearchplugin {
-  description 'OpenSearch Events Correlation Engine.'
-  classname 'org.opensearch.plugin.correlation.EventsCorrelationPlugin'
-}
-
-dependencies {
-}
diff --git a/plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java b/plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java
deleted file mode 100644
index 028848a91213e..0000000000000
--- a/plugins/events-correlation-engine/src/internalClusterTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTransportIT.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.plugin.correlation;
-
-import org.apache.lucene.search.join.ScoreMode;
-import org.opensearch.action.admin.cluster.node.info.NodeInfo;
-import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest;
-import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse;
-import org.opensearch.action.admin.cluster.node.info.PluginsAndModules;
-import org.opensearch.action.search.SearchRequest;
-import org.opensearch.action.search.SearchResponse;
-import org.opensearch.core.rest.RestStatus;
-import org.opensearch.index.query.NestedQueryBuilder;
-import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction;
-import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleRequest;
-import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleResponse;
-import org.opensearch.plugin.correlation.rules.model.CorrelationQuery;
-import org.opensearch.plugin.correlation.rules.model.CorrelationRule;
-import org.opensearch.plugins.Plugin;
-import org.opensearch.plugins.PluginInfo;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.OpenSearchIntegTestCase;
-import org.junit.Assert;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-/**
- * Transport Action tests for events-correlation-plugin
- */
-public class EventsCorrelationPluginTransportIT extends OpenSearchIntegTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(EventsCorrelationPlugin.class);
-    }
-
-    /**
-     * test events-correlation-plugin is installed
-     */
-    public void testPluginsAreInstalled() {
-        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
-        nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName());
-        NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
-        List<PluginInfo> pluginInfos = nodesInfoResponse.getNodes()
-            .stream()
-            .flatMap(
-                (Function<NodeInfo, Stream<PluginInfo>>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream()
-            )
-            .collect(Collectors.toList());
-        Assert.assertTrue(
-            pluginInfos.stream()
-                .anyMatch(pluginInfo -> pluginInfo.getName().equals("org.opensearch.plugin.correlation.EventsCorrelationPlugin"))
-        );
-    }
-
-    /**
-     * test creating a correlation rule
-     * @throws Exception Exception
-     */
-    public void testCreatingACorrelationRule() throws Exception {
-        List<CorrelationQuery> correlationQueries = Arrays.asList(
-            new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", "@timestamp", List.of("s3")),
-            new CorrelationQuery("app_logs", "keywords:PermissionDenied", "@timestamp", List.of("others_application"))
-        );
-        CorrelationRule correlationRule = new CorrelationRule("s3 to app logs", correlationQueries);
-        IndexCorrelationRuleRequest request = new IndexCorrelationRuleRequest(correlationRule, RestRequest.Method.POST);
-
-        IndexCorrelationRuleResponse response = client().execute(IndexCorrelationRuleAction.INSTANCE, request).get();
-        Assert.assertEquals(RestStatus.CREATED, response.getStatus());
-
-        NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery(
-            "correlate",
-            QueryBuilders.matchQuery("correlate.index", "s3_access_logs"),
-            ScoreMode.None
-        );
-        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
-        searchSourceBuilder.query(queryBuilder);
-        searchSourceBuilder.fetchSource(true);
-
-        SearchRequest searchRequest = new SearchRequest();
-        searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX);
-        searchRequest.source(searchSourceBuilder);
-
-        SearchResponse searchResponse = client().search(searchRequest).get();
-        Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value);
-    }
-
-    /**
-     * test filtering correlation rules
-     * @throws Exception Exception
-     */
-    public void testFilteringCorrelationRules() throws Exception {
-        List<CorrelationQuery> correlationQueries1 = Arrays.asList(
-            new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", "@timestamp", List.of("s3")),
-            new CorrelationQuery("app_logs", "keywords:PermissionDenied", "@timestamp", List.of("others_application"))
-        );
-        CorrelationRule correlationRule1 = new CorrelationRule("s3 to app logs", correlationQueries1);
-        IndexCorrelationRuleRequest request1 = new IndexCorrelationRuleRequest(correlationRule1, RestRequest.Method.POST);
-        client().execute(IndexCorrelationRuleAction.INSTANCE, request1).get();
-
-        List<CorrelationQuery> correlationQueries2 = Arrays.asList(
-            new CorrelationQuery("windows", "host.hostname:EC2AMAZ*", "@timestamp", List.of("windows")),
-            new CorrelationQuery("app_logs", "endpoint:/customer_records.txt", "@timestamp", List.of("others_application"))
-        );
-        CorrelationRule correlationRule2 = new CorrelationRule("windows to app logs", correlationQueries2);
-        IndexCorrelationRuleRequest request2 = new IndexCorrelationRuleRequest(correlationRule2, RestRequest.Method.POST);
-        client().execute(IndexCorrelationRuleAction.INSTANCE, request2).get();
-
-        NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery(
-            "correlate",
-            QueryBuilders.matchQuery("correlate.index", "s3_access_logs"),
-            ScoreMode.None
-        );
-        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
-        searchSourceBuilder.query(queryBuilder);
-        searchSourceBuilder.fetchSource(true);
-
-        SearchRequest searchRequest = new SearchRequest();
-        searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX);
-        searchRequest.source(searchSourceBuilder);
-
-        SearchResponse searchResponse = client().search(searchRequest).get();
-        Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value);
-    }
-
-    /**
-     * test creating a correlation rule with no timestamp field
-     * @throws Exception Exception
-     */
-    @SuppressWarnings("unchecked")
-    public void testCreatingACorrelationRuleWithNoTimestampField() throws Exception {
-        List<CorrelationQuery> correlationQueries = Arrays.asList(
-            new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", null, List.of("s3")),
-            new CorrelationQuery("app_logs", "keywords:PermissionDenied", null, List.of("others_application"))
-        );
-        CorrelationRule correlationRule = new CorrelationRule("s3 to app logs", correlationQueries);
-        IndexCorrelationRuleRequest request = new IndexCorrelationRuleRequest(correlationRule, RestRequest.Method.POST);
-
-        IndexCorrelationRuleResponse response = client().execute(IndexCorrelationRuleAction.INSTANCE, request).get();
-        Assert.assertEquals(RestStatus.CREATED, response.getStatus());
-
-        NestedQueryBuilder queryBuilder = QueryBuilders.nestedQuery(
-            "correlate",
-            QueryBuilders.matchQuery("correlate.index", "s3_access_logs"),
-            ScoreMode.None
-        );
-        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
-        searchSourceBuilder.query(queryBuilder);
-        searchSourceBuilder.fetchSource(true);
-
-        SearchRequest searchRequest = new SearchRequest();
-        searchRequest.indices(CorrelationRule.CORRELATION_RULE_INDEX);
-        searchRequest.source(searchSourceBuilder);
-
-        SearchResponse searchResponse = client().search(searchRequest).get();
-        Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value);
-        Assert.assertEquals(
-            "_timestamp",
-            ((List<Map<String, Object>>) (searchResponse.getHits().getHits()[0].getSourceAsMap().get("correlate"))).get(0)
-                .get("timestampField")
-        );
-    }
-}
diff --git a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java
deleted file mode 100644
index 414fe1948f053..0000000000000
--- a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java
+++ /dev/null
@@ -1,312 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.plugin.correlation;
-
-import org.apache.hc.core5.http.Header;
-import org.apache.hc.core5.http.HttpEntity;
-import org.apache.lucene.index.VectorSimilarityFunction;
-import org.opensearch.client.Request;
-import org.opensearch.client.RequestOptions;
-import org.opensearch.client.Response;
-import org.opensearch.client.ResponseException;
-import org.opensearch.client.RestClient;
-import org.opensearch.client.WarningsHandler;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.xcontent.XContentFactory;
-import org.opensearch.core.common.Strings;
-import org.opensearch.core.rest.RestStatus;
-import org.opensearch.core.xcontent.MediaTypeRegistry;
-import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.index.IndexSettings;
-import org.opensearch.test.rest.OpenSearchRestTestCase;
-import org.junit.Assert;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
-/**
- * Correlation Vectors Engine e2e tests
- */
-public class CorrelationVectorsEngineIT extends OpenSearchRestTestCase {
-
-    private static final int DIMENSION = 4;
-    private static final String PROPERTIES_FIELD_NAME = "properties";
-    private static final String TYPE_FIELD_NAME = "type";
-    private static final String CORRELATION_VECTOR_TYPE = "correlation_vector";
-    private static final String DIMENSION_FIELD_NAME = "dimension";
-    private static final int M = 16;
-    private static final int EF_CONSTRUCTION = 128;
-    private static final String INDEX_NAME = "test-index-1";
-    private static final Float[][] TEST_VECTORS = new Float[][] {
-        { 1.0f, 1.0f, 1.0f, 1.0f },
-        { 2.0f, 2.0f, 2.0f, 2.0f },
-        { 3.0f, 3.0f, 3.0f, 3.0f } };
-    private static final float[][] TEST_QUERY_VECTORS = new float[][] {
-        { 1.0f, 1.0f, 1.0f, 1.0f },
-        { 2.0f, 2.0f, 2.0f, 2.0f },
-        { 3.0f, 3.0f, 3.0f, 3.0f } };
-    private static final Map<VectorSimilarityFunction, Function<Float, Float>> VECTOR_SIMILARITY_TO_SCORE = Map.of(
-        VectorSimilarityFunction.EUCLIDEAN,
-        (similarity) -> 1 / (1 + similarity),
-        VectorSimilarityFunction.DOT_PRODUCT,
-        (similarity) -> (1 + similarity) / 2,
-        VectorSimilarityFunction.COSINE,
-        (similarity) -> (1 + similarity) / 2
-    );
-
-    /**
-     * test the e2e storage and query layer of events-correlation-engine
-     * @throws IOException IOException
-     */
-    @SuppressWarnings("unchecked")
-    public void testQuery() throws IOException {
-        String textField = "text-field";
-        String luceneField = "lucene-field";
-        XContentBuilder builder = XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject(PROPERTIES_FIELD_NAME)
-            .startObject(textField)
-            .field(TYPE_FIELD_NAME, "text")
-            .endObject()
-            .startObject(luceneField)
-            .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE)
-            .field(DIMENSION_FIELD_NAME, DIMENSION)
-            .startObject("correlation_ctx")
-            .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name())
-            .startObject("parameters")
-            .field("m", M)
-            .field("ef_construction", EF_CONSTRUCTION)
-            .endObject()
-            .endObject()
-            .endObject()
-            .endObject()
-            .endObject();
-
-        String mapping = builder.toString();
-        createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings());
-
-        for (int idx = 0; idx < TEST_VECTORS.length; ++idx) {
-            addCorrelationDoc(
-                INDEX_NAME,
-                String.valueOf(idx + 1),
-                List.of(textField, luceneField),
-                List.of(java.util.UUID.randomUUID().toString(), TEST_VECTORS[idx])
-            );
-        }
-        refreshAllIndices();
-        Assert.assertEquals(TEST_VECTORS.length, getDocCount(INDEX_NAME));
-
-        int k = 2;
-        for (float[] query : TEST_QUERY_VECTORS) {
-
-            String correlationQuery = "{\n"
-                + "  \"query\": {\n"
-                + "    \"correlation\": {\n"
-                + "      \"lucene-field\": {\n"
-                + "        \"vector\": \n"
-                + Arrays.toString(query)
-                + "        ,\n"
-                + "        \"k\": 2,\n"
-                + "        \"boost\": 1\n"
-                + "      }\n"
-                + "    }\n"
-                + "  }\n"
-                + "}";
-
-            Response response = searchCorrelationIndex(INDEX_NAME, correlationQuery, k);
-            Map<String, Object> responseBody = entityAsMap(response);
-            Assert.assertEquals(2, ((List<Object>) ((Map<String, Object>) responseBody.get("hits")).get("hits")).size());
-            @SuppressWarnings("unchecked")
-            double actualScore1 = Double.parseDouble(
-                ((List<Map<String, Object>>) ((Map<String, Object>) responseBody.get("hits")).get("hits")).get(0).get("_score").toString()
-            );
-            @SuppressWarnings("unchecked")
-            double actualScore2 = Double.parseDouble(
-                ((List<Map<String, Object>>) ((Map<String, Object>) responseBody.get("hits")).get("hits")).get(1).get("_score").toString()
-            );
-            @SuppressWarnings("unchecked")
-            List<Float> hit1 = ((Map<String, List<Double>>) ((List<Map<String, Object>>) ((Map<String, Object>) responseBody.get("hits"))
-                .get("hits")).get(0).get("_source")).get(luceneField).stream().map(Double::floatValue).collect(Collectors.toList());
-            float[] resultVector1 = new float[hit1.size()];
-            for (int i = 0; i < hit1.size(); ++i) {
-                resultVector1[i] = hit1.get(i);
-            }
-
-            @SuppressWarnings("unchecked")
-            List<Float> hit2 = ((Map<String, List<Double>>) ((List<Map<String, Object>>) ((Map<String, Object>) responseBody.get("hits"))
-                .get("hits")).get(1).get("_source")).get(luceneField).stream().map(Double::floatValue).collect(Collectors.toList());
-            float[] resultVector2 = new float[hit2.size()];
-            for (int i = 0; i < hit2.size(); ++i) {
-                resultVector2[i] = hit2.get(i);
-            }
-
-            double rawScore1 = VectorSimilarityFunction.EUCLIDEAN.compare(resultVector1, query);
-            Assert.assertEquals(rawScore1, actualScore1, 0.0001);
-            double rawScore2 = VectorSimilarityFunction.EUCLIDEAN.compare(resultVector2, query);
-            Assert.assertEquals(rawScore2, actualScore2, 0.0001);
-        }
-    }
-
-    /**
-     * unhappy test for the e2e storage and query layer of events-correlation-engine with no index exist
-     */
-    public void testQueryWithNoIndexExist() {
-        float[] query = new float[] { 1.0f, 1.0f, 1.0f, 1.0f };
-        String correlationQuery = "{\n"
-            + "  \"query\": {\n"
-            + "    \"correlation\": {\n"
-            + "      \"lucene-field\": {\n"
-            + "        \"vector\": \n"
-            + Arrays.toString(query)
-            + "        ,\n"
-            + "        \"k\": 2,\n"
-            + "        \"boost\": 1\n"
-            + "      }\n"
-            + "    }\n"
-            + "  }\n"
-            + "}";
-        Exception ex = assertThrows(ResponseException.class, () -> { searchCorrelationIndex(INDEX_NAME, correlationQuery, 2); });
-        String expectedMessage = String.format(Locale.ROOT, "no such index [%s]", INDEX_NAME);
-        String actualMessage = ex.getMessage();
-        Assert.assertTrue(actualMessage.contains(expectedMessage));
-    }
-
-    /**
-     * unhappy test for the e2e storage and query layer of events-correlation-engine with wrong mapping
-     */
-    public void testQueryWithWrongMapping() throws IOException {
-        String textField = "text-field";
-        String luceneField = "lucene-field";
-        XContentBuilder builder = XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject(PROPERTIES_FIELD_NAME)
-            .startObject(textField)
-            .field(TYPE_FIELD_NAME, "text")
-            .endObject()
-            .startObject(luceneField)
-            .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE)
-            .field("test", DIMENSION)
-            .startObject("correlation_ctx")
-            .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name())
-            .startObject("parameters")
-            .field("m", M)
-            .field("ef_construction", EF_CONSTRUCTION)
-            .endObject()
-            .endObject()
-            .endObject()
-            .endObject()
-            .endObject();
-
-        String mapping = builder.toString();
-        Exception ex = assertThrows(ResponseException.class, () -> {
-            createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings());
-        });
-
-        String expectedMessage = String.format(
-            Locale.ROOT,
-            "unknown parameter [test] on mapper [%s] of type [correlation_vector]",
-            luceneField
-        );
-        String actualMessage = ex.getMessage();
-        Assert.assertTrue(actualMessage.contains(expectedMessage));
-    }
-
-    private String createTestIndexWithMappingJson(RestClient client, String index, String mapping, Settings settings) throws IOException {
-        Request request = new Request("PUT", "/" + index);
-        String entity = "{\"settings\": " + Strings.toString(MediaTypeRegistry.JSON, settings);
-        if (mapping != null) {
-            entity = entity + ",\"mappings\" : " + mapping;
-        }
-
-        entity = entity + "}";
-        if (!settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) {
-            expectSoftDeletesWarning(request, index);
-        }
-
-        request.setJsonEntity(entity);
-        client.performRequest(request);
-        return index;
-    }
-
-    private Settings getCorrelationDefaultIndexSettings() {
-        return Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).put("index.correlation", true).build();
-    }
-
-    private void addCorrelationDoc(String index, String docId, List<String> fieldNames, List<Object> vectors) throws IOException {
-        Request request = new Request("POST", "/" + index + "/_doc/" + docId + "?refresh=true");
-
-        XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
-        for (int i = 0; i < fieldNames.size(); i++) {
-            builder.field(fieldNames.get(i), vectors.get(i));
-        }
-        builder.endObject();
-
-        request.setJsonEntity(builder.toString());
-        Response response = client().performRequest(request);
-        assertEquals(request.getEndpoint() + ": failed", RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode()));
-    }
-
-    private Response searchCorrelationIndex(String index, String correlationQuery, int resultSize) throws IOException {
-        Request request = new Request("POST", "/" + index + "/_search");
-
-        request.addParameter("size", Integer.toString(resultSize));
-        request.addParameter("explain", Boolean.toString(true));
-        request.addParameter("search_type", "query_then_fetch");
-        request.setJsonEntity(correlationQuery);
-
-        Response response = client().performRequest(request);
-        Assert.assertEquals("Search failed", RestStatus.OK, restStatus(response));
-        return response;
-    }
-
-    private int getDocCount(String index) throws IOException {
-        Response response = makeRequest(
-            client(),
-            "GET",
-            String.format(Locale.getDefault(), "/%s/_count", index),
-            Collections.emptyMap(),
-            null
-        );
-        Assert.assertEquals(RestStatus.OK, restStatus(response));
-        return Integer.parseInt(entityAsMap(response).get("count").toString());
-    }
-
-    private Response makeRequest(
-        RestClient client,
-        String method,
-        String endpoint,
-        Map<String, String> params,
-        HttpEntity entity,
-        Header... headers
-    ) throws IOException {
-        Request request = new Request(method, endpoint);
-        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
-        options.setWarningsHandler(WarningsHandler.PERMISSIVE);
-
-        for (Header header : headers) {
-            options.addHeader(header.getName(), header.getValue());
-        }
-        request.setOptions(options.build());
-        request.addParameters(params);
-        if (entity != null) {
-            request.setEntity(entity);
-        }
-        return client.performRequest(request);
-    }
-
-    private RestStatus restStatus(Response response) {
-        return RestStatus.fromCode(response.getStatusLine().getStatusCode());
-    }
-}
diff --git a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java
deleted file mode 100644
index 3791a5cdf5db0..0000000000000
--- a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/EventsCorrelationPluginRestIT.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.plugin.correlation;
-
-import org.opensearch.action.search.SearchResponse;
-import org.opensearch.client.Request;
-import org.opensearch.client.Response;
-import org.opensearch.common.xcontent.LoggingDeprecationHandler;
-import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.core.xcontent.NamedXContentRegistry;
-import org.opensearch.test.rest.OpenSearchRestTestCase;
-import org.junit.Assert;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Rest Action tests for events-correlation-plugin
- */
-public class EventsCorrelationPluginRestIT extends OpenSearchRestTestCase {
-
-    /**
-     * test events-correlation-plugin is installed
-     * @throws IOException IOException
-     */
-    @SuppressWarnings("unchecked")
-    public void testPluginsAreInstalled() throws IOException {
-        Request request = new Request("GET", "/_cat/plugins?s=component&h=name,component,version,description&format=json");
-        Response response = client().performRequest(request);
-        List<Object> pluginsList = JsonXContent.jsonXContent.createParser(
-            NamedXContentRegistry.EMPTY,
-            LoggingDeprecationHandler.INSTANCE,
-            response.getEntity().getContent()
-        ).list();
-        Assert.assertTrue(
-            pluginsList.stream()
-                .map(o -> (Map<String, Object>) o)
-                .anyMatch(plugin -> plugin.get("component").equals("events-correlation-engine"))
-        );
-    }
-
-    /**
-     * test creating a correlation rule
-     * @throws IOException IOException
-     */
-    public void testCreatingACorrelationRule() throws IOException {
-        Request request = new Request("POST", "/_correlation/rules");
-        request.setJsonEntity(sampleCorrelationRule());
-        Response response = client().performRequest(request);
-
-        Assert.assertEquals(201, response.getStatusLine().getStatusCode());
-
-        Map<String, Object> responseMap = entityAsMap(response);
-        String id = responseMap.get("_id").toString();
-
-        request = new Request("POST", "/.opensearch-correlation-rules-config/_search");
-        request.setJsonEntity(matchIdQuery(id));
-        response = client().performRequest(request);
-
-        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
-        SearchResponse searchResponse = SearchResponse.fromXContent(
-            createParser(JsonXContent.jsonXContent, response.getEntity().getContent())
-        );
-        Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value);
-    }
-
-    /**
-     * test creating a correlation rule with no timestamp field
-     * @throws IOException IOException
-     */
-    @SuppressWarnings("unchecked")
-    public void testCreatingACorrelationRuleWithNoTimestampField() throws IOException {
-        Request request = new Request("POST", "/_correlation/rules");
-        request.setJsonEntity(sampleCorrelationRuleWithNoTimestamp());
-        Response response = client().performRequest(request);
-
-        Assert.assertEquals(201, response.getStatusLine().getStatusCode());
-
-        Map<String, Object> responseMap = entityAsMap(response);
-        String id = responseMap.get("_id").toString();
-
-        request = new Request("POST", "/.opensearch-correlation-rules-config/_search");
-        request.setJsonEntity(matchIdQuery(id));
-        response = client().performRequest(request);
-
-        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
-        SearchResponse searchResponse = SearchResponse.fromXContent(
-            createParser(JsonXContent.jsonXContent, response.getEntity().getContent())
-        );
-        Assert.assertEquals(1L, searchResponse.getHits().getTotalHits().value);
-        Assert.assertEquals(
-            "_timestamp",
-            ((List<Map<String, Object>>) (searchResponse.getHits().getHits()[0].getSourceAsMap().get("correlate"))).get(0)
-                .get("timestampField")
-        );
-    }
-
-    private String sampleCorrelationRule() {
-        return "{\n"
-            + "  \"name\": \"s3 to app logs\",\n"
-            + "  \"correlate\": [\n"
-            + "    {\n"
-            + "      \"index\": \"s3_access_logs\",\n"
-            + "      \"query\": \"aws.cloudtrail.eventName:ReplicateObject\",\n"
-            + "      \"timestampField\": \"@timestamp\",\n"
-            + "      \"tags\": [\n"
-            + "        \"s3\"\n"
-            + "      ]\n"
-            + "    },\n"
-            + "    {\n"
-            + "      \"index\": \"app_logs\",\n"
-            + "      \"query\": \"keywords:PermissionDenied\",\n"
-            + "      \"timestampField\": \"@timestamp\",\n"
-            + "      \"tags\": [\n"
-            + "        \"others_application\"\n"
-            + "      ]\n"
-            + "    }\n"
-            + "  ]\n"
-            + "}";
-    }
-
-    private String sampleCorrelationRuleWithNoTimestamp() {
-        return "{\n"
-            + "  \"name\": \"s3 to app logs\",\n"
-            + "  \"correlate\": [\n"
-            + "    {\n"
-            + "      \"index\": \"s3_access_logs\",\n"
-            + "      \"query\": \"aws.cloudtrail.eventName:ReplicateObject\",\n"
-            + "      \"tags\": [\n"
-            + "        \"s3\"\n"
-            + "      ]\n"
-            + "    },\n"
-            + "    {\n"
-            + "      \"index\": \"app_logs\",\n"
-            + "      \"query\": \"keywords:PermissionDenied\",\n"
-            + "      \"tags\": [\n"
-            + "        \"others_application\"\n"
-            + "      ]\n"
-            + "    }\n"
-            + "  ]\n"
-            + "}";
-    }
-
-    private String matchIdQuery(String id) {
-        return "{\n" + "  \"query\" : {\n" + "    \"match\":{\n" + "      \"_id\": \"" + id + "\"\n" + "    }\n" + "  }\n" + "}";
-    }
-}
diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java
deleted file mode 100644
index 9637042974d03..0000000000000
--- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/EventsCorrelationPlugin.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.plugin.correlation;
-
-import org.opensearch.action.ActionRequest;
-import org.opensearch.client.Client;
-import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
-import org.opensearch.cluster.node.DiscoveryNodes;
-import org.opensearch.cluster.service.ClusterService;
-import org.opensearch.common.settings.ClusterSettings;
-import org.opensearch.common.settings.IndexScopedSettings;
-import org.opensearch.common.settings.Setting;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.settings.SettingsFilter;
-import org.opensearch.core.action.ActionResponse;
-import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.core.xcontent.NamedXContentRegistry;
-import org.opensearch.env.Environment;
-import org.opensearch.env.NodeEnvironment;
-import org.opensearch.index.IndexSettings;
-import org.opensearch.index.codec.CodecServiceFactory;
-import org.opensearch.index.mapper.Mapper;
-import org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecService;
-import org.opensearch.plugin.correlation.core.index.mapper.CorrelationVectorFieldMapper;
-import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper;
-import org.opensearch.plugin.correlation.core.index.query.CorrelationQueryBuilder;
-import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction;
-import org.opensearch.plugin.correlation.rules.resthandler.RestIndexCorrelationRuleAction;
-import org.opensearch.plugin.correlation.rules.transport.TransportIndexCorrelationRuleAction;
-import org.opensearch.plugin.correlation.settings.EventsCorrelationSettings;
-import org.opensearch.plugin.correlation.utils.CorrelationRuleIndices;
-import org.opensearch.plugins.ActionPlugin;
-import org.opensearch.plugins.EnginePlugin;
-import org.opensearch.plugins.MapperPlugin;
-import org.opensearch.plugins.Plugin;
-import org.opensearch.plugins.SearchPlugin;
-import org.opensearch.repositories.RepositoriesService;
-import org.opensearch.rest.RestController;
-import org.opensearch.rest.RestHandler;
-import org.opensearch.script.ScriptService;
-import org.opensearch.threadpool.ThreadPool;
-import org.opensearch.watcher.ResourceWatcherService;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.function.Supplier;
-
-/**
- * Plugin class for events-correlation-engine
- */
-public class EventsCorrelationPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin {
-
-    /**
-     * events-correlation-engine base uri
-     */
-    public static final String PLUGINS_BASE_URI = "/_correlation";
-    /**
-     * events-correlation-engine rules uri
-     */
-    public static final String CORRELATION_RULES_BASE_URI = PLUGINS_BASE_URI + "/rules";
-
-    private CorrelationRuleIndices correlationRuleIndices;
-
-    /**
-     * Default constructor
-     */
-    public EventsCorrelationPlugin() {}
-
-    @Override
-    public Collection<Object> createComponents(
-        Client client,
-        ClusterService clusterService,
-        ThreadPool threadPool,
-        ResourceWatcherService resourceWatcherService,
-        ScriptService scriptService,
-        NamedXContentRegistry xContentRegistry,
-        Environment environment,
-        NodeEnvironment nodeEnvironment,
-        NamedWriteableRegistry namedWriteableRegistry,
-        IndexNameExpressionResolver indexNameExpressionResolver,
-        Supplier<RepositoriesService> repositoriesServiceSupplier
-    ) {
-        correlationRuleIndices = new CorrelationRuleIndices(client, clusterService);
-        return List.of(correlationRuleIndices);
-    }
-
-    @Override
-    public List<RestHandler> getRestHandlers(
-        Settings settings,
-        RestController restController,
-        ClusterSettings clusterSettings,
-        IndexScopedSettings indexScopedSettings,
-        SettingsFilter settingsFilter,
-        IndexNameExpressionResolver indexNameExpressionResolver,
-        Supplier<DiscoveryNodes> nodesInCluster
-    ) {
-        return List.of(new RestIndexCorrelationRuleAction());
-    }
-
-    @Override
-    public Map<String, Mapper.TypeParser> getMappers() {
-        return Collections.singletonMap(CorrelationVectorFieldMapper.CONTENT_TYPE, new VectorFieldMapper.TypeParser());
-    }
-
-    @Override
-    public Optional<CodecServiceFactory> getCustomCodecServiceFactory(IndexSettings indexSettings) {
-        if (indexSettings.getValue(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING)) {
-            return Optional.of(CorrelationCodecService::new);
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public List<QuerySpec<?>> getQueries() {
-        return Collections.singletonList(
-            new QuerySpec<>(
-                CorrelationQueryBuilder.NAME_FIELD.getPreferredName(),
-                CorrelationQueryBuilder::new,
-                CorrelationQueryBuilder::parse
-            )
-        );
-    }
-
-    @Override
-    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
-        return List.of(new ActionPlugin.ActionHandler<>(IndexCorrelationRuleAction.INSTANCE, TransportIndexCorrelationRuleAction.class));
-    }
-
-    @Override
-    public List<Setting<?>> getSettings() {
-        return List.of(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING, EventsCorrelationSettings.CORRELATION_TIME_WINDOW);
-    }
-}
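The removed plugin wired into several OpenSearch extension points at once (ActionPlugin, MapperPlugin, SearchPlugin, EnginePlugin). For orientation, a minimal hedged sketch of the smallest of those contracts, reusing only signatures visible in the deleted code above; `MinimalRestPlugin` and the commented-out handler are hypothetical placeholders, not part of this patch:

```java
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.IndexScopedSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsFilter;
import org.opensearch.plugins.ActionPlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.rest.RestController;
import org.opensearch.rest.RestHandler;

import java.util.List;
import java.util.function.Supplier;

/** Minimal ActionPlugin shape, mirroring the getRestHandlers override deleted above. */
public class MinimalRestPlugin extends Plugin implements ActionPlugin {
    @Override
    public List<RestHandler> getRestHandlers(
        Settings settings,
        RestController restController,
        ClusterSettings clusterSettings,
        IndexScopedSettings indexScopedSettings,
        SettingsFilter settingsFilter,
        IndexNameExpressionResolver indexNameExpressionResolver,
        Supplier<DiscoveryNodes> nodesInCluster
    ) {
        // e.g. return List.of(new MyRestHandler()); -- hypothetical handler class
        return List.of();
    }
}
```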
List.of(correlationRuleIndices); - } - - @Override - public List<RestHandler> getRestHandlers( - Settings settings, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<DiscoveryNodes> nodesInCluster - ) { - return List.of(new RestIndexCorrelationRuleAction()); - } - - @Override - public Map<String, Mapper.TypeParser> getMappers() { - return Collections.singletonMap(CorrelationVectorFieldMapper.CONTENT_TYPE, new VectorFieldMapper.TypeParser()); - } - - @Override - public Optional<CodecServiceFactory> getCustomCodecServiceFactory(IndexSettings indexSettings) { - if (indexSettings.getValue(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING)) { - return Optional.of(CorrelationCodecService::new); - } - return Optional.empty(); - } - - @Override - public List<QuerySpec<?>> getQueries() { - return Collections.singletonList( - new QuerySpec<>( - CorrelationQueryBuilder.NAME_FIELD.getPreferredName(), - CorrelationQueryBuilder::new, - CorrelationQueryBuilder::parse - ) - ); - } - - @Override - public List<ActionPlugin.ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { - return List.of(new ActionPlugin.ActionHandler<>(IndexCorrelationRuleAction.INSTANCE, TransportIndexCorrelationRuleAction.class)); - } - - @Override - public List<Setting<?>> getSettings() { - return List.of(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING, EventsCorrelationSettings.CORRELATION_TIME_WINDOW); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java deleted file mode 100644 index fef9200a73091..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContext.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ToXContentFragment; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.mapper.MapperParsingException; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; - -/** - * Defines vector similarity function, m and ef_construction hyper-parameters field mappings for correlation_vector type.
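- * For illustration only, a minimal mapping fragment this context is parsed from might look like - * "correlation_ctx": { "similarityFunction": "EUCLIDEAN", "parameters": { "m": 16, "ef_construction": 128 } } - * (the m and ef_construction values are hypothetical examples, not defaults declared here; EUCLIDEAN is the fallback that parse applies when no function is given).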
- * - * @opensearch.internal - */ -public class CorrelationParamsContext implements ToXContentFragment, Writeable { - - /** - * Vector Similarity Function field - */ - public static final String VECTOR_SIMILARITY_FUNCTION = "similarityFunction"; - /** - * Parameters field to define m and ef_construction - */ - public static final String PARAMETERS = "parameters"; - - private final VectorSimilarityFunction similarityFunction; - private final Map<String, Object> parameters; - - /** - * Parameterized ctor for CorrelationParamsContext - * @param similarityFunction Vector Similarity Function - * @param parameters Parameters to define m and ef_construction - */ - public CorrelationParamsContext(VectorSimilarityFunction similarityFunction, Map<String, Object> parameters) { - this.similarityFunction = similarityFunction; - this.parameters = parameters; - } - - /** - * Parameterized ctor for CorrelationParamsContext - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationParamsContext(StreamInput sin) throws IOException { - this.similarityFunction = VectorSimilarityFunction.valueOf(sin.readString()); - if (sin.available() > 0) { - this.parameters = sin.readMap(); - } else { - this.parameters = null; - } - } - - /** - * Parse into CorrelationParamsContext - * @param in Object - * @return CorrelationParamsContext - */ - public static CorrelationParamsContext parse(Object in) { - if (!(in instanceof Map)) { - throw new MapperParsingException("Unable to parse CorrelationParamsContext"); - } - - @SuppressWarnings("unchecked") - Map<String, Object> contextMap = (Map<String, Object>) in; - VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.EUCLIDEAN; - Map<String, Object> parameters = new HashMap<>(); - - if (contextMap.containsKey(VECTOR_SIMILARITY_FUNCTION)) { - Object value = contextMap.get(VECTOR_SIMILARITY_FUNCTION); - - if (value != null && !(value instanceof String)) { - throw new MapperParsingException(String.format(Locale.getDefault(), "%s must be a string", VECTOR_SIMILARITY_FUNCTION)); - } - - try { - similarityFunction = VectorSimilarityFunction.valueOf((String) value); - } catch (IllegalArgumentException ex) { - throw new MapperParsingException(String.format(Locale.getDefault(), "Invalid %s: %s", VECTOR_SIMILARITY_FUNCTION, value)); - } - } - if (contextMap.containsKey(PARAMETERS)) { - Object value = contextMap.get(PARAMETERS); - if (!(value instanceof Map)) { - throw new MapperParsingException("Unable to parse parameters for Correlation context"); - } - - @SuppressWarnings("unchecked") - Map<String, Object> valueMap = (Map<String, Object>) value; - parameters.putAll(valueMap); - } - return new CorrelationParamsContext(similarityFunction, parameters); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(VECTOR_SIMILARITY_FUNCTION, similarityFunction.name()); - if (parameters == null) { - builder.field(PARAMETERS, (String) null); - } else { - builder.startObject(PARAMETERS); - for (Map.Entry<String, Object> parameter : parameters.entrySet()) { - builder.field(parameter.getKey(), parameter.getValue()); - } - builder.endObject(); - } - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(similarityFunction.name()); - if (this.parameters != null) { - out.writeMap(parameters); - } - } - - /** - * get Vector Similarity Function - * @return Vector Similarity Function - */ - public VectorSimilarityFunction getSimilarityFunction() { - return similarityFunction; - } - - /** - * Get Parameters to
define m and ef_construction - * @return Parameters to define m and ef_construction - */ - public Map getParameters() { - return parameters; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java deleted file mode 100644 index 61efd6b9a87ae..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/VectorField.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexableFieldType; -import org.apache.lucene.util.BytesRef; -import org.opensearch.common.io.stream.BytesStreamOutput; - -import java.io.IOException; - -/** - * Generic Vector Field defining a correlation vector name, float array. - * - * @opensearch.internal - */ -public class VectorField extends Field { - - /** - * Parameterized ctor for VectorField - * @param name name of the field - * @param value float array value for the field - * @param type type of the field - */ - public VectorField(String name, float[] value, IndexableFieldType type) { - super(name, new BytesRef(), type); - try { - final byte[] floatToByte = floatToByteArray(value); - this.setBytesValue(floatToByte); - } catch (IOException ex) { - throw new RuntimeException(ex); - } - } - - /** - * converts float array based vector to byte array. - * @param input float array - * @return byte array - */ - protected static byte[] floatToByteArray(float[] input) throws IOException { - BytesStreamOutput objectStream = new BytesStreamOutput(); - objectStream.writeFloatArray(input); - return objectStream.bytes().toBytesRef().bytes; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java deleted file mode 100644 index 00b55eb75995c..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/BasePerFieldCorrelationVectorsFormat.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec; - -import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.mapper.CorrelationVectorFieldMapper; - -import java.util.Locale; -import java.util.Map; -import java.util.Optional; -import java.util.function.BiFunction; -import java.util.function.Supplier; - -/** - * Class to define the hyper-parameters m and ef_construction for insert and store of correlation vectors into HNSW graphs based lucene index. 
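- * For illustration (hypothetical numbers): a correlation_vector field whose parameters carry m=16 and ef_construction=128 resolves to formatSupplier.apply(16, 128); - * missing keys fall back to defaultMaxConnections and defaultBeamWidth, and fields that are not correlation_vector typed use defaultFormatSupplier instead.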
- * - * @opensearch.internal - */ -public abstract class BasePerFieldCorrelationVectorsFormat extends PerFieldKnnVectorsFormat { - /** - * the hyper-parameters for constructing HNSW graphs. - * HnswGraph.html - */ - public static final String METHOD_PARAMETER_M = "m"; - /** - * the hyper-parameters for constructing HNSW graphs. - * HnswGraph.html - */ - public static final String METHOD_PARAMETER_EF_CONSTRUCTION = "ef_construction"; - - private final Optional mapperService; - private final int defaultMaxConnections; - private final int defaultBeamWidth; - private final Supplier defaultFormatSupplier; - private final BiFunction formatSupplier; - - /** - * Parameterized ctor of BasePerFieldCorrelationVectorsFormat - * @param mapperService mapper service - * @param defaultMaxConnections default m - * @param defaultBeamWidth default ef_construction - * @param defaultFormatSupplier default format supplier - * @param formatSupplier format supplier - */ - public BasePerFieldCorrelationVectorsFormat( - Optional mapperService, - int defaultMaxConnections, - int defaultBeamWidth, - Supplier defaultFormatSupplier, - BiFunction formatSupplier - ) { - this.mapperService = mapperService; - this.defaultMaxConnections = defaultMaxConnections; - this.defaultBeamWidth = defaultBeamWidth; - this.defaultFormatSupplier = defaultFormatSupplier; - this.formatSupplier = formatSupplier; - } - - @Override - public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - if (!isCorrelationVectorFieldType(field)) { - return defaultFormatSupplier.get(); - } - - var type = (CorrelationVectorFieldMapper.CorrelationVectorFieldType) mapperService.orElseThrow( - () -> new IllegalArgumentException( - String.format(Locale.getDefault(), "Cannot read field type for field [%s] because mapper service is not available", field) - ) - ).fieldType(field); - - var params = type.getCorrelationParams().getParameters(); - int maxConnections = getMaxConnections(params); - int beamWidth = getBeamWidth(params); - - return formatSupplier.apply(maxConnections, beamWidth); - } - - private boolean isCorrelationVectorFieldType(final String field) { - return mapperService.isPresent() - && mapperService.get().fieldType(field) instanceof CorrelationVectorFieldMapper.CorrelationVectorFieldType; - } - - private int getMaxConnections(final Map params) { - if (params != null && params.containsKey(METHOD_PARAMETER_M)) { - return (int) params.get(METHOD_PARAMETER_M); - } - return defaultMaxConnections; - } - - private int getBeamWidth(final Map params) { - if (params != null && params.containsKey(METHOD_PARAMETER_EF_CONSTRUCTION)) { - return (int) params.get(METHOD_PARAMETER_EF_CONSTRUCTION); - } - return defaultBeamWidth; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java deleted file mode 100644 index 09d5e1d2c19e3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecService.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.codec; - -import org.apache.lucene.codecs.Codec; -import org.opensearch.index.codec.CodecService; -import org.opensearch.index.codec.CodecServiceConfig; -import org.opensearch.index.mapper.MapperService; - -/** - * custom Correlation Codec Service - * - * @opensearch.internal - */ -public class CorrelationCodecService extends CodecService { - - private final MapperService mapperService; - - /** - * Parameterized ctor for CorrelationCodecService - * @param codecServiceConfig Generic codec service config - */ - public CorrelationCodecService(CodecServiceConfig codecServiceConfig) { - super(codecServiceConfig.getMapperService(), codecServiceConfig.getIndexSettings(), codecServiceConfig.getLogger()); - mapperService = codecServiceConfig.getMapperService(); - } - - @Override - public Codec codec(String name) { - return CorrelationCodecVersion.current().getCorrelationCodecSupplier().apply(super.codec(name), mapperService); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java deleted file mode 100644 index 9dbb695f14b78..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec; - -import org.apache.lucene.backward_codecs.lucene99.Lucene99Codec; -import org.apache.lucene.codecs.Codec; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec; -import org.opensearch.plugin.correlation.core.index.codec.correlation990.PerFieldCorrelationVectorsFormat; - -import java.util.Optional; -import java.util.function.BiFunction; -import java.util.function.Supplier; - -/** - * CorrelationCodecVersion enum - * - * @opensearch.internal - */ -public enum CorrelationCodecVersion { - V_9_9_0( - "CorrelationCodec", - new Lucene99Codec(), - new PerFieldCorrelationVectorsFormat(Optional.empty()), - (userCodec, mapperService) -> new CorrelationCodec(userCodec, new PerFieldCorrelationVectorsFormat(Optional.of(mapperService))), - CorrelationCodec::new - ); - - private static final CorrelationCodecVersion CURRENT = V_9_9_0; - private final String codecName; - private final Codec defaultCodecDelegate; - private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; - private final BiFunction correlationCodecSupplier; - private final Supplier defaultCorrelationCodecSupplier; - - CorrelationCodecVersion( - String codecName, - Codec defaultCodecDelegate, - PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat, - BiFunction correlationCodecSupplier, - Supplier defaultCorrelationCodecSupplier - ) { - this.codecName = codecName; - this.defaultCodecDelegate = defaultCodecDelegate; - this.perFieldCorrelationVectorsFormat = perFieldCorrelationVectorsFormat; - this.correlationCodecSupplier = correlationCodecSupplier; - this.defaultCorrelationCodecSupplier = defaultCorrelationCodecSupplier; - } - - /** - * get codec name - * @return codec name - */ - 
public String getCodecName() { - return codecName; - } - - /** - * get default codec delegate - * @return default codec delegate - */ - public Codec getDefaultCodecDelegate() { - return defaultCodecDelegate; - } - - /** - * get correlation vectors format - * @return correlation vectors format - */ - public PerFieldCorrelationVectorsFormat getPerFieldCorrelationVectorsFormat() { - return perFieldCorrelationVectorsFormat; - } - - /** - * get correlation codec supplier - * @return correlation codec supplier - */ - public BiFunction getCorrelationCodecSupplier() { - return correlationCodecSupplier; - } - - /** - * get default correlation codec supplier - * @return default correlation codec supplier - */ - public Supplier getDefaultCorrelationCodecSupplier() { - return defaultCorrelationCodecSupplier; - } - - /** - * static method to get correlation codec version - * @return correlation codec version - */ - public static final CorrelationCodecVersion current() { - return CURRENT; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java deleted file mode 100644 index 022972e2e06c3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec.correlation990; - -import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.KnnVectorsFormat; -import org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion; - -/** - * Correlation Codec class - * - * @opensearch.internal - */ -public class CorrelationCodec extends FilterCodec { - private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_9_0; - private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; - - /** - * ctor for CorrelationCodec - */ - public CorrelationCodec() { - this(VERSION.getDefaultCodecDelegate(), VERSION.getPerFieldCorrelationVectorsFormat()); - } - - /** - * Parameterized ctor for CorrelationCodec - * @param delegate codec delegate - * @param perFieldCorrelationVectorsFormat correlation vectors format - */ - public CorrelationCodec(Codec delegate, PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat) { - super(VERSION.getCodecName(), delegate); - this.perFieldCorrelationVectorsFormat = perFieldCorrelationVectorsFormat; - } - - @Override - public KnnVectorsFormat knnVectorsFormat() { - return perFieldCorrelationVectorsFormat; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java deleted file mode 100644 index 89cc0b614a1a5..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java +++ /dev/null @@ 
-1,35 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec.correlation990; - -import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat; - -import java.util.Optional; - -/** - * Class to define the hyper-parameters m and ef_construction for insert and store of correlation vectors into HNSW graphs based lucene index. - */ -public class PerFieldCorrelationVectorsFormat extends BasePerFieldCorrelationVectorsFormat { - - /** - * Parameterized ctor for PerFieldCorrelationVectorsFormat - * @param mapperService mapper service - */ - public PerFieldCorrelationVectorsFormat(final Optional<MapperService> mapperService) { - super( - mapperService, - Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - Lucene99HnswVectorsFormat::new, - Lucene99HnswVectorsFormat::new - ); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java deleted file mode 100644 index fc2a9de58a73a..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * custom Lucene 9.9 codec package for events-correlation-engine - */ -package org.opensearch.plugin.correlation.core.index.codec.correlation990; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java deleted file mode 100644 index 862b7cd253f04..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license.
- */ - -/** - * custom codec package for events-correlation-engine - */ -package org.opensearch.plugin.correlation.core.index.codec; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java deleted file mode 100644 index 18c9dd222e2cf..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapper.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.mapper; - -import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.KnnFloatVectorField; -import org.apache.lucene.document.StoredField; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.common.Explicit; -import org.opensearch.index.mapper.FieldMapper; -import org.opensearch.index.mapper.ParseContext; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.plugin.correlation.core.index.VectorField; - -import java.io.IOException; -import java.util.Locale; -import java.util.Optional; - -/** - * Field mapper for the correlation vector type - * - * @opensearch.internal - */ -public class CorrelationVectorFieldMapper extends VectorFieldMapper { - - private static final int LUCENE_MAX_DIMENSION = KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS; - - private final FieldType vectorFieldType; - - /** - * Parameterized ctor for CorrelationVectorFieldMapper - * @param input Object containing name of the field, type and other details. 
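- * @throws IllegalArgumentException if the mapped dimension exceeds KnnVectorsFormat.DEFAULT_MAX_DIMENSIONS, as enforced in the constructor body below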
- */ - public CorrelationVectorFieldMapper(final CreateLuceneFieldMapperInput input) { - super( - input.getName(), - input.getMappedFieldType(), - input.getMultiFields(), - input.getCopyTo(), - input.getIgnoreMalformed(), - input.isStored(), - input.isHasDocValues() - ); - - this.correlationParams = input.getCorrelationParams(); - final VectorSimilarityFunction vectorSimilarityFunction = this.correlationParams.getSimilarityFunction(); - - final int dimension = input.getMappedFieldType().getDimension(); - if (dimension > LUCENE_MAX_DIMENSION) { - throw new IllegalArgumentException( - String.format( - Locale.ROOT, - "Dimension value cannot be greater than [%s] but got [%s] for vector [%s]", - LUCENE_MAX_DIMENSION, - dimension, - input.getName() - ) - ); - } - - this.fieldType = KnnFloatVectorField.createFieldType(dimension, vectorSimilarityFunction); - - if (this.hasDocValues) { - this.vectorFieldType = buildDocValuesFieldType(); - } else { - this.vectorFieldType = null; - } - } - - private static FieldType buildDocValuesFieldType() { - FieldType field = new FieldType(); - field.setDocValuesType(DocValuesType.BINARY); - field.freeze(); - return field; - } - - @Override - protected void parseCreateField(ParseContext context, int dimension) throws IOException { - Optional arrayOptional = getFloatsFromContext(context, dimension); - - if (arrayOptional.isEmpty()) { - return; - } - final float[] array = arrayOptional.get(); - - KnnFloatVectorField point = new KnnFloatVectorField(name(), array, fieldType); - - context.doc().add(point); - if (fieldType.stored()) { - context.doc().add(new StoredField(name(), point.toString())); - } - if (hasDocValues && vectorFieldType != null) { - context.doc().add(new VectorField(name(), array, vectorFieldType)); - } - context.path().remove(); - } - - static class CreateLuceneFieldMapperInput { - String name; - - CorrelationVectorFieldType mappedFieldType; - - FieldMapper.MultiFields multiFields; - - FieldMapper.CopyTo copyTo; - - Explicit ignoreMalformed; - boolean stored; - boolean hasDocValues; - - CorrelationParamsContext correlationParams; - - public CreateLuceneFieldMapperInput( - String name, - CorrelationVectorFieldType mappedFieldType, - FieldMapper.MultiFields multiFields, - FieldMapper.CopyTo copyTo, - Explicit ignoreMalformed, - boolean stored, - boolean hasDocValues, - CorrelationParamsContext correlationParams - ) { - this.name = name; - this.mappedFieldType = mappedFieldType; - this.multiFields = multiFields; - this.copyTo = copyTo; - this.ignoreMalformed = ignoreMalformed; - this.stored = stored; - this.hasDocValues = hasDocValues; - this.correlationParams = correlationParams; - } - - public String getName() { - return name; - } - - public CorrelationVectorFieldType getMappedFieldType() { - return mappedFieldType; - } - - public FieldMapper.MultiFields getMultiFields() { - return multiFields; - } - - public FieldMapper.CopyTo getCopyTo() { - return copyTo; - } - - public Explicit getIgnoreMalformed() { - return ignoreMalformed; - } - - public boolean isStored() { - return stored; - } - - public boolean isHasDocValues() { - return hasDocValues; - } - - public CorrelationParamsContext getCorrelationParams() { - return correlationParams; - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java deleted file mode 100644 index 
5ac6d92792295..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/VectorFieldMapper.java +++ /dev/null @@ -1,399 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.mapper; - -import org.apache.lucene.search.FieldExistsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.Explicit; -import org.opensearch.common.xcontent.support.XContentMapValues; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.FieldMapper; -import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.Mapper; -import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.index.mapper.ParametrizedFieldMapper; -import org.opensearch.index.mapper.ParseContext; -import org.opensearch.index.mapper.TextSearchInfo; -import org.opensearch.index.mapper.ValueFetcher; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.QueryShardException; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.search.lookup.SearchLookup; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Optional; - -/** - * Parameterized field mapper for Correlation Vector type - * - * @opensearch.internal - */ -public abstract class VectorFieldMapper extends ParametrizedFieldMapper { - - /** - * name of Correlation Vector type - */ - public static final String CONTENT_TYPE = "correlation_vector"; - /** - * dimension of the correlation vectors - */ - public static final String DIMENSION = "dimension"; - /** - * context e.g. 
parameters and vector similarity function of Correlation Vector type - */ - public static final String CORRELATION_CONTEXT = "correlation_ctx"; - - private static VectorFieldMapper toType(FieldMapper in) { - return (VectorFieldMapper) in; - } - - /** - * definition of VectorFieldMapper.Builder - */ - public static class Builder extends ParametrizedFieldMapper.Builder { - protected Boolean ignoreMalformed; - - protected final Parameter<Boolean> stored = Parameter.boolParam("store", false, m -> toType(m).stored, false); - protected final Parameter<Boolean> hasDocValues = Parameter.boolParam("doc_values", false, m -> toType(m).hasDocValues, true); - protected final Parameter<Integer> dimension = new Parameter<>(DIMENSION, false, () -> -1, (n, c, o) -> { - if (o == null) { - throw new IllegalArgumentException("Dimension cannot be null"); - } - int value; - try { - value = XContentMapValues.nodeIntegerValue(o); - } catch (Exception ex) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "Unable to parse [dimension] from provided value [%s] for vector [%s]", o, name) - ); - } - if (value <= 0) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "Dimension value must be greater than 0 for vector: %s", name) - ); - } - return value; - }, m -> toType(m).dimension); - - protected final Parameter<CorrelationParamsContext> correlationParamsContext = new Parameter<>( - CORRELATION_CONTEXT, - false, - () -> null, - (n, c, o) -> CorrelationParamsContext.parse(o), - m -> toType(m).correlationParams - ); - - protected final Parameter<Map<String, String>> meta = Parameter.metaParam(); - - /** - * Parameterized ctor for VectorFieldMapper.Builder - * @param name name - */ - public Builder(String name) { - super(name); - } - - @Override - protected List<Parameter<?>> getParameters() { - return Arrays.asList(stored, hasDocValues, dimension, meta, correlationParamsContext); - } - - protected Explicit<Boolean> ignoreMalformed(BuilderContext context) { - if (ignoreMalformed != null) { - return new Explicit<>(ignoreMalformed, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); - } - return Defaults.IGNORE_MALFORMED; - } - - @Override - public ParametrizedFieldMapper build(BuilderContext context) { - final CorrelationParamsContext correlationParams = correlationParamsContext.getValue(); - final MultiFields multiFieldsBuilder = this.multiFieldsBuilder.build(this, context); - final CopyTo copyToBuilder = copyTo.build(); - final Explicit<Boolean> ignoreMalformed = ignoreMalformed(context); - final Map<String, String> metaValue = meta.getValue(); - - final CorrelationVectorFieldType mappedFieldType = new CorrelationVectorFieldType( - buildFullName(context), - metaValue, - dimension.getValue(), - correlationParams - ); - - CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput createLuceneFieldMapperInput = - new CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput( - name, - mappedFieldType, - multiFieldsBuilder, - copyToBuilder, - ignoreMalformed, - stored.get(), - hasDocValues.get(), - correlationParams - ); - return new CorrelationVectorFieldMapper(createLuceneFieldMapperInput); - } - } - - /** - * definition of VectorFieldMapper.TypeParser - */ - public static class TypeParser implements Mapper.TypeParser { - - /** - * default constructor of VectorFieldMapper.TypeParser - */ - public TypeParser() {} - - @Override - public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext context) throws MapperParsingException { - Builder builder = new VectorFieldMapper.Builder(name); - builder.parse(name, context, node); - - if (builder.dimension.getValue() == -1) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "Dimension value missing for vector: %s", name)); - } - return builder; - } - } - - /** - * definition of VectorFieldMapper.CorrelationVectorFieldType - */ - public static class CorrelationVectorFieldType extends MappedFieldType { - int dimension; - CorrelationParamsContext correlationParams; - - /** - * Parameterized ctor for VectorFieldMapper.CorrelationVectorFieldType - * @param name name of the field - * @param meta meta of the field - * @param dimension dimension of the field - */ - public CorrelationVectorFieldType(String name, Map<String, String> meta, int dimension) { - this(name, meta, dimension, null); - } - - /** - * Parameterized ctor for VectorFieldMapper.CorrelationVectorFieldType - * @param name name of the field - * @param meta meta of the field - * @param dimension dimension of the field - * @param correlationParams correlation params for the field - */ - public CorrelationVectorFieldType( - String name, - Map<String, String> meta, - int dimension, - CorrelationParamsContext correlationParams - ) { - super(name, false, false, true, TextSearchInfo.NONE, meta); - this.dimension = dimension; - this.correlationParams = correlationParams; - } - - @Override - public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String s) { - throw new UnsupportedOperationException("Correlation vectors do not support field search"); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Query existsQuery(QueryShardContext context) { - return new FieldExistsQuery(name()); - } - - @Override - public Query termQuery(Object o, QueryShardContext context) { - throw new QueryShardException( - context, - String.format( - Locale.getDefault(), - "Correlation vectors do not support exact searching, use correlation queries instead: [%s]", - name() - ) - ); - } - - /** - * get dimension - * @return dimension - */ - public int getDimension() { - return dimension; - } - - /** - * get correlation params - * @return correlation params - */ - public CorrelationParamsContext getCorrelationParams() { - return correlationParams; - } - } - - protected Explicit<Boolean> ignoreMalformed; - protected boolean stored; - protected boolean hasDocValues; - protected Integer dimension; - protected CorrelationParamsContext correlationParams; - - /** - * Parameterized ctor for VectorFieldMapper - * @param simpleName name of field - * @param mappedFieldType field type of field - * @param multiFields multi fields - * @param copyTo copy to - * @param ignoreMalformed ignore malformed - * @param stored stored field - * @param hasDocValues has doc values - */ - public VectorFieldMapper( - String simpleName, - CorrelationVectorFieldType mappedFieldType, - FieldMapper.MultiFields multiFields, - FieldMapper.CopyTo copyTo, - Explicit<Boolean> ignoreMalformed, - boolean stored, - boolean hasDocValues - ) { - super(simpleName, mappedFieldType, multiFields, copyTo); - this.ignoreMalformed = ignoreMalformed; - this.stored = stored; - this.hasDocValues = hasDocValues; - this.dimension = mappedFieldType.getDimension(); - } - - @Override - protected VectorFieldMapper clone() { - return (VectorFieldMapper) super.clone(); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void parseCreateField(ParseContext parseContext) throws IOException { - parseCreateField(parseContext, fieldType().getDimension()); - } - -
protected abstract void parseCreateField(ParseContext parseContext, int dimension) throws IOException; - - Optional getFloatsFromContext(ParseContext context, int dimension) throws IOException { - context.path().add(simpleName()); - - List vector = new ArrayList<>(); - XContentParser.Token token = context.parser().currentToken(); - float value; - if (token == XContentParser.Token.START_ARRAY) { - token = context.parser().nextToken(); - while (token != XContentParser.Token.END_ARRAY) { - value = context.parser().floatValue(); - - if (Float.isNaN(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be NaN"); - } - - if (Float.isInfinite(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be infinity"); - } - vector.add(value); - token = context.parser().nextToken(); - } - } else if (token == XContentParser.Token.VALUE_NUMBER) { - value = context.parser().floatValue(); - if (Float.isNaN(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be NaN"); - } - - if (Float.isInfinite(value)) { - throw new IllegalArgumentException("Correlation vector values cannot be infinity"); - } - vector.add(value); - context.parser().nextToken(); - } else if (token == XContentParser.Token.VALUE_NULL) { - context.path().remove(); - return Optional.empty(); - } - - if (dimension != vector.size()) { - String errorMessage = String.format( - Locale.ROOT, - "Vector dimension mismatch. Expected: %d, Given: %d", - dimension, - vector.size() - ); - throw new IllegalArgumentException(errorMessage); - } - - float[] array = new float[vector.size()]; - int i = 0; - for (Float f : vector) { - array[i++] = f; - } - return Optional.of(array); - } - - @Override - protected boolean docValuesByDefault() { - return true; - } - - @Override - public ParametrizedFieldMapper.Builder getMergeBuilder() { - return new VectorFieldMapper.Builder(simpleName()).init(this); - } - - @Override - public boolean parsesArrayValue() { - return true; - } - - @Override - public CorrelationVectorFieldType fieldType() { - return (CorrelationVectorFieldType) super.fieldType(); - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || ignoreMalformed.explicit()) { - builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); - } - } - - /** - * Class for constants used in parent class VectorFieldMapper - */ - public static class Names { - public static final String IGNORE_MALFORMED = "ignore_malformed"; - } - - /** - * Class for constants used in parent class VectorFieldMapper - */ - public static class Defaults { - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java deleted file mode 100644 index 4fdc622c3d886..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/mapper/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/** - * correlation field mapper package - */ -package org.opensearch.plugin.correlation.core.index.mapper; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java deleted file mode 100644 index cfc0ffdfa81f1..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * package to wrap Lucene KnnFloatVectorField and KnnFloatVectorQuery for Opensearch events-correlation-engine - */ -package org.opensearch.plugin.correlation.core.index; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java deleted file mode 100644 index e95b68e855cca..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilder.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.query; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.Query; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.ParsingException; -import org.opensearch.core.common.Strings; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.NumberFieldMapper; -import org.opensearch.index.query.AbstractQueryBuilder; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.WithFieldName; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -/** - * Constructs a query to get correlated events or documents for a particular event or document. - * - * @opensearch.internal - */ -public class CorrelationQueryBuilder extends AbstractQueryBuilder implements WithFieldName { - - private static final Logger log = LogManager.getLogger(CorrelationQueryBuilder.class); - protected static final ParseField VECTOR_FIELD = new ParseField("vector"); - protected static final ParseField K_FIELD = new ParseField("k"); - protected static final ParseField FILTER_FIELD = new ParseField("filter"); - /** - * max number of neighbors that can be retrieved. 
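- * The parameterized constructors reject k outside the range (0, K_MAX]. For illustration only (field name and values are hypothetical), the body this builder parses looks like - * "correlation": { "my_vector_field": { "vector": [0.1, 0.2, 0.3], "k": 5, "boost": 1.0 } }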
- */ - public static int K_MAX = 10000; - - /** - * name of the query - */ - public static final ParseField NAME_FIELD = new ParseField("correlation"); - - private String fieldName; - private float[] vector; - private int k = 0; - private double boost; - private QueryBuilder filter; - - private CorrelationQueryBuilder() {} - - /** - * parameterized ctor for CorrelationQueryBuilder - * @param fieldName field name for query - * @param vector query vector - * @param k number of nearby neighbors - */ - public CorrelationQueryBuilder(String fieldName, float[] vector, int k) { - this(fieldName, vector, k, null); - } - - /** - * parameterized ctor for CorrelationQueryBuilder - * @param fieldName field name for query - * @param vector query vector - * @param k number of nearby neighbors - * @param filter optional filter query - */ - public CorrelationQueryBuilder(String fieldName, float[] vector, int k, QueryBuilder filter) { - if (Strings.isNullOrEmpty(fieldName)) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] requires fieldName", NAME_FIELD.getPreferredName()) - ); - } - if (vector == null) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] requires query vector", NAME_FIELD.getPreferredName()) - ); - } - if (vector.length == 0) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] query vector is empty", NAME_FIELD.getPreferredName()) - ); - } - if (k <= 0) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "[%s] requires k > 0", NAME_FIELD.getPreferredName())); - } - if (k > K_MAX) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "[%s] requires k <= %d", NAME_FIELD.getPreferredName(), K_MAX) - ); - } - - this.fieldName = fieldName; - this.vector = vector; - this.k = k; - this.filter = filter; - } - - /** - * parameterized ctor for CorrelationQueryBuilder - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationQueryBuilder(StreamInput sin) throws IOException { - super(sin); - this.fieldName = sin.readString(); - this.vector = sin.readFloatArray(); - this.k = sin.readInt(); - this.filter = sin.readOptionalNamedWriteable(QueryBuilder.class); - } - - private static float[] objectsToFloats(List<Object> objs) { - float[] vector = new float[objs.size()]; - for (int i = 0; i < objs.size(); ++i) { - vector[i] = ((Number) objs.get(i)).floatValue(); - } - return vector; - } - - /** - * parse into CorrelationQueryBuilder - * @param xcp XContentParser - * @return CorrelationQueryBuilder - */ - public static CorrelationQueryBuilder parse(XContentParser xcp) throws IOException { - String fieldName = null; - List<Object> vector = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - - int k = 0; - QueryBuilder filter = null; - String queryName = null; - String currentFieldName = null; - XContentParser.Token token; - while ((token = xcp.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = xcp.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - throwParsingExceptionOnMultipleFields(NAME_FIELD.getPreferredName(), xcp.getTokenLocation(), fieldName, currentFieldName); - fieldName = currentFieldName; - while ((token = xcp.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = xcp.currentName(); - } else if (token.isValue() || token == XContentParser.Token.START_ARRAY) { - if (VECTOR_FIELD.match(currentFieldName,
xcp.getDeprecationHandler())) { - vector = xcp.list(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - boost = xcp.floatValue(); - } else if (K_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - k = (Integer) NumberFieldMapper.NumberType.INTEGER.parse(xcp.objectBytes(), false); - } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, xcp.getDeprecationHandler())) { - queryName = xcp.text(); - } else { - throw new ParsingException( - xcp.getTokenLocation(), - "[" + NAME_FIELD.getPreferredName() + "] query does not support [" + currentFieldName + "]" - ); - } - } else if (token == XContentParser.Token.START_OBJECT) { - String tokenName = xcp.currentName(); - if (FILTER_FIELD.getPreferredName().equals(tokenName)) { - filter = parseInnerQueryBuilder(xcp); - } else { - throw new ParsingException( - xcp.getTokenLocation(), - "[" + NAME_FIELD.getPreferredName() + "] unknown token [" + token + "]" - ); - } - } else { - throw new ParsingException( - xcp.getTokenLocation(), - "[" + NAME_FIELD.getPreferredName() + "] unknown token [" + token + "] after [" + currentFieldName + "]" - ); - } - } - } else { - throwParsingExceptionOnMultipleFields(NAME_FIELD.getPreferredName(), xcp.getTokenLocation(), fieldName, xcp.currentName()); - fieldName = xcp.currentName(); - vector = xcp.list(); - } - } - - assert vector != null; - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(fieldName, objectsToFloats(vector), k, filter); - correlationQueryBuilder.queryName(queryName); - correlationQueryBuilder.boost(boost); - return correlationQueryBuilder; - } - - public void setFieldName(String fieldName) { - this.fieldName = fieldName; - } - - /** - * get field name - * @return field name - */ - @Override - public String fieldName() { - return fieldName; - } - - public void setVector(float[] vector) { - this.vector = vector; - } - - /** - * get query vector - * @return query vector - */ - public Object vector() { - return vector; - } - - public void setK(int k) { - this.k = k; - } - - /** - * get number of nearby neighbors - * @return number of nearby neighbors - */ - public int getK() { - return k; - } - - public void setBoost(double boost) { - this.boost = boost; - } - - /** - * get boost - * @return boost - */ - public double getBoost() { - return boost; - } - - public void setFilter(QueryBuilder filter) { - this.filter = filter; - } - - /** - * get optional filter - * @return optional filter - */ - public QueryBuilder getFilter() { - return filter; - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - out.writeFloatArray(vector); - out.writeInt(k); - out.writeOptionalNamedWriteable(filter); - } - - @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(fieldName); - - builder.field(VECTOR_FIELD.getPreferredName(), vector); - builder.field(K_FIELD.getPreferredName(), k); - if (filter != null) { - builder.field(FILTER_FIELD.getPreferredName(), filter); - } - printBoostAndQueryName(builder); - builder.endObject(); - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType mappedFieldType = context.fieldMapper(fieldName); - - if (!(mappedFieldType instanceof VectorFieldMapper.CorrelationVectorFieldType)) { - throw new IllegalArgumentException(String.format(Locale.getDefault(), "Field '%s' is not knn_vector type.", this.fieldName)); - } - - 
VectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = - (VectorFieldMapper.CorrelationVectorFieldType) mappedFieldType; - int fieldDimension = correlationVectorFieldType.getDimension(); - - if (fieldDimension != vector.length) { - throw new IllegalArgumentException( - String.format( - Locale.getDefault(), - "Query vector has invalid dimension: %d. Dimension should be: %d", - vector.length, - fieldDimension - ) - ); - } - - String indexName = context.index().getName(); - CorrelationQueryFactory.CreateQueryRequest createQueryRequest = new CorrelationQueryFactory.CreateQueryRequest( - indexName, - this.fieldName, - this.vector, - this.k, - this.filter, - context - ); - return CorrelationQueryFactory.create(createQueryRequest); - } - - @Override - protected boolean doEquals(CorrelationQueryBuilder other) { - return Objects.equals(fieldName, other.fieldName) && Arrays.equals(vector, other.vector) && Objects.equals(k, other.k); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, vector, k); - } - - @Override - public String getWriteableName() { - return NAME_FIELD.getPreferredName(); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java deleted file mode 100644 index d5db299bfa3a5..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryFactory.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.query; - -import org.apache.lucene.search.KnnFloatVectorQuery; -import org.apache.lucene.search.Query; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Optional; - -/** - * CorrelationQueryFactory util class is used to construct a Lucene KnnFloatVectorQuery. - * - * @opensearch.internal - */ -public class CorrelationQueryFactory { - - /** - * static method which takes input params to construct a Lucene KnnFloatVectorQuery. - * @param createQueryRequest object parameter containing inputs for constructing Lucene KnnFloatVectorQuery. - * @return generic Lucene Query object - */ - public static Query create(CreateQueryRequest createQueryRequest) { - final String indexName = createQueryRequest.getIndexName(); - final String fieldName = createQueryRequest.getFieldName(); - final int k = createQueryRequest.getK(); - final float[] vector = createQueryRequest.getVector(); - - if (createQueryRequest.getFilter().isPresent()) { - final QueryShardContext context = createQueryRequest.getContext() - .orElseThrow(() -> new RuntimeException("Shard context cannot be null")); - - try { - final Query filterQuery = createQueryRequest.getFilter().get().toQuery(context); - return new KnnFloatVectorQuery(fieldName, vector, k, filterQuery); - } catch (IOException ex) { - throw new RuntimeException("Cannot create knn query with filter", ex); - } - } - return new KnnFloatVectorQuery(fieldName, vector, k); - } - - /** - * class containing params to construct a Lucene KnnFloatVectorQuery. 
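- * A minimal usage sketch (index and field names hypothetical): create(new CreateQueryRequest("logs", "my_vector_field", new float[] { 0.1f, 0.2f }, 5, null, null)) - * returns new KnnFloatVectorQuery("my_vector_field", vector, 5), since no filter is present.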
- * - * @opensearch.internal - */ - public static class CreateQueryRequest { - private String indexName; - - private String fieldName; - - private float[] vector; - - private int k; - - private QueryBuilder filter; - - private QueryShardContext context; - - /** - * Parameterized ctor for CreateQueryRequest - * @param indexName index name - * @param fieldName field name - * @param vector query vector - * @param k number of nearby neighbors - * @param filter additional filter query - * @param context QueryShardContext - */ - public CreateQueryRequest( - String indexName, - String fieldName, - float[] vector, - int k, - QueryBuilder filter, - QueryShardContext context - ) { - this.indexName = indexName; - this.fieldName = fieldName; - this.vector = vector; - this.k = k; - this.filter = filter; - this.context = context; - } - - /** - * get index name - * @return get index name - */ - public String getIndexName() { - return indexName; - } - - /** - * get field name - * @return get field name - */ - public String getFieldName() { - return fieldName; - } - - /** - * get vector - * @return get vector - */ - public float[] getVector() { - return vector; - } - - /** - * get number of nearby neighbors - * @return number of nearby neighbors - */ - public int getK() { - return k; - } - - /** - * get optional filter query - * @return get optional filter query - */ - public Optional getFilter() { - return Optional.ofNullable(filter); - } - - /** - * get optional query shard context - * @return get optional query shard context - */ - public Optional getContext() { - return Optional.ofNullable(context); - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java deleted file mode 100644 index 82be787af5a72..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * base package of events-correlation-engine - */ -package org.opensearch.plugin.correlation; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java deleted file mode 100644 index ab6f05ec0e6a3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleAction.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.action; - -import org.opensearch.action.ActionType; - -/** - * Transport Action for indexing correlation rules. 
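- * Reached over REST through the handler registered at CORRELATION_RULES_BASE_URI, e.g. (body abridged; method may be POST or PUT): - * POST /_correlation/rules { "name": "s3 to app logs", "correlate": [ ... ] }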
- * - * @opensearch.internal - */ -public class IndexCorrelationRuleAction extends ActionType<IndexCorrelationRuleResponse> { - - /** - * Instance of IndexCorrelationRuleAction - */ - public static final IndexCorrelationRuleAction INSTANCE = new IndexCorrelationRuleAction(); - /** - * Name of IndexCorrelationRuleAction - */ - public static final String NAME = "cluster:admin/correlation/rules"; - - private IndexCorrelationRuleAction() { - super(NAME, IndexCorrelationRuleResponse::new); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java deleted file mode 100644 index 3fe25d144059d..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleRequest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.action; - -import org.opensearch.action.ActionRequest; - -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.rest.RestRequest; - -import java.io.IOException; - -/** - * A request to index correlation rules. - * - * @opensearch.internal - */ -public class IndexCorrelationRuleRequest extends ActionRequest { - - private String correlationRuleId; - - private CorrelationRule correlationRule; - - private RestRequest.Method method; - - /** - * Parameterized ctor for IndexCorrelationRuleRequest - * @param correlationRule correlation rule - * @param method Rest method of request PUT or POST - */ - public IndexCorrelationRuleRequest(CorrelationRule correlationRule, RestRequest.Method method) { - super(); - this.correlationRuleId = ""; - this.correlationRule = correlationRule; - this.method = method; - } - - /** - * Parameterized ctor for IndexCorrelationRuleRequest - * @param correlationRuleId correlation rule id - * @param correlationRule correlation rule - * @param method Rest method of request PUT or POST - */ - public IndexCorrelationRuleRequest(String correlationRuleId, CorrelationRule correlationRule, RestRequest.Method method) { - super(); - this.correlationRuleId = correlationRuleId; - this.correlationRule = correlationRule; - this.method = method; - } - - /** - * StreamInput ctor of IndexCorrelationRuleRequest - * @param sin StreamInput - * @throws IOException IOException - */ - public IndexCorrelationRuleRequest(StreamInput sin) throws IOException { - this(sin.readString(), CorrelationRule.readFrom(sin), sin.readEnum(RestRequest.Method.class)); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(correlationRuleId); - correlationRule.writeTo(out); - out.writeEnum(method); // keep the write order in lockstep with the StreamInput ctor, which also reads the method enum - } - - /** - * get correlation rule id - * @return correlation rule id - */ - public String getCorrelationRuleId() { - return correlationRuleId; - } - - /** - * get correlation rule - * @return correlation rule - */ - public CorrelationRule getCorrelationRule() { - return correlationRule; - } - - 
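The request class above follows OpenSearch's transport convention: writeTo and the StreamInput constructor must write and read the same fields in the same order. A minimal round-trip sketch with OpenSearch's in-memory streams (the two strings stand in for the real rule payload; names are illustrative):

import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.StreamInput;

public class RoundTripSketch {
    public static void main(String[] args) throws Exception {
        BytesStreamOutput out = new BytesStreamOutput();
        // Write side: fields go out in a fixed order...
        out.writeString("rule-id-1");
        out.writeString("s3 to app logs");
        // ...and the read side consumes them back in exactly that order.
        StreamInput in = out.bytes().streamInput();
        String id = in.readString();
        String name = in.readString();
        System.out.println(id + " / " + name);
    }
}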
/** - * get Rest method - * @return Rest method - */ - public RestRequest.Method getMethod() { - return method; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java deleted file mode 100644 index 8102e6585825e..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/IndexCorrelationRuleResponse.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.action; - -import org.opensearch.core.ParseField; -import org.opensearch.core.action.ActionResponse; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; - -import java.io.IOException; - -/** - * Transport Response for indexing correlation rules. - * - * @opensearch.internal - */ -public class IndexCorrelationRuleResponse extends ActionResponse implements ToXContentObject { - - private static final ParseField _ID = new ParseField("_id"); - private static final ParseField _VERSION = new ParseField("_version"); - - private String id; - - private Long version; - - private RestStatus status; - - private CorrelationRule correlationRule; - - /** - * Parameterized ctor for IndexCorrelationRuleResponse - * @param version version of rule - * @param status Rest status of indexing rule - * @param correlationRule correlation rule - */ - public IndexCorrelationRuleResponse(String id, Long version, RestStatus status, CorrelationRule correlationRule) { - super(); - this.id = id; - this.version = version; - this.status = status; - this.correlationRule = correlationRule; - } - - /** - * StreamInput ctor of IndexCorrelationRuleResponse - * @param sin StreamInput - * @throws IOException IOException - */ - public IndexCorrelationRuleResponse(StreamInput sin) throws IOException { - this(sin.readString(), sin.readLong(), sin.readEnum(RestStatus.class), CorrelationRule.readFrom(sin)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject().field(_ID.getPreferredName(), id).field(_VERSION.getPreferredName(), version); - - builder.field("rule", correlationRule); - return builder.endObject(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeLong(version); - out.writeEnum(status); - correlationRule.writeTo(out); - } - - /** - * get id - * @return id of rule - */ - public String getId() { - return id; - } - - /** - * get status - * @return Rest status of indexing rule - */ - public RestStatus getStatus() { - return status; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java deleted file mode 100644 index c01f2936a20ca..0000000000000 
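From the toXContent method above, a successful response body renders roughly as follows (id and version values are illustrative; note that the status field travels on the transport stream via writeTo but is not part of the JSON):

{
  "_id": "rule-id-1",
  "_version": 1,
  "rule": { "name": "s3 to app logs", "correlate": [ ... ] }
}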
--- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/action/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * Transport Actions, Requests and Responses for correlation rules - */ -package org.opensearch.plugin.correlation.rules.action; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java deleted file mode 100644 index 3797e0c7043dc..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationQuery.java +++ /dev/null @@ -1,197 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.model; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -/** - * Correlation Query DSL - * { - * "index": "s3_access_logs", - * "query": "aws.cloudtrail.eventName:ReplicateObject", - * "timestampField": "@timestamp", - * "tags": [ - * "s3" - * ] - * } - */ -public class CorrelationQuery implements Writeable, ToXContentObject { - - private static final Logger log = LogManager.getLogger(CorrelationQuery.class); - private static final ParseField INDEX_FIELD = new ParseField("index"); - private static final ParseField QUERY_FIELD = new ParseField("query"); - private static final ParseField TIMESTAMP_FIELD = new ParseField("timestampField"); - private static final ParseField TAGS_FIELD = new ParseField("tags"); - private static final ObjectParser PARSER = new ObjectParser<>("CorrelationQuery", CorrelationQuery::new); - - static { - PARSER.declareString(CorrelationQuery::setIndex, INDEX_FIELD); - PARSER.declareString(CorrelationQuery::setQuery, QUERY_FIELD); - PARSER.declareStringOrNull(CorrelationQuery::setTimestampField, TIMESTAMP_FIELD); - PARSER.declareField((xcp, query, context) -> { - List tags = new ArrayList<>(); - XContentParser.Token currentToken = xcp.currentToken(); - if (currentToken == XContentParser.Token.START_ARRAY) { - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - tags.add(xcp.text()); - } - } - query.setTags(tags); - }, TAGS_FIELD, ObjectParser.ValueType.STRING_ARRAY); - } - - private String index; - - private String query; - - private String timestampField; - - private List tags; - - private CorrelationQuery() { - this.timestampField = "_timestamp"; - } - - /** - * Parameterized ctor of Correlation Query - * @param index event index to correlate - * @param query query to filter relevant events for correlations from index - 
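The static PARSER block above is the standard declarative ObjectParser idiom: register one handler per field up front, then apply the parser to a token stream. A compact sketch of the same idiom on a hypothetical two-field POJO (the ObjectParser type parameters are the value type and a parse context type, Void here):

import org.opensearch.core.ParseField;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.XContentParser;

public class QueryPojo {
    private String index;
    private String query;

    public void setIndex(String index) { this.index = index; }
    public void setQuery(String query) { this.query = query; }

    // One declaration per field; the parser walks the tokens and dispatches to the setters.
    private static final ObjectParser<QueryPojo, Void> PARSER = new ObjectParser<>("QueryPojo", QueryPojo::new);
    static {
        PARSER.declareString(QueryPojo::setIndex, new ParseField("index"));
        PARSER.declareString(QueryPojo::setQuery, new ParseField("query"));
    }

    public static QueryPojo parse(XContentParser xcp) {
        return PARSER.apply(xcp, null);
    }
}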
* @param timestampField timestamp field in the index - * @param tags tags to store additional metadata as part of correlation queries. - */ - public CorrelationQuery(String index, String query, String timestampField, List tags) { - this.index = index; - this.query = query; - this.timestampField = timestampField != null ? timestampField : "_timestamp"; - this.tags = tags; - } - - /** - * StreamInput ctor of Correlation Query - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationQuery(StreamInput sin) throws IOException { - this(sin.readString(), sin.readString(), sin.readString(), sin.readStringList()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); - out.writeString(query); - out.writeString(timestampField); - out.writeStringCollection(tags); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX_FIELD.getPreferredName(), index) - .field(QUERY_FIELD.getPreferredName(), query) - .field(TIMESTAMP_FIELD.getPreferredName(), timestampField) - .field(TAGS_FIELD.getPreferredName(), tags); - return builder.endObject(); - } - - /** - * parse into CorrelationQuery - * @param xcp XContentParser - * @return CorrelationQuery - */ - public static CorrelationQuery parse(XContentParser xcp) { - return PARSER.apply(xcp, null); - } - - /** - * convert StreamInput to CorrelationQuery - * @param sin StreamInput - * @return CorrelationQuery - * @throws IOException IOException - */ - public static CorrelationQuery readFrom(StreamInput sin) throws IOException { - return new CorrelationQuery(sin); - } - - /** - * Set index - * @param index event index to correlate - */ - public void setIndex(String index) { - this.index = index; - } - - /** - * Get index - * @return event index to correlate - */ - public String getIndex() { - return index; - } - - /** - * Set query - * @param query query to filter relevant events for correlations from index - */ - public void setQuery(String query) { - this.query = query; - } - - /** - * Get query - * @return query to filter relevant events for correlations from index - */ - public String getQuery() { - return query; - } - - /** - * Set timestamp field - * @param timestampField timestamp field in the index - */ - public void setTimestampField(String timestampField) { - this.timestampField = timestampField != null ? timestampField : "_timestamp"; - } - - /** - * Get timestamp field - * @return timestamp field in the index - */ - public String getTimestampField() { - return timestampField; - } - - /** - * Set tags - * @param tags tags to store additional metadata as part of correlation queries. - */ - public void setTags(List tags) { - this.tags = tags; - } - - /** - * Get tags - * @return tags to store additional metadata as part of correlation queries. 
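Given the constructor and the timestamp defaulting above, building a query object that matches the DSL example in the class javadoc is a single call (a usage sketch; the values mirror that example):

import java.util.List;
import org.opensearch.plugin.correlation.rules.model.CorrelationQuery;

public class QueryUsageSketch {
    public static void main(String[] args) {
        CorrelationQuery q = new CorrelationQuery("s3_access_logs", "aws.cloudtrail.eventName:ReplicateObject", "@timestamp", List.of("s3"));
        // Passing null for timestampField would fall back to "_timestamp".
        System.out.println(q.getIndex() + " -> " + q.getTimestampField());
    }
}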
- */ - public List getTags() { - return tags; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java deleted file mode 100644 index 6978d7248e199..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/CorrelationRule.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.model; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.core.ParseField; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.core.xcontent.ObjectParser; -import org.opensearch.core.xcontent.ToXContentObject; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - -/** - * Correlation Rule DSL - * { - * "name": "s3 to app logs", - * "correlate": [ - * { - * "index": "s3_access_logs", - * "query": "aws.cloudtrail.eventName:ReplicateObject", - * "timestampField": "@timestamp", - * "tags": [ - * "s3" - * ] - * } - * ] - * } - * - * @opensearch.api - * @opensearch.experimental - */ -public class CorrelationRule implements Writeable, ToXContentObject { - - private static final Logger log = LogManager.getLogger(CorrelationRule.class); - - /** - * Correlation Rule Index - */ - public static final String CORRELATION_RULE_INDEX = ".opensearch-correlation-rules-config"; - - private static final ParseField ID_FIELD = new ParseField("id"); - private static final ParseField VERSION_FIELD = new ParseField("version"); - private static final ParseField NAME_FIELD = new ParseField("name"); - private static final ParseField CORRELATION_QUERIES_FIELD = new ParseField("correlate"); - private static final ObjectParser PARSER = new ObjectParser<>("CorrelationRule", CorrelationRule::new); - - static { - PARSER.declareString(CorrelationRule::setId, ID_FIELD); - PARSER.declareLong(CorrelationRule::setVersion, VERSION_FIELD); - PARSER.declareString(CorrelationRule::setName, NAME_FIELD); - PARSER.declareField((xcp, rule, context) -> { - List correlationQueries = new ArrayList<>(); - XContentParser.Token currentToken = xcp.currentToken(); - if (currentToken == XContentParser.Token.START_ARRAY) { - while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { - correlationQueries.add(CorrelationQuery.parse(xcp)); - } - } - rule.setCorrelationQueries(correlationQueries); - }, CORRELATION_QUERIES_FIELD, ObjectParser.ValueType.OBJECT_ARRAY); - } - - private String id; - - private Long version; - - private String name; - - private List correlationQueries; - - private CorrelationRule() {} - - /** - * Parameterized ctor of Correlation Rule - * @param name name of rule - * @param correlationQueries list of correlation queries part of rule - */ - public CorrelationRule(String name, List correlationQueries) { - this("", 1L, name, correlationQueries); - } - - /** - * Parameterized ctor of Correlation Rule - * 
@param id id of rule - * @param version version of rule - * @param name name of rule - * @param correlationQueries list of correlation queries part of rule - */ - public CorrelationRule(String id, Long version, String name, List<CorrelationQuery> correlationQueries) { - this.id = id; - this.version = version; - this.name = name; - this.correlationQueries = correlationQueries; - } - - /** - * StreamInput ctor of Correlation Rule - * @param sin StreamInput - * @throws IOException IOException - */ - public CorrelationRule(StreamInput sin) throws IOException { - this(sin.readString(), sin.readLong(), sin.readString(), sin.readList(CorrelationQuery::readFrom)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(ID_FIELD.getPreferredName(), id); - builder.field(VERSION_FIELD.getPreferredName(), version); - builder.field(NAME_FIELD.getPreferredName(), name); - - CorrelationQuery[] correlationQueries = new CorrelationQuery[] {}; - correlationQueries = this.correlationQueries.toArray(correlationQueries); - builder.field(CORRELATION_QUERIES_FIELD.getPreferredName(), correlationQueries); - return builder.endObject(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeLong(version); - out.writeString(name); - - out.writeList(correlationQueries); // size-prefixed, matching sin.readList(CorrelationQuery::readFrom) in the StreamInput ctor - } - - /** - * parse into CorrelationRule - * @param xcp XContentParser - * @param id id of rule - * @param version version of rule - * @return CorrelationRule - */ - public static CorrelationRule parse(XContentParser xcp, String id, Long version) { - return PARSER.apply(xcp, null); - } - - /** - * convert StreamInput to CorrelationRule - * @param sin StreamInput - * @return CorrelationRule - * @throws IOException IOException - */ - public static CorrelationRule readFrom(StreamInput sin) throws IOException { - return new CorrelationRule(sin); - } - - /** - * set id - * @param id id of rule - */ - public void setId(String id) { - this.id = id; - } - - /** - * get id - * @return id of rule - */ - public String getId() { - return id; - } - - /** - * set version - * @param version version of rule - */ - public void setVersion(Long version) { - this.version = version; - } - - /** - * get version - * @return version of rule - */ - public Long getVersion() { - return version; - } - - /** - * set name - * @param name name of rule - */ - public void setName(String name) { - this.name = name; - } - - /** - * get name - * @return name of rule - */ - public String getName() { - return name; - } - - /** - * set correlation queries - * @param correlationQueries correlation queries for the rule - */ - public void setCorrelationQueries(List<CorrelationQuery> correlationQueries) { - this.correlationQueries = correlationQueries; - } - - /** - * get correlation queries - * @return correlation queries for the rule - */ - public List<CorrelationQuery> getCorrelationQueries() { - return correlationQueries; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CorrelationRule that = (CorrelationRule) o; - return id.equals(that.id) - && version.equals(that.version) - && name.equals(that.name) - && correlationQueries.equals(that.correlationQueries); - } - - @Override - public int hashCode() { - return Objects.hash(id, version, name, correlationQueries); - } -} diff --git 
a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java deleted file mode 100644 index b04b7be3c62e3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/model/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * data models for correlation rules - */ -package org.opensearch.plugin.correlation.rules.model; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java deleted file mode 100644 index 3b2b7eb02ae5f..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/RestIndexCorrelationRuleAction.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.resthandler; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.client.node.NodeClient; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.plugin.correlation.EventsCorrelationPlugin; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleRequest; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleResponse; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.rest.BytesRestResponse; -import org.opensearch.rest.RestChannel; -import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestResponse; -import org.opensearch.rest.action.RestResponseListener; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; - -/** - * Rest action for indexing correlation rules. 
- * - * @opensearch.api - */ -public class RestIndexCorrelationRuleAction extends BaseRestHandler { - - private static final Logger log = LogManager.getLogger(RestIndexCorrelationRuleAction.class); - - /** - * Default constructor - */ - public RestIndexCorrelationRuleAction() {} - - @Override - public String getName() { - return "index_correlation_rule_action"; - } - - @Override - public List routes() { - return List.of( - new Route(RestRequest.Method.POST, EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI), - new Route( - RestRequest.Method.PUT, - String.format(Locale.ROOT, "%s/{%s}", EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI, "rule_id") - ) - ); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - log.debug(String.format(Locale.ROOT, "%s %s", request.method(), EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI)); - - String id = request.param("rule_id", ""); - - XContentParser xcp = request.contentParser(); - - CorrelationRule correlationRule = CorrelationRule.parse(xcp, id, 1L); - IndexCorrelationRuleRequest indexCorrelationRuleRequest = new IndexCorrelationRuleRequest(id, correlationRule, request.method()); - return channel -> client.execute( - IndexCorrelationRuleAction.INSTANCE, - indexCorrelationRuleRequest, - indexCorrelationRuleResponse(channel, request.method()) - ); - } - - private RestResponseListener indexCorrelationRuleResponse( - RestChannel channel, - RestRequest.Method restMethod - ) { - return new RestResponseListener<>(channel) { - @Override - public RestResponse buildResponse(IndexCorrelationRuleResponse response) throws Exception { - RestStatus returnStatus = RestStatus.CREATED; - if (restMethod == RestRequest.Method.PUT) { - returnStatus = RestStatus.OK; - } - - BytesRestResponse restResponse = new BytesRestResponse( - returnStatus, - response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS) - ); - - if (restMethod == RestRequest.Method.POST) { - String location = String.format( - Locale.ROOT, - "%s/%s", - EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI, - response.getId() - ); - restResponse.addHeader("Location", location); - } - - return restResponse; - } - }; - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java deleted file mode 100644 index 607ec355801ad..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/resthandler/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
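Putting the routes and the response listener above together: POST creates a rule and answers 201 Created with a Location header pointing at the new rule, while PUT to an explicit rule_id upserts and answers 200 OK. Assuming EventsCorrelationPlugin.CORRELATION_RULES_BASE_URI (defined outside this hunk) resolves to /_correlation/rules, a create call looks roughly like:

POST /_correlation/rules
{
  "name": "s3 to app logs",
  "correlate": [
    {
      "index": "s3_access_logs",
      "query": "aws.cloudtrail.eventName:ReplicateObject",
      "timestampField": "@timestamp",
      "tags": [ "s3" ]
    }
  ]
}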
- */ - -/** - * Rest Handlers for correlation rules - */ -package org.opensearch.plugin.correlation.rules.resthandler; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java deleted file mode 100644 index 7b4fb670c4aee..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/TransportIndexCorrelationRuleAction.java +++ /dev/null @@ -1,234 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.rules.transport; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.OpenSearchStatusException; -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.index.IndexResponse; -import org.opensearch.action.support.ActionFilters; -import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.action.support.WriteRequest; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.inject.Inject; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.rest.RestStatus; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleAction; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleRequest; -import org.opensearch.plugin.correlation.rules.action.IndexCorrelationRuleResponse; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; -import org.opensearch.plugin.correlation.utils.CorrelationRuleIndices; -import org.opensearch.plugin.correlation.utils.IndexUtils; -import org.opensearch.rest.RestRequest; -import org.opensearch.tasks.Task; -import org.opensearch.transport.TransportService; - -import java.io.IOException; -import java.util.Locale; - -/** - * Transport Action for indexing correlation rules. 
- * - * @opensearch.internal - */ -public class TransportIndexCorrelationRuleAction extends HandledTransportAction { - - private static final Logger log = LogManager.getLogger(TransportIndexCorrelationRuleAction.class); - - private final Client client; - - private final CorrelationRuleIndices correlationRuleIndices; - - private final ClusterService clusterService; - - /** - * Parameterized ctor for Transport Action - * @param transportService TransportService - * @param client OS client - * @param actionFilters ActionFilters - * @param clusterService ClusterService - * @param correlationRuleIndices CorrelationRuleIndices which manages lifecycle of correlation rule index - */ - @Inject - public TransportIndexCorrelationRuleAction( - TransportService transportService, - Client client, - ActionFilters actionFilters, - ClusterService clusterService, - CorrelationRuleIndices correlationRuleIndices - ) { - super(IndexCorrelationRuleAction.NAME, transportService, actionFilters, IndexCorrelationRuleRequest::new); - this.client = client; - this.clusterService = clusterService; - this.correlationRuleIndices = correlationRuleIndices; - } - - @Override - protected void doExecute(Task task, IndexCorrelationRuleRequest request, ActionListener listener) { - AsyncIndexCorrelationRuleAction asyncAction = new AsyncIndexCorrelationRuleAction(request, listener); - asyncAction.start(); - } - - private class AsyncIndexCorrelationRuleAction { - private final IndexCorrelationRuleRequest request; - - private final ActionListener listener; - - AsyncIndexCorrelationRuleAction(IndexCorrelationRuleRequest request, ActionListener listener) { - this.request = request; - this.listener = listener; - } - - void start() { - try { - if (correlationRuleIndices.correlationRuleIndexExists() == false) { - try { - correlationRuleIndices.initCorrelationRuleIndex(new ActionListener<>() { - @Override - public void onResponse(CreateIndexResponse response) { - try { - onCreateMappingsResponse(response); - indexCorrelationRule(); - } catch (IOException e) { - onFailures(e); - } - } - - @Override - public void onFailure(Exception e) { - onFailures(e); - } - }); - } catch (IOException e) { - onFailures(e); - } - } else if (!IndexUtils.correlationRuleIndexUpdated) { - IndexUtils.updateIndexMapping( - CorrelationRule.CORRELATION_RULE_INDEX, - CorrelationRuleIndices.correlationRuleIndexMappings(), - clusterService.state(), - client.admin().indices(), - new ActionListener<>() { - @Override - public void onResponse(AcknowledgedResponse response) { - onUpdateMappingsResponse(response); - try { - indexCorrelationRule(); - } catch (IOException e) { - onFailures(e); - } - } - - @Override - public void onFailure(Exception e) { - onFailures(e); - } - } - ); - } else { - indexCorrelationRule(); - } - } catch (IOException ex) { - onFailures(ex); - } - } - - void indexCorrelationRule() throws IOException { - IndexRequest indexRequest; - if (request.getMethod() == RestRequest.Method.POST) { - indexRequest = new IndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .source(request.getCorrelationRule().toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .timeout(TimeValue.timeValueSeconds(60)); - } else { - indexRequest = new IndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).setRefreshPolicy( - WriteRequest.RefreshPolicy.IMMEDIATE - ) - .source(request.getCorrelationRule().toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - 
.id(request.getCorrelationRuleId()) - .timeout(TimeValue.timeValueSeconds(60)); - } - - client.index(indexRequest, new ActionListener<>() { - @Override - public void onResponse(IndexResponse response) { - if (response.status().equals(RestStatus.CREATED) || response.status().equals(RestStatus.OK)) { - CorrelationRule ruleResponse = request.getCorrelationRule(); - ruleResponse.setId(response.getId()); - onOperation(ruleResponse); - } else { - onFailures(new OpenSearchStatusException(response.toString(), RestStatus.INTERNAL_SERVER_ERROR)); - } - } - - @Override - public void onFailure(Exception e) { - onFailures(e); - } - }); - } - - private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { - if (response.isAcknowledged()) { - log.info(String.format(Locale.ROOT, "Created %s with mappings.", CorrelationRule.CORRELATION_RULE_INDEX)); - IndexUtils.correlationRuleIndexUpdated(); - } else { - log.error(String.format(Locale.ROOT, "Create %s mappings call not acknowledged.", CorrelationRule.CORRELATION_RULE_INDEX)); - throw new OpenSearchStatusException( - String.format(Locale.getDefault(), "Create %s mappings call not acknowledged", CorrelationRule.CORRELATION_RULE_INDEX), - RestStatus.INTERNAL_SERVER_ERROR - ); - } - } - - private void onUpdateMappingsResponse(AcknowledgedResponse response) { - if (response.isAcknowledged()) { - log.info(String.format(Locale.ROOT, "Created %s with mappings.", CorrelationRule.CORRELATION_RULE_INDEX)); - IndexUtils.correlationRuleIndexUpdated(); - } else { - log.error(String.format(Locale.ROOT, "Create %s mappings call not acknowledged.", CorrelationRule.CORRELATION_RULE_INDEX)); - throw new OpenSearchStatusException( - String.format(Locale.getDefault(), "Create %s mappings call not acknowledged", CorrelationRule.CORRELATION_RULE_INDEX), - RestStatus.INTERNAL_SERVER_ERROR - ); - } - } - - private void onOperation(CorrelationRule correlationRule) { - finishHim(correlationRule, null); - } - - private void onFailures(Exception t) { - finishHim(null, t); - } - - private void finishHim(CorrelationRule correlationRule, Exception t) { - if (t != null) { - listener.onFailure(t); - } else { - listener.onResponse( - new IndexCorrelationRuleResponse( - correlationRule.getId(), - correlationRule.getVersion(), - request.getMethod() == RestRequest.Method.POST ? RestStatus.CREATED : RestStatus.OK, - correlationRule - ) - ); - } - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java deleted file mode 100644 index 7a47efbb9bb45..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/rules/transport/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * Transport Actions for correlation rules. 
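The two branches above differ only in whether the caller supplies a document id: POST lets OpenSearch generate one, PUT indexes under the given rule id. A condensed sketch of the same IndexRequest construction (the rule JSON string is a stand-in for the serialized correlation rule):

import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.support.WriteRequest;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.xcontent.MediaTypeRegistry;

public class IndexRequestSketch {
    public static IndexRequest build(String ruleId, String ruleJson) {
        IndexRequest request = new IndexRequest(".opensearch-correlation-rules-config")
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) // rule becomes searchable as soon as the call returns
            .source(ruleJson, MediaTypeRegistry.JSON)
            .timeout(TimeValue.timeValueSeconds(60));
        if (ruleId != null && !ruleId.isEmpty()) {
            request.id(ruleId); // PUT path: upsert under the caller-supplied id
        }
        return request;
    }
}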
- */ -package org.opensearch.plugin.correlation.rules.transport; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java deleted file mode 100644 index 2e2dbbffbeaa2..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettings.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.settings; - -import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.TimeValue; - -import java.util.concurrent.TimeUnit; - -import static org.opensearch.common.settings.Setting.Property.IndexScope; - -/** - * Settings for events-correlation-engine. - * - * @opensearch.api - * @opensearch.experimental - */ -public class EventsCorrelationSettings { - /** - * Correlation Index setting name - */ - public static final String CORRELATION_INDEX = "index.correlation"; - /** - * Boolean setting to check if an OS index is a correlation index. - */ - public static final Setting<Boolean> IS_CORRELATION_INDEX_SETTING = Setting.boolSetting(CORRELATION_INDEX, false, IndexScope); - /** - * Global time window setting for Correlations - */ - public static final Setting<TimeValue> CORRELATION_TIME_WINDOW = Setting.positiveTimeSetting( - "plugins.security_analytics.correlation_time_window", - new TimeValue(5, TimeUnit.MINUTES), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - /** - * Default constructor - */ - public EventsCorrelationSettings() {} -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java deleted file mode 100644 index 795291cd0de2e..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/settings/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * Settings for events-correlation-engine - */ -package org.opensearch.plugin.correlation.settings; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java deleted file mode 100644 index 3656bd413733a..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/CorrelationRuleIndices.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license.
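Both settings above follow the usual OpenSearch Setting pattern: a typed constant that the plugin registers and later reads against a Settings instance. A small usage sketch (the override value is illustrative):

import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.plugin.correlation.settings.EventsCorrelationSettings;

public class SettingsSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            .put("plugins.security_analytics.correlation_time_window", "10m")
            .build();
        // Typed read; falls back to the declared default (5 minutes) when unset.
        TimeValue window = EventsCorrelationSettings.CORRELATION_TIME_WINDOW.get(settings);
        System.out.println(window);
    }
}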
- */ - -package org.opensearch.plugin.correlation.utils; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.Settings; -import org.opensearch.core.action.ActionListener; -import org.opensearch.plugin.correlation.rules.model.CorrelationRule; - -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.Objects; - -/** - * Correlation Rule Index manager - * - * @opensearch.internal - */ -public class CorrelationRuleIndices { - private static final Logger log = LogManager.getLogger(CorrelationRuleIndices.class); - - private final Client client; - - private final ClusterService clusterService; - - /** - * Parameterized ctor for CorrelationRuleIndices - * @param client OS Client - * @param clusterService ClusterService - */ - public CorrelationRuleIndices(Client client, ClusterService clusterService) { - this.client = client; - this.clusterService = clusterService; - } - - /** - * get correlation rule index mappings - * @return mappings of correlation rule index - * @throws IOException IOException - */ - public static String correlationRuleIndexMappings() throws IOException { - return new String( - Objects.requireNonNull(CorrelationRuleIndices.class.getClassLoader().getResourceAsStream("mappings/correlation-rules.json")) - .readAllBytes(), - Charset.defaultCharset() - ); - } - - /** - * init the correlation rule index - * @param actionListener listener - * @throws IOException IOException - */ - public void initCorrelationRuleIndex(ActionListener actionListener) throws IOException { - if (correlationRuleIndexExists() == false) { - CreateIndexRequest indexRequest = new CreateIndexRequest(CorrelationRule.CORRELATION_RULE_INDEX).mapping( - correlationRuleIndexMappings() - ).settings(Settings.builder().put("index.hidden", true).build()); - client.admin().indices().create(indexRequest, actionListener); - } - } - - /** - * check if correlation rule index exists - * @return boolean - */ - public boolean correlationRuleIndexExists() { - ClusterState clusterState = clusterService.state(); - return clusterState.getRoutingTable().hasIndex(CorrelationRule.CORRELATION_RULE_INDEX); - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java deleted file mode 100644 index 362be3d2932e3..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/IndexUtils.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
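Bootstrap of the rules index above boils down to one admin call; a sketch under the assumption that the mapping JSON has already been loaded and that client is any injected OpenSearch Client:

import org.opensearch.action.admin.indices.create.CreateIndexRequest;
import org.opensearch.action.admin.indices.create.CreateIndexResponse;
import org.opensearch.client.Client;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.action.ActionListener;

public class RuleIndexBootstrapSketch {
    public static void create(Client client, String mappings, ActionListener<CreateIndexResponse> listener) {
        CreateIndexRequest request = new CreateIndexRequest(".opensearch-correlation-rules-config")
            .mapping(mappings) // JSON mapping loaded from mappings/correlation-rules.json
            .settings(Settings.builder().put("index.hidden", true).build()); // keep the system index out of wildcard expansion
        client.admin().indices().create(request, listener);
    }
}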
- */ - -package org.opensearch.plugin.correlation.utils; - -import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.client.IndicesAdminClient; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.xcontent.MediaTypeRegistry; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; - -import static org.opensearch.core.ParseField.CommonFields._META; - -/** - * Index Management utils - * - * @opensearch.internal - */ -public class IndexUtils { - private static final Integer NO_SCHEMA_VERSION = 0; - private static final String SCHEMA_VERSION = "schema_version"; - - /** - * manages the mappings lifecycle for correlation rule index - */ - public static Boolean correlationRuleIndexUpdated = false; - - private IndexUtils() {} - - /** - * updates the status of correlationRuleIndexUpdated to true - */ - public static void correlationRuleIndexUpdated() { - correlationRuleIndexUpdated = true; - } - - /** - * util method which decides based on schema version whether to update an index. - * @param index IndexMetadata - * @param mapping new mappings - * @return Boolean - * @throws IOException IOException - */ - public static Boolean shouldUpdateIndex(IndexMetadata index, String mapping) throws IOException { - Integer oldVersion = NO_SCHEMA_VERSION; - Integer newVersion = getSchemaVersion(mapping); - - Map indexMapping = index.mapping().sourceAsMap(); - if (indexMapping != null - && indexMapping.containsKey(_META.getPreferredName()) - && indexMapping.get(_META.getPreferredName()) instanceof HashMap) { - Map metaData = (HashMap) indexMapping.get(_META.getPreferredName()); - if (metaData.containsKey(SCHEMA_VERSION)) { - oldVersion = (Integer) metaData.get(SCHEMA_VERSION); - } - } - return newVersion > oldVersion; - } - - /** - * Gets the schema version for the mapping - * @param mapping mappings as input - * @return schema version - * @throws IOException IOException - */ - public static Integer getSchemaVersion(String mapping) throws IOException { - XContentParser xcp = MediaTypeRegistry.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, mapping); - - while (!xcp.isClosed()) { - XContentParser.Token token = xcp.currentToken(); - if (token != null && token != XContentParser.Token.END_OBJECT && token != XContentParser.Token.START_OBJECT) { - if (!Objects.equals(xcp.currentName(), _META.getPreferredName())) { - xcp.nextToken(); - xcp.skipChildren(); - } else { - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - switch (xcp.currentName()) { - case SCHEMA_VERSION: - int version = xcp.intValue(); - if (version < 0) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "%s cannot be negative", SCHEMA_VERSION) - ); - } - return version; - default: - xcp.nextToken(); - } - } - } - } - xcp.nextToken(); - } - return NO_SCHEMA_VERSION; - } - - /** - * updates the mappings for the index. 
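The version gate in shouldUpdateIndex above keys off a _meta block carried inside the mapping source itself: a new mapping is pushed only when its bundled schema_version is strictly greater than the one stored on the index, and a missing block counts as version 0. The relevant fragment of such a mapping looks like:

{
  "_meta": { "schema_version": 2 },
  "properties": { ... }
}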
- * @param index index for which mapping needs to be updated - * @param mapping new mappings - * @param clusterState ClusterState - * @param client Admin client - * @param actionListener listener - * @throws IOException IOException - */ - public static void updateIndexMapping( - String index, - String mapping, - ClusterState clusterState, - IndicesAdminClient client, - ActionListener actionListener - ) throws IOException { - if (clusterState.metadata().indices().containsKey(index)) { - if (shouldUpdateIndex(clusterState.metadata().index(index), mapping)) { - PutMappingRequest putMappingRequest = new PutMappingRequest(index).source(mapping, MediaTypeRegistry.JSON); - client.putMapping(putMappingRequest, actionListener); - } else { - actionListener.onResponse(new AcknowledgedResponse(true)); - } - } - } -} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java deleted file mode 100644 index 798196c47df20..0000000000000 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/utils/package-info.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/** - * utils package for events-correlation-engine - */ -package org.opensearch.plugin.correlation.utils; diff --git a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec deleted file mode 100644 index 013c17e4a9736..0000000000000 --- a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ /dev/null @@ -1 +0,0 @@ -org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec diff --git a/plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json b/plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json deleted file mode 100644 index 7741b160eca24..0000000000000 --- a/plugins/events-correlation-engine/src/main/resources/mappings/correlation-rules.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "_meta" : { - "schema_version": 1 - }, - "properties": { - "name": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "correlate": { - "type": "nested", - "properties": { - "index": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "query": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "tags": { - "type": "text", - "fields" : { - "keyword" : { - "type" : "keyword" - } - } - }, - "timestampField": { - "type": "text", - "analyzer" : "whitespace", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - } - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java deleted file mode 100644 index 005ffa2097b03..0000000000000 
--- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/EventsCorrelationPluginTests.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation; - -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -public class EventsCorrelationPluginTests extends OpenSearchTestCase { - - public void testDummy() { - Assert.assertEquals(1, 1); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java deleted file mode 100644 index 19ce3b33514d8..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/CorrelationParamsContextTests.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.plugin.correlation.core.index.CorrelationParamsContext.PARAMETERS; -import static org.opensearch.plugin.correlation.core.index.CorrelationParamsContext.VECTOR_SIMILARITY_FUNCTION; - -/** - * Unit tests for CorrelationsParamsContext - */ -public class CorrelationParamsContextTests extends OpenSearchTestCase { - - /** - * Test reading from and writing to streams - */ - public void testStreams() throws IOException { - int efConstruction = 321; - int m = 12; - - Map parameters = new HashMap<>(); - parameters.put("m", m); - parameters.put("ef_construction", efConstruction); - - CorrelationParamsContext context = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, parameters); - - BytesStreamOutput streamOutput = new BytesStreamOutput(); - context.writeTo(streamOutput); - - CorrelationParamsContext copy = new CorrelationParamsContext(streamOutput.bytes().streamInput()); - Assert.assertEquals(context.getSimilarityFunction(), copy.getSimilarityFunction()); - Assert.assertEquals(context.getParameters(), copy.getParameters()); - } - - /** - * test get vector similarity function - */ - public void testVectorSimilarityFunction() { - int efConstruction = 321; - int m = 12; - - Map parameters = new HashMap<>(); - parameters.put("m", m); - parameters.put("ef_construction", efConstruction); - - CorrelationParamsContext context = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, parameters); - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN, context.getSimilarityFunction()); - } - - 
/** - * test get parameters - */ - public void testParameters() { - int efConstruction = 321; - int m = 12; - - Map parameters = new HashMap<>(); - parameters.put("m", m); - parameters.put("ef_construction", efConstruction); - - CorrelationParamsContext context = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, parameters); - Assert.assertEquals(parameters, context.getParameters()); - } - - /** - * test parse method with invalid input - * @throws IOException IOException - */ - public void testParse_Invalid() throws IOException { - // Invalid input type - Integer invalidIn = 12; - expectThrows(MapperParsingException.class, () -> CorrelationParamsContext.parse(invalidIn)); - - // Invalid vector similarity function - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(CorrelationParamsContext.VECTOR_SIMILARITY_FUNCTION, 0) - .endObject(); - - final Map in2 = xContentBuilderToMap(xContentBuilder); - expectThrows(MapperParsingException.class, () -> CorrelationParamsContext.parse(in2)); - - // Invalid parameters - xContentBuilder = XContentFactory.jsonBuilder().startObject().field(PARAMETERS, 0).endObject(); - - final Map in4 = xContentBuilderToMap(xContentBuilder); - expectThrows(MapperParsingException.class, () -> CorrelationParamsContext.parse(in4)); - } - - /** - * test parse with null parameters - * @throws IOException IOException - */ - public void testParse_NullParameters() throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN) - .field(PARAMETERS, (String) null) - .endObject(); - Map in = xContentBuilderToMap(xContentBuilder); - Assert.assertThrows(MapperParsingException.class, () -> { CorrelationParamsContext.parse(in); }); - } - - /** - * test parse method - * @throws IOException IOException - */ - public void testParse_Valid() throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN) - .startObject(PARAMETERS) - .field("m", 2) - .field("ef_construction", 128) - .endObject() - .endObject(); - - Map in = xContentBuilderToMap(xContentBuilder); - CorrelationParamsContext context = CorrelationParamsContext.parse(in); - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN, context.getSimilarityFunction()); - Assert.assertEquals(Map.of("m", 2, "ef_construction", 128), context.getParameters()); - } - - /** - * test toXContent method - * @throws IOException IOException - */ - public void testToXContent() throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(VECTOR_SIMILARITY_FUNCTION, VectorSimilarityFunction.EUCLIDEAN) - .startObject(PARAMETERS) - .field("m", 2) - .field("ef_construction", 128) - .endObject() - .endObject(); - - Map in = xContentBuilderToMap(xContentBuilder); - CorrelationParamsContext context = CorrelationParamsContext.parse(in); - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder = context.toXContent(builder, ToXContent.EMPTY_PARAMS); - - Map out = xContentBuilderToMap(builder); - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN.name(), out.get(VECTOR_SIMILARITY_FUNCTION)); - } - - private Map xContentBuilderToMap(XContentBuilder xContentBuilder) { - return XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(); - } -} diff --git 
a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java deleted file mode 100644 index 32c71dcd37196..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/VectorFieldTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index; - -import org.apache.lucene.document.FieldType; -import org.opensearch.ExceptionsHelper; -import org.opensearch.OpenSearchException; -import org.opensearch.common.Randomness; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.common.io.stream.BytesStreamInput; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Random; - -/** - * Unit tests for VectorField - */ -public class VectorFieldTests extends OpenSearchTestCase { - - private final Random random = Randomness.get(); - - /** - * test VectorField ctor - */ - public void testVectorField_ctor() { - VectorField field = new VectorField("test-field", new float[] { 1.0f, 1.0f }, new FieldType()); - Assert.assertEquals("test-field", field.name()); - } - - /** - * test float vector to array serializer - * @throws IOException IOException - */ - public void testVectorAsArraySerializer() throws IOException { - final float[] vector = getArrayOfRandomFloats(20); - - final BytesStreamOutput objectStream = new BytesStreamOutput(); - objectStream.writeFloatArray(vector); - final byte[] serializedVector = objectStream.bytes().toBytesRef().bytes; - - final byte[] actualSerializedVector = VectorField.floatToByteArray(vector); - - Assert.assertNotNull(actualSerializedVector); - Assert.assertArrayEquals(serializedVector, actualSerializedVector); - - final float[] actualDeserializedVector = byteToFloatArray(actualSerializedVector); - Assert.assertNotNull(actualDeserializedVector); - Assert.assertArrayEquals(vector, actualDeserializedVector, 0.1f); - } - - /** - * test byte array to float vector failures - */ - public void testByteToFloatArrayFailures() { - final byte[] serializedVector = "test-dummy".getBytes(StandardCharsets.UTF_8); - expectThrows(OpenSearchException.class, () -> { byteToFloatArray(serializedVector); }); - } - - private float[] getArrayOfRandomFloats(int length) { - float[] vector = new float[length]; - for (int i = 0; i < 20; ++i) { - vector[i] = random.nextFloat(); - } - return vector; - } - - private static float[] byteToFloatArray(byte[] byteStream) { - try (BytesStreamInput objectStream = new BytesStreamInput(byteStream)) { - return objectStream.readFloatArray(); - } catch (IOException ex) { - throw ExceptionsHelper.convertToOpenSearchException(ex); - } - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java deleted file mode 100644 index 7223b450a136c..0000000000000 --- 
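VectorField.floatToByteArray and the byteToFloatArray helper exercised above are thin wrappers over OpenSearch's primitive-array stream codecs; a standalone sketch of the same float[] to byte[] round trip:

import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.BytesStreamInput;

public class VectorCodecSketch {
    public static void main(String[] args) throws Exception {
        float[] vector = { 0.1f, 0.2f, 0.3f };
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeFloatArray(vector); // vint length prefix, then four bytes per float
        byte[] serialized = out.bytes().toBytesRef().bytes;
        try (BytesStreamInput in = new BytesStreamInput(serialized)) {
            float[] roundTripped = in.readFloatArray();
            System.out.println(roundTripped.length); // 3
        }
    }
}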
a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.codec.correlation990; - -import org.apache.lucene.codecs.Codec; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.KnnFloatVectorField; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.SerialMergeScheduler; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.store.Directory; -import org.apache.lucene.tests.index.RandomIndexWriter; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; -import org.opensearch.plugin.correlation.core.index.query.CorrelationQueryFactory; -import org.opensearch.test.OpenSearchTestCase; - -import java.util.Map; -import java.util.Optional; -import java.util.function.Function; - -import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_EF_CONSTRUCTION; -import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_M; -import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_9_0; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -/** - * Unit tests for custom correlation codec - */ -public class CorrelationCodecTests extends OpenSearchTestCase { - - private static final String FIELD_NAME_ONE = "test_vector_one"; - private static final String FIELD_NAME_TWO = "test_vector_two"; - - /** - * test correlation vector index - * @throws Exception Exception - */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8329") - public void testCorrelationVectorIndex() throws Exception { - Function<MapperService, PerFieldCorrelationVectorsFormat> perFieldCorrelationVectorsProvider = - mapperService -> new PerFieldCorrelationVectorsFormat(Optional.of(mapperService)); - Function<PerFieldCorrelationVectorsFormat, Codec> correlationCodecProvider = (correlationVectorsFormat -> new CorrelationCodec( - V_9_9_0.getDefaultCodecDelegate(), - correlationVectorsFormat - )); - testCorrelationVectorIndex(correlationCodecProvider, perFieldCorrelationVectorsProvider); - } - - private void testCorrelationVectorIndex( - final Function<PerFieldCorrelationVectorsFormat, Codec> codecProvider, - final Function<MapperService, PerFieldCorrelationVectorsFormat> perFieldCorrelationVectorsProvider - ) throws Exception { - final MapperService mapperService = mock(MapperService.class); - final CorrelationParamsContext correlationParamsContext = new CorrelationParamsContext( - VectorSimilarityFunction.EUCLIDEAN, - Map.of(METHOD_PARAMETER_M, 16, METHOD_PARAMETER_EF_CONSTRUCTION, 256) - ); - - final VectorFieldMapper.CorrelationVectorFieldType mappedFieldType1 = new VectorFieldMapper.CorrelationVectorFieldType( - FIELD_NAME_ONE, - Map.of(), - 3, - correlationParamsContext - ); - final
VectorFieldMapper.CorrelationVectorFieldType mappedFieldType2 = new VectorFieldMapper.CorrelationVectorFieldType( - FIELD_NAME_TWO, - Map.of(), - 2, - correlationParamsContext - ); - when(mapperService.fieldType(eq(FIELD_NAME_ONE))).thenReturn(mappedFieldType1); - when(mapperService.fieldType(eq(FIELD_NAME_TWO))).thenReturn(mappedFieldType2); - - var perFieldCorrelationVectorsFormatSpy = spy(perFieldCorrelationVectorsProvider.apply(mapperService)); - final Codec codec = codecProvider.apply(perFieldCorrelationVectorsFormatSpy); - - Directory dir = newFSDirectory(createTempDir()); - IndexWriterConfig iwc = newIndexWriterConfig(); - iwc.setMergeScheduler(new SerialMergeScheduler()); - iwc.setCodec(codec); - - final FieldType luceneFieldType = KnnFloatVectorField.createFieldType(3, VectorSimilarityFunction.EUCLIDEAN); - float[] array = { 1.0f, 3.0f, 4.0f }; - KnnFloatVectorField vectorField = new KnnFloatVectorField(FIELD_NAME_ONE, array, luceneFieldType); - RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); - Document doc = new Document(); - doc.add(vectorField); - writer.addDocument(doc); - writer.commit(); - IndexReader reader = writer.getReader(); - writer.close(); - - verify(perFieldCorrelationVectorsFormatSpy).getKnnVectorsFormatForField(eq(FIELD_NAME_ONE)); - - IndexSearcher searcher = new IndexSearcher(reader); - Query query = CorrelationQueryFactory.create( - new CorrelationQueryFactory.CreateQueryRequest("dummy", FIELD_NAME_ONE, new float[] { 1.0f, 0.0f, 0.0f }, 1, null, null) - ); - - assertEquals(1, searcher.count(query)); - - reader.close(); - dir.close(); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java deleted file mode 100644 index 674f35069a742..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/mapper/CorrelationVectorFieldMapperTests.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.plugin.correlation.core.index.mapper; - -import org.apache.lucene.document.KnnFloatVectorField; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.search.FieldExistsQuery; -import org.opensearch.Version; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Explicit; -import org.opensearch.common.settings.IndexScopedSettings; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.ContentPath; -import org.opensearch.index.mapper.FieldMapper; -import org.opensearch.index.mapper.Mapper; -import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.ParseContext; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.QueryShardException; -import org.opensearch.plugin.correlation.core.index.CorrelationParamsContext; -import org.opensearch.search.lookup.SearchLookup; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import org.mockito.Mockito; - -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Unit tests for correlation vector field mapper - */ -public class CorrelationVectorFieldMapperTests extends OpenSearchTestCase { - - private static final String CORRELATION_VECTOR_TYPE = "correlation_vector"; - private static final String DIMENSION_FIELD_NAME = "dimension"; - private static final String TYPE_FIELD_NAME = "type"; - - /** - * test builder construction from parse of correlation params context - * @throws IOException IOException - */ - public void testBuilder_parse_fromCorrelationParamsContext() throws IOException { - String fieldName = "test-field-name"; - String indexName = "test-index-name"; - Settings settings = Settings.builder().put(settings(Version.CURRENT).build()).build(); - - VectorFieldMapper.TypeParser typeParser = new VectorFieldMapper.TypeParser(); - - int efConstruction = 321; - int m = 12; - int dimension = 10; - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, dimension) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .startObject("parameters") - .field("m", m) - .field("ef_construction", efConstruction) - .endObject() - .endObject() - .endObject(); - - VectorFieldMapper.Builder builder = (VectorFieldMapper.Builder) typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(), - buildParserContext(indexName, settings) - ); - Mapper.BuilderContext builderContext = new Mapper.BuilderContext(settings, new ContentPath()); - builder.build(builderContext); - - Assert.assertEquals(VectorSimilarityFunction.EUCLIDEAN, builder.correlationParamsContext.getValue().getSimilarityFunction()); - Assert.assertEquals(321, 
builder.correlationParamsContext.getValue().getParameters().get("ef_construction")); - - XContentBuilder xContentBuilderEmptyParams = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, dimension) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .endObject() - .endObject(); - - VectorFieldMapper.Builder builderEmptyParams = (VectorFieldMapper.Builder) typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilderEmptyParams), true, xContentBuilderEmptyParams.contentType()) - .v2(), - buildParserContext(indexName, settings) - ); - - Assert.assertEquals( - VectorSimilarityFunction.EUCLIDEAN, - builderEmptyParams.correlationParamsContext.getValue().getSimilarityFunction() - ); - Assert.assertTrue(builderEmptyParams.correlationParamsContext.getValue().getParameters().isEmpty()); - } - - /** - * test type parser construction throw error for invalid dimension of correlation vectors - * @throws IOException IOException - */ - public void testTypeParser_parse_fromCorrelationParamsContext_InvalidDimension() throws IOException { - String fieldName = "test-field-name"; - String indexName = "test-index-name"; - Settings settings = Settings.builder().put(settings(Version.CURRENT).build()).build(); - - VectorFieldMapper.TypeParser typeParser = new VectorFieldMapper.TypeParser(); - - int efConstruction = 321; - int m = 12; - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, 2000) - .startObject("correlation_ctx") - .field("similarityFunction", VectorSimilarityFunction.EUCLIDEAN.name()) - .startObject("parameters") - .field("m", m) - .field("ef_construction", efConstruction) - .endObject() - .endObject() - .endObject(); - - VectorFieldMapper.Builder builder = (VectorFieldMapper.Builder) typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(), - buildParserContext(indexName, settings) - ); - - expectThrows(IllegalArgumentException.class, () -> builder.build(new Mapper.BuilderContext(settings, new ContentPath()))); - } - - /** - * test type parser construction error for invalid vector similarity function - * @throws IOException IOException - */ - public void testTypeParser_parse_fromCorrelationParamsContext_InvalidVectorSimilarityFunction() throws IOException { - String fieldName = "test-field-name"; - String indexName = "test-index-name"; - Settings settings = Settings.builder().put(settings(Version.CURRENT).build()).build(); - - VectorFieldMapper.TypeParser typeParser = new VectorFieldMapper.TypeParser(); - - int efConstruction = 321; - int m = 12; - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() - .startObject() - .field(TYPE_FIELD_NAME, CORRELATION_VECTOR_TYPE) - .field(DIMENSION_FIELD_NAME, 2000) - .startObject("correlation_ctx") - .field("similarityFunction", "invalid") - .startObject("parameters") - .field("m", m) - .field("ef_construction", efConstruction) - .endObject() - .endObject() - .endObject(); - - expectThrows( - MapperParsingException.class, - () -> typeParser.parse( - fieldName, - XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true, xContentBuilder.contentType()).v2(), - buildParserContext(indexName, settings) - ) - ); - } - - /** - * test parseCreateField in CorrelationVectorFieldMapper - 
* @throws IOException IOException - */ - public void testCorrelationVectorFieldMapper_parseCreateField() throws IOException { - String fieldName = "test-field-name"; - int dimension = 10; - float[] testVector = createInitializedFloatArray(dimension, 1.0f); - CorrelationParamsContext correlationParamsContext = new CorrelationParamsContext(VectorSimilarityFunction.EUCLIDEAN, Map.of()); - - VectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = new VectorFieldMapper.CorrelationVectorFieldType( - fieldName, - Map.of(), - dimension, - correlationParamsContext - ); - - CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput input = new CorrelationVectorFieldMapper.CreateLuceneFieldMapperInput( - fieldName, - correlationVectorFieldType, - FieldMapper.MultiFields.empty(), - FieldMapper.CopyTo.empty(), - new Explicit<>(true, true), - false, - false, - correlationParamsContext - ); - - ParseContext.Document document = new ParseContext.Document(); - ContentPath contentPath = new ContentPath(); - ParseContext parseContext = mock(ParseContext.class); - when(parseContext.doc()).thenReturn(document); - when(parseContext.path()).thenReturn(contentPath); - - CorrelationVectorFieldMapper correlationVectorFieldMapper = Mockito.spy(new CorrelationVectorFieldMapper(input)); - doReturn(Optional.of(testVector)).when(correlationVectorFieldMapper).getFloatsFromContext(parseContext, dimension); - - correlationVectorFieldMapper.parseCreateField(parseContext, dimension); - - List<IndexableField> fields = document.getFields(); - assertEquals(1, fields.size()); - IndexableField field = fields.get(0); - - Assert.assertTrue(field instanceof KnnFloatVectorField); - KnnFloatVectorField knnFloatVectorField = (KnnFloatVectorField) field; - Assert.assertArrayEquals(testVector, knnFloatVectorField.vectorValue(), 0.001f); - } - - /** - * test CorrelationVectorFieldType subclass - */ - public void testCorrelationVectorFieldType() { - String fieldName = "test-field-name"; - int dimension = 10; - QueryShardContext context = mock(QueryShardContext.class); - SearchLookup searchLookup = mock(SearchLookup.class); - - VectorFieldMapper.CorrelationVectorFieldType correlationVectorFieldType = new VectorFieldMapper.CorrelationVectorFieldType( - fieldName, - Map.of(), - dimension - ); - Assert.assertThrows(QueryShardException.class, () -> { correlationVectorFieldType.termQuery(new Object(), context); }); - Assert.assertThrows( - UnsupportedOperationException.class, - () -> { correlationVectorFieldType.valueFetcher(context, searchLookup, ""); } - ); - Assert.assertTrue(correlationVectorFieldType.existsQuery(context) instanceof FieldExistsQuery); - Assert.assertEquals(VectorFieldMapper.CONTENT_TYPE, correlationVectorFieldType.typeName()); - } - - /** - * test constants in VectorFieldMapper - */ - public void testVectorFieldMapperConstants() { - Assert.assertNotNull(VectorFieldMapper.Defaults.IGNORE_MALFORMED); - Assert.assertNotNull(VectorFieldMapper.Names.IGNORE_MALFORMED); - } - - private IndexMetadata buildIndexMetaData(String index, Settings settings) { - return IndexMetadata.builder(index) - .settings(settings) - .numberOfShards(1) - .numberOfReplicas(0) - .version(7) - .mappingVersion(0) - .settingsVersion(0) - .aliasesVersion(0) - .creationDate(0) - .build(); - } - - private Mapper.TypeParser.ParserContext buildParserContext(String index, Settings settings) { - IndexSettings indexSettings = new IndexSettings( - buildIndexMetaData(index, settings), - Settings.EMPTY, - new IndexScopedSettings(Settings.EMPTY, new
HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS)) - ); - - MapperService mapperService = mock(MapperService.class); - when(mapperService.getIndexSettings()).thenReturn(indexSettings); - - return new Mapper.TypeParser.ParserContext( - null, - mapperService, - type -> new VectorFieldMapper.TypeParser(), - Version.CURRENT, - null, - null, - null - ); - } - - private static float[] createInitializedFloatArray(int dimension, float value) { - float[] array = new float[dimension]; - Arrays.fill(array, value); - return array; - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java deleted file mode 100644 index 3e567d0c04e53..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.plugin.correlation.core.index.query; - -import org.apache.lucene.search.KnnFloatVectorQuery; -import org.opensearch.Version; -import org.opensearch.cluster.ClusterModule; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.common.Strings; -import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; -import org.opensearch.core.common.io.stream.NamedWriteableRegistry; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.index.Index; -import org.opensearch.core.xcontent.MediaTypeRegistry; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.NumberFieldMapper; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.plugin.correlation.core.index.mapper.VectorFieldMapper; -import org.opensearch.plugins.SearchPlugin; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; - -import java.io.IOException; -import java.util.List; -import java.util.Optional; - -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Unit tests for Correlation Query Builder - */ -public class CorrelationQueryBuilderTests extends OpenSearchTestCase { - - private static final String FIELD_NAME = "myvector"; - private static final int K = 1; - private static final TermQueryBuilder TERM_QUERY = QueryBuilders.termQuery("field", "value"); - private static final float[] QUERY_VECTOR = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; - - /** - * test invalid number of nearby neighbors - */ - public void testInvalidK() { - float[] queryVector = { 1.0f, 1.0f }; - - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, -K)); - expectThrows(IllegalArgumentException.class, () -> new 
CorrelationQueryBuilder(FIELD_NAME, queryVector, 0)); - expectThrows( - IllegalArgumentException.class, - () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, CorrelationQueryBuilder.K_MAX + 1) - ); - } - - /** - * test empty vector scenario - */ - public void testEmptyVector() { - final float[] queryVector = null; - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector, 1)); - final float[] queryVector1 = new float[] {}; - expectThrows(IllegalArgumentException.class, () -> new CorrelationQueryBuilder(FIELD_NAME, queryVector1, 1)); - } - - /** - * test serde with xcontent - * @throws IOException IOException - */ - public void testFromXContent() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.startObject(correlationQueryBuilder.fieldName()); - builder.field(CorrelationQueryBuilder.VECTOR_FIELD.getPreferredName(), correlationQueryBuilder.vector()); - builder.field(CorrelationQueryBuilder.K_FIELD.getPreferredName(), correlationQueryBuilder.getK()); - builder.endObject(); - builder.endObject(); - XContentParser contentParser = createParser(builder); - contentParser.nextToken(); - CorrelationQueryBuilder actualBuilder = CorrelationQueryBuilder.parse(contentParser); - Assert.assertEquals(actualBuilder, correlationQueryBuilder); - } - - /** - * test serde with xcontent - * @throws IOException IOException - */ - public void testFromXContentFromString() throws IOException { - String correlationQuery = "{\n" - + " \"myvector\" : {\n" - + " \"vector\" : [\n" - + " 1.0,\n" - + " 2.0,\n" - + " 3.0,\n" - + " 4.0\n" - + " ],\n" - + " \"k\" : 1,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; - XContentParser contentParser = createParser(JsonXContent.jsonXContent, correlationQuery); - contentParser.nextToken(); - CorrelationQueryBuilder actualBuilder = CorrelationQueryBuilder.parse(contentParser); - Assert.assertEquals(correlationQuery.replace("\n", "").replace(" ", ""), Strings.toString(MediaTypeRegistry.JSON, actualBuilder)); - } - - /** - * test serde with xcontent with filters - * @throws IOException IOException - */ - public void testFromXContentWithFilters() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K, TERM_QUERY); - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.startObject(correlationQueryBuilder.fieldName()); - builder.field(CorrelationQueryBuilder.VECTOR_FIELD.getPreferredName(), correlationQueryBuilder.vector()); - builder.field(CorrelationQueryBuilder.K_FIELD.getPreferredName(), correlationQueryBuilder.getK()); - builder.field(CorrelationQueryBuilder.FILTER_FIELD.getPreferredName(), correlationQueryBuilder.getFilter()); - builder.endObject(); - builder.endObject(); - XContentParser contentParser = createParser(builder); - contentParser.nextToken(); - CorrelationQueryBuilder actualBuilder = CorrelationQueryBuilder.parse(contentParser); - Assert.assertEquals(actualBuilder, correlationQueryBuilder); - } - - /** - * test conversion to KnnFloatVectorQuery logic - * @throws IOException IOException - */ - public void testDoToQuery() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext
mockQueryShardContext = mock(QueryShardContext.class); - VectorFieldMapper.CorrelationVectorFieldType mockCorrVectorField = mock(VectorFieldMapper.CorrelationVectorFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockCorrVectorField.getDimension()).thenReturn(4); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - KnnFloatVectorQuery query = (KnnFloatVectorQuery) correlationQueryBuilder.doToQuery(mockQueryShardContext); - Assert.assertEquals(FIELD_NAME, query.getField()); - Assert.assertArrayEquals(QUERY_VECTOR, query.getTargetCopy(), 0.1f); - Assert.assertEquals(K, query.getK()); - } - - /** - * test conversion to KnnFloatVectorQuery logic with filter - * @throws IOException IOException - */ - public void testDoToQueryWithFilter() throws IOException { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K, TERM_QUERY); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - VectorFieldMapper.CorrelationVectorFieldType mockCorrVectorField = mock(VectorFieldMapper.CorrelationVectorFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockCorrVectorField.getDimension()).thenReturn(4); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - KnnFloatVectorQuery query = (KnnFloatVectorQuery) correlationQueryBuilder.doToQuery(mockQueryShardContext); - Assert.assertEquals(FIELD_NAME, query.getField()); - Assert.assertArrayEquals(QUERY_VECTOR, query.getTargetCopy(), 0.1f); - Assert.assertEquals(K, query.getK()); - Assert.assertEquals(TERM_QUERY.toQuery(mockQueryShardContext), query.getFilter()); - } - - /** - * test conversion to KnnFloatVectorQuery logic failure with invalid dimensions - */ - public void testDoToQueryInvalidDimensions() { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - VectorFieldMapper.CorrelationVectorFieldType mockCorrVectorField = mock(VectorFieldMapper.CorrelationVectorFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockCorrVectorField.getDimension()).thenReturn(400); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - expectThrows(IllegalArgumentException.class, () -> correlationQueryBuilder.doToQuery(mockQueryShardContext)); - } - - /** - * test conversion to KnnFloatVectorQuery logic failure with invalid field type - */ - public void testDoToQueryInvalidFieldType() { - CorrelationQueryBuilder correlationQueryBuilder = new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - Index dummyIndex = new Index("dummy", "dummy"); - QueryShardContext mockQueryShardContext = mock(QueryShardContext.class); - NumberFieldMapper.NumberFieldType mockCorrVectorField = mock(NumberFieldMapper.NumberFieldType.class); - when(mockQueryShardContext.index()).thenReturn(dummyIndex); - when(mockQueryShardContext.fieldMapper(anyString())).thenReturn(mockCorrVectorField); - expectThrows(IllegalArgumentException.class, () -> correlationQueryBuilder.doToQuery(mockQueryShardContext)); - } - - /** - * test serialization of Correlation Query Builder - * @throws Exception if serialization fails - */ - public void
testSerialization() throws Exception { - assertSerialization(Optional.empty()); - assertSerialization(Optional.of(TERM_QUERY)); - } - - private void assertSerialization(final Optional<QueryBuilder> queryBuilderOptional) throws IOException { - final CorrelationQueryBuilder builder = queryBuilderOptional.isPresent() - ? new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K, queryBuilderOptional.get()) - : new CorrelationQueryBuilder(FIELD_NAME, QUERY_VECTOR, K); - - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.CURRENT); - output.writeNamedWriteable(builder); - - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { - in.setVersion(Version.CURRENT); - final QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class); - - assertNotNull(deserializedQuery); - assertTrue(deserializedQuery instanceof CorrelationQueryBuilder); - final CorrelationQueryBuilder deserializedKnnQueryBuilder = (CorrelationQueryBuilder) deserializedQuery; - assertEquals(FIELD_NAME, deserializedKnnQueryBuilder.fieldName()); - assertArrayEquals(QUERY_VECTOR, (float[]) deserializedKnnQueryBuilder.vector(), 0.0f); - assertEquals(K, deserializedKnnQueryBuilder.getK()); - if (queryBuilderOptional.isPresent()) { - assertNotNull(deserializedKnnQueryBuilder.getFilter()); - assertEquals(queryBuilderOptional.get(), deserializedKnnQueryBuilder.getFilter()); - } else { - assertNull(deserializedKnnQueryBuilder.getFilter()); - } - } - } - } - - @Override - protected NamedXContentRegistry xContentRegistry() { - List<NamedXContentRegistry.Entry> list = ClusterModule.getNamedXWriteables(); - SearchPlugin.QuerySpec<TermQueryBuilder> spec = new SearchPlugin.QuerySpec<>( - TermQueryBuilder.NAME, - TermQueryBuilder::new, - TermQueryBuilder::fromXContent - ); - list.add(new NamedXContentRegistry.Entry(QueryBuilder.class, spec.getName(), (p, c) -> spec.getParser().fromXContent(p))); - NamedXContentRegistry registry = new NamedXContentRegistry(list); - return registry; - } - - @Override - protected NamedWriteableRegistry writableRegistry() { - final List<NamedWriteableRegistry.Entry> entries = ClusterModule.getNamedWriteables(); - entries.add( - new NamedWriteableRegistry.Entry( - QueryBuilder.class, - CorrelationQueryBuilder.NAME_FIELD.getPreferredName(), - CorrelationQueryBuilder::new - ) - ); - entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new)); - return new NamedWriteableRegistry(entries); - } -} diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java deleted file mode 100644 index 45cb47b05b5c2..0000000000000 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/settings/EventsCorrelationSettingsTests.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license.
- */ - -package org.opensearch.plugin.correlation.settings; - -import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.plugin.correlation.EventsCorrelationPlugin; -import org.opensearch.test.OpenSearchTestCase; -import org.junit.Assert; -import org.junit.Before; - -import java.util.List; -import java.util.concurrent.TimeUnit; - -/** - * Unit tests for Correlation Engine settings - */ -public class EventsCorrelationSettingsTests extends OpenSearchTestCase { - - private EventsCorrelationPlugin plugin; - - @Before - public void setup() { - plugin = new EventsCorrelationPlugin(); - } - - /** - * test all plugin settings returned - */ - public void testAllPluginSettingsReturned() { - List<Setting<?>> expectedSettings = List.of( - EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING, - EventsCorrelationSettings.CORRELATION_TIME_WINDOW - ); - - List<Setting<?>> settings = plugin.getSettings(); - Assert.assertTrue(settings.containsAll(expectedSettings)); - } - - /** - * test settings get value - */ - public void testSettingsGetValue() { - Settings settings = Settings.builder().put("index.correlation", true).build(); - Assert.assertEquals(EventsCorrelationSettings.IS_CORRELATION_INDEX_SETTING.get(settings), true); - settings = Settings.builder() - .put("plugins.security_analytics.correlation_time_window", new TimeValue(10, TimeUnit.MINUTES)) - .build(); - Assert.assertEquals(EventsCorrelationSettings.CORRELATION_TIME_WINDOW.get(settings), new TimeValue(10, TimeUnit.MINUTES)); - } -} diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 4f30ea9ea7e22..2948ca12904f5 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -89,7 +89,7 @@ dependencies { api "org.apache.poi:poi:${versions.poi}" api "org.apache.poi:poi-ooxml-lite:${versions.poi}" api "commons-codec:commons-codec:${versions.commonscodec}" - api 'org.apache.xmlbeans:xmlbeans:5.2.2' + api 'org.apache.xmlbeans:xmlbeans:5.3.0' api 'org.apache.commons:commons-collections4:4.4' // MS Office api "org.apache.poi:poi-scratchpad:${versions.poi}" diff --git a/plugins/ingest-attachment/licenses/xmlbeans-5.2.2.jar.sha1 b/plugins/ingest-attachment/licenses/xmlbeans-5.2.2.jar.sha1 deleted file mode 100644 index 613c1028dbd6d..0000000000000 --- a/plugins/ingest-attachment/licenses/xmlbeans-5.2.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -586ffe10ae9864e19e85c24bd060790a70586f72 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xmlbeans-5.3.0.jar.sha1 b/plugins/ingest-attachment/licenses/xmlbeans-5.3.0.jar.sha1 new file mode 100644 index 0000000000000..4dbb0149da890 --- /dev/null +++ b/plugins/ingest-attachment/licenses/xmlbeans-5.3.0.jar.sha1 @@ -0,0 +1 @@ +f93c3ba820d7240b7fec4ec5bc35e7223cc6fc1f \ No newline at end of file diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index d419f6fafeb30..03ea07623dbaf 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -44,7 +44,7 @@ opensearchplugin { } dependencies { - api 'com.azure:azure-core:1.51.0' + api 'com.azure:azure-core:1.54.1' api 'com.azure:azure-json:1.3.0' api 'com.azure:azure-xml:1.1.0' api 'com.azure:azure-storage-common:12.28.0' @@ -61,8 +61,8 @@ dependencies { // Start of transitive dependencies for azure-identity api 'com.microsoft.azure:msal4j-persistence-extension:1.3.0' api
"net.java.dev.jna:jna-platform:${versions.jna}" - api 'com.microsoft.azure:msal4j:1.17.2' - api 'com.nimbusds:oauth2-oidc-sdk:11.19.1' + api 'com.microsoft.azure:msal4j:1.18.0' + api 'com.nimbusds:oauth2-oidc-sdk:11.20.1' api 'com.nimbusds:nimbus-jose-jwt:9.41.1' api 'com.nimbusds:content-type:2.3' api 'com.nimbusds:lang-tag:1.7' @@ -108,7 +108,6 @@ thirdPartyAudit { // Optional and not enabled by Elasticsearch 'com.google.common.util.concurrent.internal.InternalFutureFailureAccess', 'com.google.common.util.concurrent.internal.InternalFutures', - 'com.azure.core.credential.ProofOfPossessionOptions', 'com.azure.storage.internal.avro.implementation.AvroObject', 'com.azure.storage.internal.avro.implementation.AvroReader', 'com.azure.storage.internal.avro.implementation.AvroReaderFactory', diff --git a/plugins/repository-azure/licenses/azure-core-1.51.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.51.0.jar.sha1 deleted file mode 100644 index 7200f59af2f9a..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-1.51.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ff5d0aedf75ca45ec0ace24673f790d2f7a57096 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-1.54.1.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.54.1.jar.sha1 new file mode 100644 index 0000000000000..9246d0dd8443a --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-1.54.1.jar.sha1 @@ -0,0 +1 @@ +9ae0cc4a8ff02a0146510ec9e1c06ab48950a66b \ No newline at end of file diff --git a/plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 b/plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 deleted file mode 100644 index b5219ee17e9fa..0000000000000 --- a/plugins/repository-azure/licenses/msal4j-1.17.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6211e3d71d0388929babaa0ff0951b30d001852 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 b/plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 new file mode 100644 index 0000000000000..292259e9d862d --- /dev/null +++ b/plugins/repository-azure/licenses/msal4j-1.18.0.jar.sha1 @@ -0,0 +1 @@ +a47e4e9257a5d9cdb8282c331278492968e06250 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 b/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 deleted file mode 100644 index 7d83b0e8ca639..0000000000000 --- a/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.19.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -58db85a807a56ae76baffa519772271ad5808195 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 b/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 new file mode 100644 index 0000000000000..7527d31eb1d37 --- /dev/null +++ b/plugins/repository-azure/licenses/oauth2-oidc-sdk-11.20.1.jar.sha1 @@ -0,0 +1 @@ +8d1ecd62d31945534a7cd63062c3c48ff0df9c43 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 deleted file mode 100644 index bf45716c5b8ce..0000000000000 --- a/plugins/repository-gcs/licenses/grpc-api-1.68.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a9f25c58d8d5b0fcf37ae889a50fec87e34ac08 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 b/plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..1844172dec982 --- /dev/null +++ 
b/plugins/repository-gcs/licenses/grpc-api-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a257a5dd25dda1c97a99b56d5b9c1e56c12ae554 \ No newline at end of file diff --git a/plugins/transport-grpc/build.gradle b/plugins/transport-grpc/build.gradle new file mode 100644 index 0000000000000..47f62b2b8c3f3 --- /dev/null +++ b/plugins/transport-grpc/build.gradle @@ -0,0 +1,168 @@ +import org.gradle.api.attributes.java.TargetJvmEnvironment + +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +opensearchplugin { + description 'gRPC based transport implementation' + classname 'org.opensearch.transport.grpc.GrpcPlugin' +} + +dependencies { + compileOnly "com.google.code.findbugs:jsr305:3.0.2" + runtimeOnly "com.google.guava:guava:${versions.guava}" + implementation "com.google.errorprone:error_prone_annotations:2.24.1" + implementation "com.google.guava:failureaccess:1.0.1" + implementation "io.grpc:grpc-api:${versions.grpc}" + implementation "io.grpc:grpc-core:${versions.grpc}" + implementation "io.grpc:grpc-netty-shaded:${versions.grpc}" + implementation "io.grpc:grpc-protobuf-lite:${versions.grpc}" + implementation "io.grpc:grpc-protobuf:${versions.grpc}" + implementation "io.grpc:grpc-services:${versions.grpc}" + implementation "io.grpc:grpc-stub:${versions.grpc}" + implementation "io.grpc:grpc-util:${versions.grpc}" + implementation "io.perfmark:perfmark-api:0.26.0" +} + +tasks.named("dependencyLicenses").configure { + mapping from: /grpc-.*/, to: 'grpc' +} + +thirdPartyAudit { + ignoreMissingClasses( + 'com.aayushatharva.brotli4j.Brotli4jLoader', + 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Status', + 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Wrapper', + 'com.aayushatharva.brotli4j.encoder.BrotliEncoderChannel', + 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', + 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', + // classes are missing + + // from io.netty.logging.CommonsLoggerFactory (netty) + 'org.apache.commons.logging.Log', + 'org.apache.commons.logging.LogFactory', + + // from Log4j (deliberate, Netty will fallback to Log4j 2) + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + + // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) + 'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + 'org.bouncycastle.openssl.PEMEncryptedKeyPair', + 'org.bouncycastle.openssl.PEMParser', + 'org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter', + 'org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8DecryptorProviderBuilder', + 'org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder', + 'org.bouncycastle.pkcs.PKCS8EncryptedPrivateKeyInfo', + + // from io.netty.handler.ssl.JettyNpnSslEngine (netty) + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + + // from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) + 'org.jboss.marshalling.ByteInput', + + // from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) + 'org.jboss.marshalling.ByteOutput', + + // from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) + 'org.jboss.marshalling.Marshaller', + + // from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) + 
'org.jboss.marshalling.MarshallerFactory', + 'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + + // from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.helpers.FormattingTuple', + 'org.slf4j.helpers.MessageFormatter', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.spi.LocationAwareLogger', + + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonToken', + 'com.google.protobuf.util.Durations', + 'com.google.protobuf.util.Timestamps', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', + 'com.google.protobuf.nano.MessageNano', + 'com.google.rpc.Status', + 'com.google.rpc.Status$Builder', + 'com.ning.compress.BufferRecycler', + 'com.ning.compress.lzf.ChunkDecoder', + 'com.ning.compress.lzf.ChunkEncoder', + 'com.ning.compress.lzf.LZFChunk', + 'com.ning.compress.lzf.LZFEncoder', + 'com.ning.compress.lzf.util.ChunkDecoderFactory', + 'com.ning.compress.lzf.util.ChunkEncoderFactory', + 'lzma.sdk.lzma.Encoder', + 'net.jpountz.lz4.LZ4Compressor', + 'net.jpountz.lz4.LZ4Factory', + 'net.jpountz.lz4.LZ4FastDecompressor', + 'net.jpountz.xxhash.XXHash32', + 'net.jpountz.xxhash.XXHashFactory', + 'org.eclipse.jetty.alpn.ALPN$ClientProvider', + 'org.eclipse.jetty.alpn.ALPN$ServerProvider', + 'org.eclipse.jetty.alpn.ALPN', + + 'org.conscrypt.AllocatedBuffer', + 'org.conscrypt.BufferAllocator', + 'org.conscrypt.Conscrypt', + 'org.conscrypt.HandshakeListener', + + 'reactor.blockhound.BlockHound$Builder', + 'reactor.blockhound.integration.BlockHoundIntegration' + ) + + ignoreViolations( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$1', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$2', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$3', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$4', + 'io.grpc.netty.shaded.io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$5', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$1', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$2', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$3', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$4', + 'io.grpc.netty.shaded.io.netty.util.internal.PlatformDependent0$6', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', + 
'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueConsumerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueProducerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.util.UnsafeLongArrayAccess', + 'io.grpc.netty.shaded.io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess' + ) +} diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 b/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 new file mode 100644 index 0000000000000..67723f6f51248 --- /dev/null +++ b/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 @@ -0,0 +1 @@ +32b299e45105aa9b0df8279c74dc1edfcf313ff0 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt b/plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/error_prone_annotations-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-NOTICE.txt b/plugins/transport-grpc/licenses/error_prone_annotations-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 b/plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 new file mode 100644 index 0000000000000..4798b37e20691 --- /dev/null +++ b/plugins/transport-grpc/licenses/failureaccess-1.0.1.jar.sha1 @@ -0,0 +1 @@ +1dcf1de382a0bf95a3d8b0849546c88bac1292c9 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/failureaccess-LICENSE.txt b/plugins/transport-grpc/licenses/failureaccess-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/failureaccess-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/transport-grpc/licenses/failureaccess-NOTICE.txt b/plugins/transport-grpc/licenses/failureaccess-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-grpc/licenses/grpc-LICENSE.txt b/plugins/transport-grpc/licenses/grpc-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/grpc-NOTICE.txt b/plugins/transport-grpc/licenses/grpc-NOTICE.txt new file mode 100644 index 0000000000000..f70c5620cf75a --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-NOTICE.txt @@ -0,0 +1,62 @@ +Copyright 2014 The gRPC Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +----------------------------------------------------------------------- + +This product contains a modified portion of 'OkHttp', an open source +HTTP & SPDY client for Android and Java applications, which can be obtained +at: + + * LICENSE: + * okhttp/third_party/okhttp/LICENSE (Apache License 2.0) + * HOMEPAGE: + * https://github.com/square/okhttp + * LOCATION_IN_GRPC: + * okhttp/third_party/okhttp + +This product contains a modified portion of 'Envoy', an open source +cloud-native high-performance edge/middle/service proxy, which can be +obtained at: + + * LICENSE: + * xds/third_party/envoy/LICENSE (Apache License 2.0) + * NOTICE: + * xds/third_party/envoy/NOTICE + * HOMEPAGE: + * https://www.envoyproxy.io + * LOCATION_IN_GRPC: + * xds/third_party/envoy + +This product contains a modified portion of 'protoc-gen-validate (PGV)', +an open source protoc plugin to generate polyglot message validators, +which can be obtained at: + + * LICENSE: + * xds/third_party/protoc-gen-validate/LICENSE (Apache License 2.0) + * NOTICE: + * xds/third_party/protoc-gen-validate/NOTICE + * HOMEPAGE: + * https://github.com/envoyproxy/protoc-gen-validate + * LOCATION_IN_GRPC: + * xds/third_party/protoc-gen-validate + +This product contains a modified portion of 'udpa', +an open source universal data plane API, which can be obtained at: + + * LICENSE: + * xds/third_party/udpa/LICENSE (Apache License 2.0) + * HOMEPAGE: + * https://github.com/cncf/udpa + * LOCATION_IN_GRPC: + * xds/third_party/udpa diff --git a/plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..1844172dec982 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-api-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a257a5dd25dda1c97a99b56d5b9c1e56c12ae554 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 
b/plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..e20345d29e914 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-core-1.68.2.jar.sha1 @@ -0,0 +1 @@ +b0fd51a1c029785d1c9ae2cfc80a296b60dfcfdb \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..53fa705a66129 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-netty-shaded-1.68.2.jar.sha1 @@ -0,0 +1 @@ +8ea4186fbdcc5432664364ed53e03cf0d458c3ec \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..e861b41837f33 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-protobuf-1.68.2.jar.sha1 @@ -0,0 +1 @@ +35b28e0d57874021cd31e76dd4a795f76a82471e \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..b2401f9752829 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-protobuf-lite-1.68.2.jar.sha1 @@ -0,0 +1 @@ +a53064b896adcfefe74362a33e111492351dfc03 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..c4edf923791e5 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-services-1.68.2.jar.sha1 @@ -0,0 +1 @@ +6c2a0b0640544b9010a42bcf76f2791116a75c9d \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..118464f8f48ff --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-stub-1.68.2.jar.sha1 @@ -0,0 +1 @@ +d58ee1cf723b4b5536d44b67e328c163580a8d98 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 b/plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 new file mode 100644 index 0000000000000..c3261b012e502 --- /dev/null +++ b/plugins/transport-grpc/licenses/grpc-util-1.68.2.jar.sha1 @@ -0,0 +1 @@ +2d195570e9256d1357d584146a8e6b19587d4044 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 b/plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 new file mode 100644 index 0000000000000..27d5304e326df --- /dev/null +++ b/plugins/transport-grpc/licenses/guava-33.2.1-jre.jar.sha1 @@ -0,0 +1 @@ +818e780da2c66c63bbb6480fef1f3855eeafa3e4 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/guava-LICENSE.txt b/plugins/transport-grpc/licenses/guava-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-grpc/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/transport-grpc/licenses/guava-NOTICE.txt b/plugins/transport-grpc/licenses/guava-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 b/plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 new file mode 100644 index 0000000000000..abf1becd13298 --- /dev/null +++ b/plugins/transport-grpc/licenses/perfmark-api-0.26.0.jar.sha1 @@ -0,0 +1 @@ +ef65452adaf20bf7d12ef55913aba24037b82738 \ No newline at end of file diff --git a/plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt b/plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/plugins/transport-grpc/licenses/perfmark-api-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt b/plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt new file mode 100644 index 0000000000000..7d74b6569cf64 --- /dev/null +++ b/plugins/transport-grpc/licenses/perfmark-api-NOTICE.txt @@ -0,0 +1,40 @@ +Copyright 2019 Google LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +----------------------------------------------------------------------- + +This product contains a modified portion of 'Catapult', an open source +Trace Event viewer for Chrome, Linux, and Android applications, which can +be obtained at: + + * LICENSE: + * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) + * HOMEPAGE: + * https://github.com/catapult-project/catapult + +This product contains a modified portion of 'Polymer', a library for Web +Components, which can be obtained at: + * LICENSE: + * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/polymer/LICENSE (New BSD License) + * HOMEPAGE: + * https://github.com/Polymer/polymer + + +This product contains a modified portion of 'ASM', an open source +Java Bytecode library, which can be obtained at: + + * LICENSE: + * agent/src/main/resources/io/perfmark/agent/third_party/asm/LICENSE (BSD style License) + * HOMEPAGE: + * https://asm.ow2.io/ diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java new file mode 100644 index 0000000000000..0a464e135350b --- /dev/null +++ b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/GrpcPlugin.java @@ -0,0 +1,69 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ +package org.opensearch.transport.grpc; + +import org.opensearch.common.network.NetworkService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.plugins.NetworkPlugin; +import org.opensearch.plugins.Plugin; +import org.opensearch.telemetry.tracing.Tracer; +import org.opensearch.threadpool.ThreadPool; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.GRPC_TRANSPORT_SETTING_KEY; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_BIND_HOST; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_HOST; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_PORTS; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_PUBLISH_HOST; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_PUBLISH_PORT; +import static org.opensearch.transport.grpc.Netty4GrpcServerTransport.SETTING_GRPC_WORKER_COUNT; + +/** + * Main class for the gRPC plugin. + */ +public final class GrpcPlugin extends Plugin implements NetworkPlugin { + + /** + * Creates a new GrpcPlugin instance. + */ + public GrpcPlugin() {} + + @Override + public Map<String, Supplier<AuxTransport>> getAuxTransports( + Settings settings, + ThreadPool threadPool, + CircuitBreakerService circuitBreakerService, + NetworkService networkService, + ClusterSettings clusterSettings, + Tracer tracer + ) { + return Collections.singletonMap( + GRPC_TRANSPORT_SETTING_KEY, + () -> new Netty4GrpcServerTransport(settings, Collections.emptyList(), networkService) + ); + } + + @Override + public List<Setting<?>> getSettings() { + return List.of( + SETTING_GRPC_PORTS, + SETTING_GRPC_HOST, + SETTING_GRPC_PUBLISH_HOST, + SETTING_GRPC_BIND_HOST, + SETTING_GRPC_WORKER_COUNT, + SETTING_GRPC_PUBLISH_PORT + ); + } +} diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java new file mode 100644 index 0000000000000..61c0722772b92 --- /dev/null +++ b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/Netty4GrpcServerTransport.java @@ -0,0 +1,277 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ + +package org.opensearch.transport.grpc; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.network.NetworkService; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.transport.PortsRange; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; +import org.opensearch.plugins.NetworkPlugin; +import org.opensearch.transport.BindTransportException; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +import io.grpc.BindableService; +import io.grpc.InsecureServerCredentials; +import io.grpc.Server; +import io.grpc.netty.shaded.io.grpc.netty.NettyServerBuilder; +import io.grpc.netty.shaded.io.netty.channel.EventLoopGroup; +import io.grpc.netty.shaded.io.netty.channel.nio.NioEventLoopGroup; +import io.grpc.netty.shaded.io.netty.channel.socket.nio.NioServerSocketChannel; +import io.grpc.protobuf.services.HealthStatusManager; +import io.grpc.protobuf.services.ProtoReflectionService; + +import static java.util.Collections.emptyList; +import static org.opensearch.common.settings.Setting.intSetting; +import static org.opensearch.common.settings.Setting.listSetting; +import static org.opensearch.common.util.concurrent.OpenSearchExecutors.daemonThreadFactory; +import static org.opensearch.transport.Transport.resolveTransportPublishPort; + +/** + * Netty4 gRPC server implemented as a LifecycleComponent. + * Services injected through BindableService list. + */ +public class Netty4GrpcServerTransport extends NetworkPlugin.AuxTransport { + private static final Logger logger = LogManager.getLogger(Netty4GrpcServerTransport.class); + + /** + * Type key for configuring settings of this auxiliary transport. + */ + public static final String GRPC_TRANSPORT_SETTING_KEY = "experimental-transport-grpc"; + + /** + * Port range on which to bind. + * Note this setting is configured through AffixSetting AUX_TRANSPORT_PORTS where the aux transport type matches the GRPC_TRANSPORT_SETTING_KEY. + */ + public static final Setting<PortsRange> SETTING_GRPC_PORTS = AUX_TRANSPORT_PORTS.getConcreteSettingForNamespace( + GRPC_TRANSPORT_SETTING_KEY + ); + + /** + * Port published to peers for this server. + */ + public static final Setting<Integer> SETTING_GRPC_PUBLISH_PORT = intSetting("grpc.publish_port", -1, -1, Setting.Property.NodeScope); + + /** + * Host list to bind and publish. + * For distinct bind/publish hosts configure SETTING_GRPC_BIND_HOST + SETTING_GRPC_PUBLISH_HOST separately. + */ + public static final Setting<List<String>> SETTING_GRPC_HOST = listSetting( + "grpc.host", + emptyList(), + Function.identity(), + Setting.Property.NodeScope + ); + + /** + * Host list to bind. + */ + public static final Setting<List<String>> SETTING_GRPC_BIND_HOST = listSetting( + "grpc.bind_host", + SETTING_GRPC_HOST, + Function.identity(), + Setting.Property.NodeScope + ); + + /** + * Host list published to peers.
+ */ + public static final Setting<List<String>> SETTING_GRPC_PUBLISH_HOST = listSetting( + "grpc.publish_host", + SETTING_GRPC_HOST, + Function.identity(), + Setting.Property.NodeScope + ); + + /** + * Configure size of thread pool backing this transport server. + */ + public static final Setting<Integer> SETTING_GRPC_WORKER_COUNT = new Setting<>( + "grpc.netty.worker_count", + (s) -> Integer.toString(OpenSearchExecutors.allocatedProcessors(s)), + (s) -> Setting.parseInt(s, 1, "grpc.netty.worker_count"), + Setting.Property.NodeScope + ); + + private final Settings settings; + private final NetworkService networkService; + private final List<BindableService> services; + private final CopyOnWriteArrayList<Server> servers = new CopyOnWriteArrayList<>(); + private final String[] bindHosts; + private final String[] publishHosts; + private final PortsRange port; + private final int nettyEventLoopThreads; + + private volatile BoundTransportAddress boundAddress; + private volatile EventLoopGroup eventLoopGroup; + + /** + * Creates a new Netty4GrpcServerTransport instance. + * @param settings the configured settings. + * @param services the gRPC compatible services to be registered with the server. + * @param networkService the network service used to resolve bind/publish addresses. + */ + public Netty4GrpcServerTransport(Settings settings, List<BindableService> services, NetworkService networkService) { + this.settings = Objects.requireNonNull(settings); + this.services = Objects.requireNonNull(services); + this.networkService = Objects.requireNonNull(networkService); + + final List<String> httpBindHost = SETTING_GRPC_BIND_HOST.get(settings); + this.bindHosts = (httpBindHost.isEmpty() ? NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(settings) : httpBindHost).toArray( + Strings.EMPTY_ARRAY + ); + + final List<String> httpPublishHost = SETTING_GRPC_PUBLISH_HOST.get(settings); + this.publishHosts = (httpPublishHost.isEmpty() ?
NetworkService.GLOBAL_NETWORK_PUBLISH_HOST_SETTING.get(settings) : httpPublishHost) + .toArray(Strings.EMPTY_ARRAY); + + this.port = SETTING_GRPC_PORTS.get(settings); + this.nettyEventLoopThreads = SETTING_GRPC_WORKER_COUNT.get(settings); + } + + BoundTransportAddress boundAddress() { + return this.boundAddress; + } + + @Override + protected void doStart() { + boolean success = false; + try { + this.eventLoopGroup = new NioEventLoopGroup(nettyEventLoopThreads, daemonThreadFactory(settings, "grpc_event_loop")); + bindServer(); + success = true; + logger.info("Started gRPC server on port {}", port); + } finally { + if (!success) { + doStop(); + } + } + } + + @Override + protected void doStop() { + for (Server server : servers) { + if (server != null) { + server.shutdown(); + try { + server.awaitTermination(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warn("Interrupted while shutting down gRPC server"); + } finally { + server.shutdownNow(); + } + } + } + if (eventLoopGroup != null) { + try { + eventLoopGroup.shutdownGracefully(0, 10, TimeUnit.SECONDS).await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warn("Failed to shut down event loop group"); + } + } + } + + @Override + protected void doClose() { + + } + + private void bindServer() { + InetAddress[] hostAddresses; + try { + hostAddresses = networkService.resolveBindHostAddresses(bindHosts); + } catch (IOException e) { + throw new BindTransportException("Failed to resolve host [" + Arrays.toString(bindHosts) + "]", e); + } + + List<TransportAddress> boundAddresses = new ArrayList<>(hostAddresses.length); + for (InetAddress address : hostAddresses) { + boundAddresses.add(bindAddress(address, port)); + } + + final InetAddress publishInetAddress; + try { + publishInetAddress = networkService.resolvePublishHostAddresses(publishHosts); + } catch (Exception e) { + throw new BindTransportException("Failed to resolve publish address", e); + } + + final int publishPort = resolveTransportPublishPort(SETTING_GRPC_PUBLISH_PORT.get(settings), boundAddresses, publishInetAddress); + if (publishPort < 0) { + throw new BindTransportException( + "Failed to auto-resolve grpc publish port, multiple bound addresses " + + boundAddresses + + " with distinct ports and none of them matched the publish address (" + + publishInetAddress + + ").
" + + "Please specify a unique port by setting " + + SETTING_GRPC_PORTS.getKey() + + " or " + + SETTING_GRPC_PUBLISH_PORT.getKey() + ); + } + + TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); + this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), publishAddress); + logger.info("{}", boundAddress); + } + + private TransportAddress bindAddress(InetAddress hostAddress, PortsRange portRange) { + AtomicReference lastException = new AtomicReference<>(); + AtomicReference addr = new AtomicReference<>(); + + boolean success = portRange.iterate(portNumber -> { + try { + + final InetSocketAddress address = new InetSocketAddress(hostAddress, portNumber); + final NettyServerBuilder serverBuilder = NettyServerBuilder.forAddress(address, InsecureServerCredentials.create()) + .bossEventLoopGroup(eventLoopGroup) + .workerEventLoopGroup(eventLoopGroup) + .channelType(NioServerSocketChannel.class) + .addService(new HealthStatusManager().getHealthService()) + .addService(ProtoReflectionService.newInstance()); + + services.forEach(serverBuilder::addService); + + Server srv = serverBuilder.build().start(); + servers.add(srv); + addr.set(new TransportAddress(hostAddress, portNumber)); + logger.debug("Bound gRPC to address {{}}", address); + return true; + } catch (Exception e) { + lastException.set(e); + return false; + } + }); + + if (!success) { + throw new RuntimeException("Failed to bind to " + hostAddress + " on ports " + portRange, lastException.get()); + } + + return addr.get(); + } +} diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java similarity index 59% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java rename to plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java index 2cf5db786a60f..4a5d9d02b5b91 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/query/package-info.java +++ b/plugins/transport-grpc/src/main/java/org/opensearch/transport/grpc/package-info.java @@ -7,6 +7,7 @@ */ /** - * correlation query builder package + * gRPC transport implementation for OpenSearch. + * Provides network communication using the gRPC protocol. */ -package org.opensearch.plugin.correlation.core.index.query; +package org.opensearch.transport.grpc; diff --git a/plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy b/plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..398de576b6c5a --- /dev/null +++ b/plugins/transport-grpc/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +grant codeBase "${codebase.grpc-netty-shaded}" { + // for reading the system-wide configuration for the backlog of established sockets + permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read"; + + // netty makes and accepts socket connections + permission java.net.SocketPermission "*", "accept,connect"; + + // Netty sets custom classloader for some of its internal threads + permission java.lang.RuntimePermission "*", "setContextClassLoader"; +}; diff --git a/plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java b/plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java new file mode 100644 index 0000000000000..ebeff62c2c23c --- /dev/null +++ b/plugins/transport-grpc/src/test/java/org/opensearch/transport/grpc/Netty4GrpcServerTransportTests.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.transport.grpc; + +import org.opensearch.common.network.NetworkService; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; +import org.hamcrest.MatcherAssert; +import org.junit.Before; + +import java.util.List; + +import io.grpc.BindableService; + +import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.not; + +public class Netty4GrpcServerTransportTests extends OpenSearchTestCase { + + private NetworkService networkService; + private List services; + + @Before + public void setup() { + networkService = new NetworkService(List.of()); + services = List.of(); + } + + public void test() { + try (Netty4GrpcServerTransport transport = new Netty4GrpcServerTransport(createSettings(), services, networkService)) { + transport.start(); + + MatcherAssert.assertThat(transport.boundAddress().boundAddresses(), not(emptyArray())); + assertNotNull(transport.boundAddress().publishAddress().address()); + + transport.stop(); + } + } + + private static Settings createSettings() { + return Settings.builder().put(Netty4GrpcServerTransport.SETTING_GRPC_PORTS.getKey(), getPortRange()).build(); + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml index 9ec39660a4928..c840276ee1157 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/92_flat_object_support_doc_values.yml @@ -1,8 +1,9 @@ --- # The test setup includes: -# - Create flat_object mapping for flat_object_doc_values_test index -# - Index 9 example documents -# - Search tests about doc_values and index +# - 1.Create flat_object mapping for flat_object_doc_values_test index +# - 2.Index 9 example documents +# - 3.Search tests about doc_values and index +# - 4.Fetch doc_value from flat_object field setup: - skip: @@ -786,3 +787,48 @@ teardown: - length: { hits.hits: 1 } - match: { hits.hits.0._source.order: "order8" } + + # Stored Fields with exact dot path. 
+ - do: + search: + body: { + _source: false, + query: { + bool: { + must: [ + { + term: { + order: "order0" + } + } + ] + } + }, + stored_fields: "_none_", + docvalue_fields: [ "issue.labels.name","order" ] + } + + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields: { "order" : [ "order0" ], "issue.labels.name": [ "abc0" ] } } + + - do: + search: + body: { + _source: false, + query: { + bool: { + must: [ + { + term: { + order: "order0" + } + } + ] + } + }, + stored_fields: "_none_", + docvalue_fields: [ "issue.labels.name" ] + } + + - length: { hits.hits: 1 } + - match: { hits.hits.0.fields: { "issue.labels.name": [ "abc0" ] } } diff --git a/server/licenses/lucene-analysis-common-9.12.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.12.0.jar.sha1 deleted file mode 100644 index fd952034f3742..0000000000000 --- a/server/licenses/lucene-analysis-common-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c2503cfaba37249e20ea877555cb52ee89d1ae1 \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-9.12.1.jar.sha1 b/server/licenses/lucene-analysis-common-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..2b9a8cf6e43fd --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.12.1.jar.sha1 @@ -0,0 +1 @@ +86836497e35c1ab33259d9864ceb280c0016075e \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 deleted file mode 100644 index 2993134edd610..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -68fe98c94e9644a584ea1bf525e68d9406fc61ec \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.12.1.jar.sha1 b/server/licenses/lucene-backward-codecs-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..89d6ddbec3eec --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.12.1.jar.sha1 @@ -0,0 +1 @@ +d0e79d06a0ed021663737e4df777ab7b80cd28c4 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.12.0.jar.sha1 b/server/licenses/lucene-core-9.12.0.jar.sha1 deleted file mode 100644 index e55f896dedb63..0000000000000 --- a/server/licenses/lucene-core-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fdb055d569bb20bfce9618fe2b01c29bab7f290c \ No newline at end of file diff --git a/server/licenses/lucene-core-9.12.1.jar.sha1 b/server/licenses/lucene-core-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..2521c91a81d64 --- /dev/null +++ b/server/licenses/lucene-core-9.12.1.jar.sha1 @@ -0,0 +1 @@ +91447c90c1180122142773b5baddaf8547124794 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.12.0.jar.sha1 b/server/licenses/lucene-grouping-9.12.0.jar.sha1 deleted file mode 100644 index 48388974bb38f..0000000000000 --- a/server/licenses/lucene-grouping-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ccf99f8db57aa97b2c1f95c5cc2a11156a043921 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.12.1.jar.sha1 b/server/licenses/lucene-grouping-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..61d7ff62ac3cc --- /dev/null +++ b/server/licenses/lucene-grouping-9.12.1.jar.sha1 @@ -0,0 +1 @@ +e4bc3d0aa7eec4f41b4f350de0263a8d5625d2b3 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.12.0.jar.sha1 b/server/licenses/lucene-highlighter-9.12.0.jar.sha1 deleted file mode 100644 index 3d457579da892..0000000000000 --- a/server/licenses/lucene-highlighter-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-e93429f66fbcd3b58d81f01223d6ce5688047296 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.12.1.jar.sha1 b/server/licenses/lucene-highlighter-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..57fc10a58b806 --- /dev/null +++ b/server/licenses/lucene-highlighter-9.12.1.jar.sha1 @@ -0,0 +1 @@ +2eeedfcec47dd65969f36e88931ed452291dd43e \ No newline at end of file diff --git a/server/licenses/lucene-join-9.12.0.jar.sha1 b/server/licenses/lucene-join-9.12.0.jar.sha1 deleted file mode 100644 index c5f6d16598a60..0000000000000 --- a/server/licenses/lucene-join-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -14c802d6955eaf11107375a2ada8fe8ec53b3e01 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.12.1.jar.sha1 b/server/licenses/lucene-join-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..8d46f20c39974 --- /dev/null +++ b/server/licenses/lucene-join-9.12.1.jar.sha1 @@ -0,0 +1 @@ +3c5e9ff2925a8373ae0d35c1d0a7b2465cebec9f \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.12.0.jar.sha1 b/server/licenses/lucene-memory-9.12.0.jar.sha1 deleted file mode 100644 index e7ac44089c006..0000000000000 --- a/server/licenses/lucene-memory-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ffe090339540876b40df792aee51a42af6b3f37f \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.12.1.jar.sha1 b/server/licenses/lucene-memory-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..55de1c9322aa3 --- /dev/null +++ b/server/licenses/lucene-memory-9.12.1.jar.sha1 @@ -0,0 +1 @@ +e80eecfb1dcc324140387c8357c81e12c2a01937 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.12.0.jar.sha1 b/server/licenses/lucene-misc-9.12.0.jar.sha1 deleted file mode 100644 index afb546be4e032..0000000000000 --- a/server/licenses/lucene-misc-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad17704ee90eb926b6d3105f7027485cdadbecd9 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.12.1.jar.sha1 b/server/licenses/lucene-misc-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..86982eb1c900c --- /dev/null +++ b/server/licenses/lucene-misc-9.12.1.jar.sha1 @@ -0,0 +1 @@ +4e65d01d1c23f3f49dc325d552701bbefafee7ee \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.12.0.jar.sha1 b/server/licenses/lucene-queries-9.12.0.jar.sha1 deleted file mode 100644 index e24756e38dad2..0000000000000 --- a/server/licenses/lucene-queries-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3ac2a62b0b55c5725bb65f0c5454f9f8a401cf43 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.12.1.jar.sha1 b/server/licenses/lucene-queries-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..f2087ec8eb623 --- /dev/null +++ b/server/licenses/lucene-queries-9.12.1.jar.sha1 @@ -0,0 +1 @@ +14f24315041b686683dba4bc679ca7dc6a505906 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.12.0.jar.sha1 b/server/licenses/lucene-queryparser-9.12.0.jar.sha1 deleted file mode 100644 index e93e00a063dd0..0000000000000 --- a/server/licenses/lucene-queryparser-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -55959399373876f4c184944315458dc6b88fbd81 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.12.1.jar.sha1 b/server/licenses/lucene-queryparser-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..489e6719da342 --- /dev/null +++ b/server/licenses/lucene-queryparser-9.12.1.jar.sha1 @@ -0,0 +1 @@ +aa6df09a99f8881d843e9863aa1713dc9f3ed24f \ No newline at end of 
file diff --git a/server/licenses/lucene-sandbox-9.12.0.jar.sha1 b/server/licenses/lucene-sandbox-9.12.0.jar.sha1 deleted file mode 100644 index a3fd8446e0dbc..0000000000000 --- a/server/licenses/lucene-sandbox-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f65882536d681c11a1cbc920e5679201101e3603 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.12.1.jar.sha1 b/server/licenses/lucene-sandbox-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..c1d613e23f1fe --- /dev/null +++ b/server/licenses/lucene-sandbox-9.12.1.jar.sha1 @@ -0,0 +1 @@ +1a66485629d60779f039fc26360f4374ef1496e7 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 deleted file mode 100644 index b0f11fb667faf..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d00cc7cc2279822ef6740f0677cafacfb439fa8 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.12.1.jar.sha1 b/server/licenses/lucene-spatial-extras-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..c38b794ce9948 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.12.1.jar.sha1 @@ -0,0 +1 @@ +0a7379410eff21676472adc8ea76a57891ec83c2 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.12.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.12.0.jar.sha1 deleted file mode 100644 index 858eee25ac191..0000000000000 --- a/server/licenses/lucene-spatial3d-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e3092632ca1d4427d3ebb2c866ac89d90f5b61ec \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.12.1.jar.sha1 b/server/licenses/lucene-spatial3d-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..bc327a8cec830 --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.12.1.jar.sha1 @@ -0,0 +1 @@ +d2fdea4edabb1f616f494999651c43abfd0aa124 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.12.0.jar.sha1 b/server/licenses/lucene-suggest-9.12.0.jar.sha1 deleted file mode 100644 index 973a7726d845d..0000000000000 --- a/server/licenses/lucene-suggest-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e1c6636499317ebe498f3490a1ec8b86b8a363dd \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.12.1.jar.sha1 b/server/licenses/lucene-suggest-9.12.1.jar.sha1 new file mode 100644 index 0000000000000..961f6da619149 --- /dev/null +++ b/server/licenses/lucene-suggest-9.12.1.jar.sha1 @@ -0,0 +1 @@ +0660e0996ec7653fe0c13c608137e264645eecac \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsActionIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsActionIT.java index 32d5b3db85629..a7cb4847b45e5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsActionIT.java @@ -8,9 +8,15 @@ package org.opensearch.action.admin.cluster.shards; +import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.opensearch.action.admin.indices.datastream.DataStreamTestCase; import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; +import org.opensearch.action.admin.indices.stats.ShardStats; +import org.opensearch.action.pagination.PageParams; +import 
org.opensearch.client.Requests; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.action.ActionListener; @@ -20,15 +26,19 @@ import org.opensearch.test.OpenSearchIntegTestCase; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.opensearch.common.unit.TimeValue.timeValueMillis; import static org.opensearch.search.SearchService.NO_TIMEOUT; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(numDataNodes = 0, scope = OpenSearchIntegTestCase.Scope.TEST) -public class TransportCatShardsActionIT extends OpenSearchIntegTestCase { +public class TransportCatShardsActionIT extends DataStreamTestCase { public void testCatShardsWithSuccessResponse() throws InterruptedException { internalCluster().startClusterManagerOnlyNodes(1); @@ -125,4 +135,334 @@ public void onFailure(Exception e) { latch.await(); } + public void testListShardsWithHiddenIndex() throws Exception { + final int numShards = 1; + final int numReplicas = 1; + internalCluster().startClusterManagerOnlyNodes(1); + internalCluster().startDataOnlyNodes(2); + createIndex( + "test-hidden-idx", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .build() + ); + ensureGreen(); + + // Verify result for a default query: "_list/shards" + CatShardsRequest listShardsRequest = getListShardsTransportRequest(Strings.EMPTY_ARRAY, 100); + ActionFuture<CatShardsResponse> listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), "test-hidden-idx", 2, true); + + // Verify result when hidden index is explicitly queried: "_list/shards" + listShardsRequest = getListShardsTransportRequest(new String[] { "test-hidden-idx" }, 100); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), "test-hidden-idx", 2, true); + + // Verify result when hidden index is queried with wildcard: "_list/shards*" + // Since the ClusterStateAction underneath is invoked with lenientExpandOpen IndicesOptions, + // Wildcards for hidden indices should not get resolved.
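+ // (lenientExpandOpen is assumed to ignore unavailable indices and to expand wildcards to open, non-hidden indices only, so "test-hidden-idx*" resolves to nothing here.)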
+ listShardsRequest = getListShardsTransportRequest(new String[] { "test-hidden-idx*" }, 100); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertEquals(0, listShardsResponse.get().getResponseShards().size()); + assertSingleIndexResponseShards(listShardsResponse.get(), "test-hidden-idx", 0, false); + } + + public void testListShardsWithClosedIndex() throws Exception { + final int numShards = 1; + final int numReplicas = 1; + internalCluster().startClusterManagerOnlyNodes(1); + internalCluster().startDataOnlyNodes(2); + createIndex( + "test-closed-idx", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + ensureGreen(); + + // close index "test-closed-idx" + client().admin().indices().close(Requests.closeIndexRequest("test-closed-idx")).get(); + ensureGreen(); + + // Verify result for a default query: "_list/shards" + CatShardsRequest listShardsRequest = getListShardsTransportRequest(Strings.EMPTY_ARRAY, 100); + ActionFuture<CatShardsResponse> listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), "test-closed-idx", 2, false); + + // Verify result when closed index is explicitly queried: "_list/shards" + listShardsRequest = getListShardsTransportRequest(new String[] { "test-closed-idx" }, 100); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), "test-closed-idx", 2, false); + + // Verify result when closed index is queried with wildcard: "_list/shards*" + // Since the ClusterStateAction underneath is invoked with lenientExpandOpen IndicesOptions, + // Wildcards for closed indices should not get resolved.
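+ // REST equivalent of the request below (hypothetical): GET /_list/shards/test-closed-idx*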
+ listShardsRequest = getListShardsTransportRequest(new String[] { "test-closed-idx*" }, 100); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), "test-closed-idx", 0, false); + } + + public void testListShardsWithClosedAndHiddenIndices() throws InterruptedException, ExecutionException { + final int numIndices = 4; + final int numShards = 1; + final int numReplicas = 2; + final int pageSize = 100; + internalCluster().startClusterManagerOnlyNodes(1); + internalCluster().startDataOnlyNodes(3); + createIndex( + "test", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-2", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-closed-idx", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-hidden-idx", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .build() + ); + // close index "test-closed-idx" + client().admin().indices().close(Requests.closeIndexRequest("test-closed-idx")).get(); + ensureGreen(); + + // Verifying response for default queries: /_list/shards + // all the shards should be part of the response; however, stats should not be displayed for the closed index + CatShardsRequest listShardsRequest = getListShardsTransportRequest(Strings.EMPTY_ARRAY, pageSize); + ActionFuture<CatShardsResponse> listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertTrue(listShardsResponse.get().getResponseShards().stream().anyMatch(shard -> shard.getIndexName().equals("test-closed-idx"))); + assertTrue(listShardsResponse.get().getResponseShards().stream().anyMatch(shard -> shard.getIndexName().equals("test-hidden-idx"))); + assertEquals(numIndices * numShards * (numReplicas + 1), listShardsResponse.get().getResponseShards().size()); + assertFalse( + Arrays.stream(listShardsResponse.get().getIndicesStatsResponse().getShards()) + .anyMatch(shardStats -> shardStats.getShardRouting().getIndexName().equals("test-closed-idx")) + ); + assertEquals( + (numIndices - 1) * numShards * (numReplicas + 1), + listShardsResponse.get().getIndicesStatsResponse().getShards().length + ); + + // Verifying responses when hidden indices are explicitly queried: /_list/shards/test-hidden-idx + // Shards for hidden index should appear in response along with stats + listShardsRequest.setIndices(List.of("test-hidden-idx").toArray(new String[0])); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertTrue(listShardsResponse.get().getResponseShards().stream().allMatch(shard -> shard.getIndexName().equals("test-hidden-idx"))); + assertTrue( + Arrays.stream(listShardsResponse.get().getIndicesStatsResponse().getShards()) + .allMatch(shardStats -> shardStats.getShardRouting().getIndexName().equals("test-hidden-idx")) + ); + assertEquals( + listShardsResponse.get().getResponseShards().size(), + listShardsResponse.get().getIndicesStatsResponse().getShards().length + ); + + // Verifying responses when hidden indices are queried with wildcards: /_list/shards/test-hidden-idx* + //
Shards for hidden index should not appear in response with stats. + listShardsRequest.setIndices(List.of("test-hidden-idx*").toArray(new String[0])); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertEquals(0, listShardsResponse.get().getResponseShards().size()); + assertEquals(0, listShardsResponse.get().getIndicesStatsResponse().getShards().length); + + // Explicitly querying for closed index: /_list/shards/test-closed-idx + // should output closed shards without stats. + listShardsRequest.setIndices(List.of("test-closed-idx").toArray(new String[0])); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertTrue(listShardsResponse.get().getResponseShards().stream().anyMatch(shard -> shard.getIndexName().equals("test-closed-idx"))); + assertEquals(0, listShardsResponse.get().getIndicesStatsResponse().getShards().length); + + // Querying for closed index with wildcards: /_list/shards/test-closed-idx* + // should not output any closed shards. + listShardsRequest.setIndices(List.of("test-closed-idx*").toArray(new String[0])); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertEquals(0, listShardsResponse.get().getResponseShards().size()); + assertEquals(0, listShardsResponse.get().getIndicesStatsResponse().getShards().length); + } + + public void testListShardsWithClosedIndicesAcrossPages() throws InterruptedException, ExecutionException { + final int numIndices = 4; + final int numShards = 1; + final int numReplicas = 2; + final int pageSize = numShards * (numReplicas + 1); + internalCluster().startClusterManagerOnlyNodes(1); + internalCluster().startDataOnlyNodes(3); + createIndex( + "test-open-idx-1", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-closed-idx-1", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-open-idx-2", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-closed-idx-2", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .build() + ); + // close index "test-closed-idx-1" + client().admin().indices().close(Requests.closeIndexRequest("test-closed-idx-1")).get(); + ensureGreen(); + // close index "test-closed-idx-2" + client().admin().indices().close(Requests.closeIndexRequest("test-closed-idx-2")).get(); + ensureGreen(); + + // Verifying response for default queries: /_list/shards + List<ShardRouting> responseShardRouting = new ArrayList<>(); + List<ShardStats> responseShardStats = new ArrayList<>(); + String nextToken = null; + CatShardsRequest listShardsRequest; + ActionFuture<CatShardsResponse> listShardsResponse; + do { + listShardsRequest = getListShardsTransportRequest(Strings.EMPTY_ARRAY, nextToken, pageSize); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + nextToken = listShardsResponse.get().getPageToken().getNextToken(); + responseShardRouting.addAll(listShardsResponse.get().getResponseShards()); + responseShardStats.addAll(List.of(listShardsResponse.get().getIndicesStatsResponse().getShards())); + } while (nextToken !=
null); + + assertTrue(responseShardRouting.stream().anyMatch(shard -> shard.getIndexName().equals("test-closed-idx-1"))); + assertTrue(responseShardRouting.stream().anyMatch(shard -> shard.getIndexName().equals("test-closed-idx-2"))); + assertEquals(numIndices * numShards * (numReplicas + 1), responseShardRouting.size()); + // ShardsStats should only appear for 2 open indices + assertFalse( + responseShardStats.stream().anyMatch(shardStats -> shardStats.getShardRouting().getIndexName().contains("test-closed-idx")) + ); + assertEquals(2 * numShards * (numReplicas + 1), responseShardStats.size()); + } + + public void testListShardsWithDataStream() throws Exception { + final int numDataNodes = 3; + String dataStreamName = "logs-test"; + internalCluster().startClusterManagerOnlyNodes(1); + internalCluster().startDataOnlyNodes(numDataNodes); + // Create an index template for data streams. + createDataStreamIndexTemplate("data-stream-template", List.of("logs-*")); + // Create data streams matching the "logs-*" index pattern. + createDataStream(dataStreamName); + ensureGreen(); + // Verifying default query's result. Data stream should have created a hidden backing index in the + // background and all the corresponding shards should appear in the response along with stats. + CatShardsRequest listShardsRequest = getListShardsTransportRequest(Strings.EMPTY_ARRAY, numDataNodes * numDataNodes); + ActionFuture<CatShardsResponse> listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), dataStreamName, numDataNodes + 1, true); + // Verifying result when data stream is directly queried. Again, all the shards with stats should appear + listShardsRequest = getListShardsTransportRequest(new String[] { dataStreamName }, numDataNodes * numDataNodes); + listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertSingleIndexResponseShards(listShardsResponse.get(), dataStreamName, numDataNodes + 1, true); + } + + public void testListShardsWithAliases() throws Exception { + final int numShards = 1; + final int numReplicas = 1; + final String aliasName = "test-alias"; + internalCluster().startClusterManagerOnlyNodes(1); + internalCluster().startDataOnlyNodes(3); + createIndex( + "test-closed-idx", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .build() + ); + createIndex( + "test-hidden-idx", + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicas) + .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) + .build() + ); + ensureGreen(); + + // Point test alias to both the indices (one being hidden while the other is closed) + final IndicesAliasesRequest request = new IndicesAliasesRequest().origin("allowed"); + request.addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test-closed-idx").alias(aliasName)); + assertAcked(client().admin().indices().aliases(request).actionGet()); + + request.addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test-hidden-idx").alias(aliasName)); + assertAcked(client().admin().indices().aliases(request).actionGet()); + + // close index "test-closed-idx" + client().admin().indices().close(Requests.closeIndexRequest("test-closed-idx")).get(); + ensureGreen(); + + // Verifying result when an alias is explicitly queried.
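+ // The alias resolves to both backing indices: routing entries are expected for all four shards, while IndicesStats should cover only the two shards of the open (hidden) index, since stats are not collected for closed indices (see the assertions below).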
+ CatShardsRequest listShardsRequest = getListShardsTransportRequest(new String[] { aliasName }, 100); + ActionFuture<CatShardsResponse> listShardsResponse = client().execute(CatShardsAction.INSTANCE, listShardsRequest); + assertTrue( + listShardsResponse.get() + .getResponseShards() + .stream() + .allMatch(shard -> shard.getIndexName().equals("test-hidden-idx") || shard.getIndexName().equals("test-closed-idx")) + ); + assertTrue( + Arrays.stream(listShardsResponse.get().getIndicesStatsResponse().getShards()) + .allMatch(shardStats -> shardStats.getShardRouting().getIndexName().equals("test-hidden-idx")) + ); + assertEquals(4, listShardsResponse.get().getResponseShards().size()); + assertEquals(2, listShardsResponse.get().getIndicesStatsResponse().getShards().length); + } + + private void assertSingleIndexResponseShards( + CatShardsResponse catShardsResponse, + String indexNamePattern, + final int totalNumShards, + boolean shardStatsExist + ) { + assertTrue(catShardsResponse.getResponseShards().stream().allMatch(shard -> shard.getIndexName().contains(indexNamePattern))); + assertEquals(totalNumShards, catShardsResponse.getResponseShards().size()); + if (shardStatsExist) { + assertTrue( + Arrays.stream(catShardsResponse.getIndicesStatsResponse().getShards()) + .allMatch(shardStats -> shardStats.getShardRouting().getIndexName().contains(indexNamePattern)) + ); + } + assertEquals(shardStatsExist ? totalNumShards : 0, catShardsResponse.getIndicesStatsResponse().getShards().length); + } + + private CatShardsRequest getListShardsTransportRequest(String[] indices, final int pageSize) { + return getListShardsTransportRequest(indices, null, pageSize); + } + + private CatShardsRequest getListShardsTransportRequest(String[] indices, String nextToken, final int pageSize) { + CatShardsRequest listShardsRequest = new CatShardsRequest(); + listShardsRequest.setCancelAfterTimeInterval(NO_TIMEOUT); + listShardsRequest.setIndices(indices); + listShardsRequest.setPageParams(new PageParams(nextToken, PageParams.PARAM_ASC_SORT_VALUE, pageSize)); + return listShardsRequest; + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java index c91c4d7bbb63b..3f9053576329c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java @@ -56,7 +56,7 @@ public class StarTreeMapperIT extends OpenSearchIntegTestCase { .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)) .build(); - private static XContentBuilder createMinimalTestMapping(boolean invalidDim, boolean invalidMetric, boolean ipdim) { + private static XContentBuilder createMinimalTestMapping(boolean invalidDim, boolean invalidMetric, boolean wildcard) { try { return jsonBuilder().startObject() .startObject("composite") @@ -68,7 +68,7 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool .endObject() .startArray("ordered_dimensions") .startObject() - .field("name", getDim(invalidDim, ipdim)) + .field("name", getDim(invalidDim, wildcard)) .endObject() .startObject() .field("name", "keyword_dv") @@ -102,8 +102,16 @@ private static XContentBuilder createMinimalTestMapping(boolean invalidDim, bool .field("type", "keyword") .field("doc_values", false) .endObject() + .startObject("ip_no_dv") + .field("type", "ip") + .field("doc_values",
false) + .endObject() .startObject("ip") .field("type", "ip") + .field("doc_values", true) + .endObject() + .startObject("wildcard") + .field("type", "wildcard") .field("doc_values", false) .endObject() .endObject() @@ -362,11 +370,11 @@ private XContentBuilder getMappingWithDuplicateFields(boolean isDuplicateDim, bo return mapping; } - private static String getDim(boolean hasDocValues, boolean isKeyword) { + private static String getDim(boolean hasDocValues, boolean isWildCard) { if (hasDocValues) { - return random().nextBoolean() ? "numeric" : "keyword"; - } else if (isKeyword) { - return "ip"; + return random().nextBoolean() ? "numeric" : random().nextBoolean() ? "keyword" : "ip_no_dv"; + } else if (isWildCard) { + return "wildcard"; } return "numeric_dv"; } @@ -748,7 +756,7 @@ public void testUnsupportedDim() { () -> prepareCreate(TEST_INDEX).setSettings(settings).setMapping(createMinimalTestMapping(false, false, true)).get() ); assertEquals( - "Failed to parse mapping [_doc]: unsupported field type associated with dimension [ip] as part of star tree field [startree-1]", + "Failed to parse mapping [_doc]: unsupported field type associated with dimension [wildcard] as part of star tree field [startree-1]", ex.getMessage() ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SearchReplicaReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SearchReplicaReplicationIT.java index a1b512c326ac5..f660695af9965 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SearchReplicaReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SearchReplicaReplicationIT.java @@ -8,14 +8,20 @@ package org.opensearch.indices.replication; +import org.opensearch.action.admin.indices.replication.SegmentReplicationStatsResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.FeatureFlags; +import org.opensearch.index.SegmentReplicationPerGroupStats; +import org.opensearch.index.SegmentReplicationShardStats; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.OpenSearchIntegTestCase; import org.junit.After; import org.junit.Before; import java.nio.file.Path; +import java.util.List; +import java.util.Set; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class SearchReplicaReplicationIT extends SegmentReplicationBaseIT { @@ -82,4 +88,47 @@ public void testReplication() throws Exception { waitForSearchableDocs(docCount, primary, replica); } + public void testSegmentReplicationStatsResponseWithSearchReplica() throws Exception { + internalCluster().startClusterManagerOnlyNode(); + final List<String> nodes = internalCluster().startDataOnlyNodes(2); + createIndex( + INDEX_NAME, + Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .put("number_of_search_only_replicas", 1) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .build() + ); + ensureGreen(INDEX_NAME); + + final int docCount = 5; + for (int i = 0; i < docCount; i++) { + client().prepareIndex(INDEX_NAME).setId(Integer.toString(i)).setSource("field", "value" + i).execute().get(); + } + refresh(INDEX_NAME); + waitForSearchableDocs(docCount, nodes); + + SegmentReplicationStatsResponse segmentReplicationStatsResponse = dataNodeClient().admin() + .indices()
.prepareSegmentReplicationStats(INDEX_NAME) + .setDetailed(true) + .execute() + .actionGet(); + + // Verify the number of indices + assertEquals(1, segmentReplicationStatsResponse.getReplicationStats().size()); + // Verify total shards + assertEquals(2, segmentReplicationStatsResponse.getTotalShards()); + // Verify the number of primary shards + assertEquals(1, segmentReplicationStatsResponse.getReplicationStats().get(INDEX_NAME).size()); + + SegmentReplicationPerGroupStats perGroupStats = segmentReplicationStatsResponse.getReplicationStats().get(INDEX_NAME).get(0); + Set<SegmentReplicationShardStats> replicaStats = perGroupStats.getReplicaStats(); + // Verify the number of replica stats + assertEquals(1, replicaStats.size()); + for (SegmentReplicationShardStats replicaStat : replicaStats) { + assertNotNull(replicaStat.getCurrentReplicationState()); + } + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java index ef7da395d2151..79caef1f45a26 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java @@ -82,8 +82,7 @@ protected Settings nodeSettings(int nodeOrdinal) { } public void testSimpleTimeout() throws Exception { - final int numDocs = 1000; - for (int i = 0; i < numDocs; i++) { + for (int i = 0; i < 32; i++) { client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value").get(); } refresh("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index 2ce96092203e8..60a6e59014e11 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -1023,7 +1023,7 @@ public void testDocValueFields() throws Exception { .startObject("ip_field") .field("type", "ip") .endObject() - .startObject("flat_object_field") + .startObject("flat_object_field1") .field("type", "flat_object") .endObject() .endObject() @@ -1050,9 +1050,11 @@ public void testDocValueFields() throws Exception { .field("boolean_field", true) .field("binary_field", new byte[] { 42, 100 }) .field("ip_field", "::1") - .field("flat_object_field") + .field("flat_object_field1") .startObject() + .field("fooa", "bara") .field("foo", "bar") + .field("foob", "barb") .endObject() .endObject() ) @@ -1075,7 +1077,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("boolean_field") .addDocValueField("binary_field") .addDocValueField("ip_field") - .addDocValueField("flat_object_field"); + .addDocValueField("flat_object_field1.foo"); SearchResponse searchResponse = builder.get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -1097,7 +1099,7 @@ public void testDocValueFields() throws Exception { "keyword_field", "binary_field", "ip_field", - "flat_object_field" + "flat_object_field1.foo" ) ) ); @@ -1116,7 +1118,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ")); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); -
assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field").getValue(), equalTo("flat_object_field.foo")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field1.foo").getValue(), equalTo("bar")); builder = client().prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"); searchResponse = builder.get(); @@ -1139,8 +1141,7 @@ public void testDocValueFields() throws Exception { "text_field", "keyword_field", "binary_field", - "ip_field", - "flat_object_field" + "ip_field" ) ) ); @@ -1160,7 +1161,6 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ")); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); - assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field").getValue(), equalTo("flat_object_field.foo")); builder = client().prepareSearch() .setQuery(matchAllQuery()) @@ -1176,7 +1176,7 @@ public void testDocValueFields() throws Exception { .addDocValueField("boolean_field", "use_field_mapping") .addDocValueField("binary_field", "use_field_mapping") .addDocValueField("ip_field", "use_field_mapping") - .addDocValueField("flat_object_field", "use_field_mapping"); + .addDocValueField("flat_object_field1.foo", null); ; searchResponse = builder.get(); @@ -1199,7 +1199,7 @@ public void testDocValueFields() throws Exception { "keyword_field", "binary_field", "ip_field", - "flat_object_field" + "flat_object_field1.foo" ) ) ); @@ -1219,7 +1219,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValue(), equalTo("KmQ")); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1")); - assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field").getValue(), equalTo("flat_object_field.foo")); + assertThat(searchResponse.getHits().getAt(0).getFields().get("flat_object_field1.foo").getValue(), equalTo("bar")); builder = client().prepareSearch() .setQuery(matchAllQuery()) diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsAction.java index 7b36b7a10f4f2..01efa96a7369e 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/shards/TransportCatShardsAction.java @@ -18,6 +18,8 @@ import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.TimeoutTaskCancellationUtility; import org.opensearch.client.node.NodeClient; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.breaker.ResponseLimitBreachedException; import org.opensearch.common.breaker.ResponseLimitSettings; import org.opensearch.common.inject.Inject; @@ -27,6 +29,7 @@ import org.opensearch.tasks.Task; import org.opensearch.transport.TransportService; +import java.util.List; import java.util.Objects; import static org.opensearch.common.breaker.ResponseLimitSettings.LimitEntity.SHARDS; @@ -98,9 +101,6 
@@ public void onResponse(ClusterStateResponse clusterStateResponse) { shardsRequest.getPageParams(), clusterStateResponse ); - String[] indices = Objects.isNull(paginationStrategy) - ? shardsRequest.getIndices() - : paginationStrategy.getRequestedIndices().toArray(new String[0]); catShardsResponse.setNodes(clusterStateResponse.getState().getNodes()); catShardsResponse.setResponseShards( Objects.isNull(paginationStrategy) @@ -108,8 +108,12 @@ public void onResponse(ClusterStateResponse clusterStateResponse) { : paginationStrategy.getRequestedEntities() ); catShardsResponse.setPageToken(Objects.isNull(paginationStrategy) ? null : paginationStrategy.getResponseToken()); + + String[] indices = Objects.isNull(paginationStrategy) + ? shardsRequest.getIndices() + : filterClosedIndices(clusterStateResponse.getState(), paginationStrategy.getRequestedIndices()); // For paginated queries, if strategy outputs no shards to be returned, avoid fetching IndicesStats. - if (shouldSkipIndicesStatsRequest(paginationStrategy)) { + if (shouldSkipIndicesStatsRequest(paginationStrategy, indices)) { catShardsResponse.setIndicesStatsResponse(IndicesStatsResponse.getEmptyResponse()); cancellableListener.onResponse(catShardsResponse); return; @@ -166,7 +170,19 @@ private void validateRequestLimit( } } - private boolean shouldSkipIndicesStatsRequest(ShardPaginationStrategy paginationStrategy) { - return Objects.nonNull(paginationStrategy) && paginationStrategy.getRequestedEntities().isEmpty(); + private boolean shouldSkipIndicesStatsRequest(ShardPaginationStrategy paginationStrategy, String[] indices) { + return Objects.nonNull(paginationStrategy) && (indices == null || indices.length == 0); + } + + /** + * Will be used by paginated query (_list/shards) to filter out closed indices (only consider OPEN) before fetching + * IndicesStats. Since pagination strategy always passes concrete indices to TransportIndicesStatsAction, + * the default behaviour of StrictExpandOpenAndForbidClosed leads to errors if closed indices are encountered. 
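+ * For example, if the strategy requested [open-idx, closed-idx], only [open-idx] is forwarded to TransportIndicesStatsAction.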
+ */ + private String[] filterClosedIndices(ClusterState clusterState, List<String> strategyIndices) { + return strategyIndices.stream().filter(index -> { + IndexMetadata metadata = clusterState.metadata().indices().get(index); + return metadata != null && metadata.getState().equals(IndexMetadata.State.CLOSE) == false; + }).toArray(String[]::new); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsAction.java index fc97d67c6c3af..44408c5043fcf 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsAction.java @@ -21,7 +21,6 @@ import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.index.IndexService; import org.opensearch.index.SegmentReplicationPerGroupStats; import org.opensearch.index.SegmentReplicationPressureService; import org.opensearch.index.SegmentReplicationShardStats; @@ -38,7 +37,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Transport action for shard segment replication operation. This transport action does not actually @@ -96,11 +97,11 @@ protected SegmentReplicationStatsResponse newResponse( ) { String[] shards = request.shards(); final List<Integer> shardsToFetch = Arrays.stream(shards).map(Integer::valueOf).collect(Collectors.toList()); - // organize replica responses by allocationId. final Map<String, SegmentReplicationState> replicaStats = new HashMap<>(); // map of index name to list of replication group stats. final Map<String, List<SegmentReplicationPerGroupStats>> primaryStats = new HashMap<>(); + for (SegmentReplicationShardStatsResponse response : responses) { if (response != null) { if (response.getReplicaStats() != null) { @@ -109,6 +110,7 @@ protected SegmentReplicationStatsResponse newResponse( replicaStats.putIfAbsent(shardRouting.allocationId().getId(), response.getReplicaStats()); } } + if (response.getPrimaryStats() != null) { final ShardId shardId = response.getPrimaryStats().getShardId(); if (shardsToFetch.isEmpty() || shardsToFetch.contains(shardId.getId())) { @@ -126,15 +128,20 @@ protected SegmentReplicationStatsResponse newResponse( } } } - // combine the replica stats to the shard stat entry in each group.
- for (Map.Entry<String, List<SegmentReplicationPerGroupStats>> entry : primaryStats.entrySet()) { - for (SegmentReplicationPerGroupStats group : entry.getValue()) { - for (SegmentReplicationShardStats replicaStat : group.getReplicaStats()) { - replicaStat.setCurrentReplicationState(replicaStats.getOrDefault(replicaStat.getAllocationId(), null)); - } - } - } - return new SegmentReplicationStatsResponse(totalShards, successfulShards, failedShards, primaryStats, shardFailures); + + Map<String, List<SegmentReplicationPerGroupStats>> replicationStats = primaryStats.entrySet() + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> entry.getValue() + .stream() + .map(groupStats -> updateGroupStats(groupStats, replicaStats)) + .collect(Collectors.toList()) + ) + ); + + return new SegmentReplicationStatsResponse(totalShards, successfulShards, failedShards, replicationStats, shardFailures); } @Override @@ -144,9 +151,8 @@ protected SegmentReplicationStatsRequest readRequestFrom(StreamInput in) throws @Override protected SegmentReplicationShardStatsResponse shardOperation(SegmentReplicationStatsRequest request, ShardRouting shardRouting) { - IndexService indexService = indicesService.indexServiceSafe(shardRouting.shardId().getIndex()); - IndexShard indexShard = indexService.getShard(shardRouting.shardId().id()); ShardId shardId = shardRouting.shardId(); + IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); if (indexShard.indexSettings().isSegRepEnabledOrRemoteNode() == false) { return null; @@ -156,11 +162,7 @@ protected SegmentReplicationShardStatsResponse shardOperation(SegmentReplication return new SegmentReplicationShardStatsResponse(pressureService.getStatsForShard(indexShard)); } - // return information about only on-going segment replication events. - if (request.activeOnly()) { - return new SegmentReplicationShardStatsResponse(targetService.getOngoingEventSegmentReplicationState(shardId)); - } - return new SegmentReplicationShardStatsResponse(targetService.getSegmentReplicationState(shardId)); + return new SegmentReplicationShardStatsResponse(getSegmentReplicationState(shardId, request.activeOnly())); } @Override @@ -181,4 +183,83 @@ protected ClusterBlockException checkRequestBlock( ) { return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, concreteIndices); } + + private SegmentReplicationPerGroupStats updateGroupStats( + SegmentReplicationPerGroupStats groupStats, + Map<String, SegmentReplicationState> replicaStats + ) { + // Update the SegmentReplicationState for each of the replicas + Set<SegmentReplicationShardStats> updatedReplicaStats = groupStats.getReplicaStats() + .stream() + .peek(replicaStat -> replicaStat.setCurrentReplicationState(replicaStats.getOrDefault(replicaStat.getAllocationId(), null))) + .collect(Collectors.toSet()); + + // Compute search replica stats + Set<SegmentReplicationShardStats> searchReplicaStats = computeSearchReplicaStats(groupStats.getShardId(), replicaStats); + + // Combine ReplicaStats and SearchReplicaStats + Set<SegmentReplicationShardStats> combinedStats = Stream.concat(updatedReplicaStats.stream(), searchReplicaStats.stream()) + .collect(Collectors.toSet()); + + return new SegmentReplicationPerGroupStats(groupStats.getShardId(), combinedStats, groupStats.getRejectedRequestCount()); + } + + private Set<SegmentReplicationShardStats> computeSearchReplicaStats( + ShardId shardId, + Map<String, SegmentReplicationState> replicaStats + ) { + return replicaStats.values() + .stream() + .filter(segmentReplicationState -> segmentReplicationState.getShardRouting().shardId().equals(shardId)) + .filter(segmentReplicationState -> segmentReplicationState.getShardRouting().isSearchOnly()) + .map(segmentReplicationState -> { + ShardRouting
shardRouting = segmentReplicationState.getShardRouting(); + SegmentReplicationShardStats segmentReplicationStats = computeSegmentReplicationShardStats(shardRouting); + segmentReplicationStats.setCurrentReplicationState(segmentReplicationState); + return segmentReplicationStats; + }) + .collect(Collectors.toSet()); + } + + SegmentReplicationShardStats computeSegmentReplicationShardStats(ShardRouting shardRouting) { + ShardId shardId = shardRouting.shardId(); + SegmentReplicationState completedSegmentReplicationState = targetService.getlatestCompletedEventSegmentReplicationState(shardId); + SegmentReplicationState ongoingSegmentReplicationState = targetService.getOngoingEventSegmentReplicationState(shardId); + + return new SegmentReplicationShardStats( + shardRouting.allocationId().getId(), + 0, + calculateBytesRemainingToReplicate(ongoingSegmentReplicationState), + 0, + getCurrentReplicationLag(ongoingSegmentReplicationState), + getLastCompletedReplicationLag(completedSegmentReplicationState) + ); + } + + private SegmentReplicationState getSegmentReplicationState(ShardId shardId, boolean isActiveOnly) { + if (isActiveOnly) { + return targetService.getOngoingEventSegmentReplicationState(shardId); + } else { + return targetService.getSegmentReplicationState(shardId); + } + } + + private long calculateBytesRemainingToReplicate(SegmentReplicationState ongoingSegmentReplicationState) { + if (ongoingSegmentReplicationState == null) { + return 0; + } + return ongoingSegmentReplicationState.getIndex() + .fileDetails() + .stream() + .mapToLong(index -> index.length() - index.recovered()) + .sum(); + } + + private long getCurrentReplicationLag(SegmentReplicationState ongoingSegmentReplicationState) { + return ongoingSegmentReplicationState != null ? ongoingSegmentReplicationState.getTimer().time() : 0; + } + + private long getLastCompletedReplicationLag(SegmentReplicationState completedSegmentReplicationState) { + return completedSegmentReplicationState != null ? 
completedSegmentReplicationState.getTimer().time() : 0; + } } diff --git a/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java b/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java index 819e09312a0df..558b7370749d5 100644 --- a/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java +++ b/server/src/main/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeAction.java @@ -430,6 +430,13 @@ private ClusterState getStateFromLocalNode(GetTermVersionResponse termVersionRes if (remoteClusterStateService != null && termVersionResponse.isStatePresentInRemote()) { try { + logger.info( + () -> new ParameterizedMessage( + "Term version checker downloading full cluster state for term {}, version {}", + termVersion.getTerm(), + termVersion.getVersion() + ) + ); ClusterStateTermVersion clusterStateTermVersion = termVersionResponse.getClusterStateTermVersion(); Optional<ClusterMetadataManifest> clusterMetadataManifest = remoteClusterStateService .getClusterMetadataManifestByTermVersion( @@ -454,7 +461,7 @@ private ClusterState getStateFromLocalNode(GetTermVersionResponse termVersionRes return clusterStateFromRemote; } } catch (Exception e) { - logger.trace("Error while fetching from remote cluster state", e); + logger.error("Error while fetching from remote cluster state", e); } } return null; diff --git a/server/src/main/java/org/opensearch/action/support/clustermanager/term/TransportGetTermVersionAction.java b/server/src/main/java/org/opensearch/action/support/clustermanager/term/TransportGetTermVersionAction.java index 1cab739a20838..22861e0ba5c31 100644 --- a/server/src/main/java/org/opensearch/action/support/clustermanager/term/TransportGetTermVersionAction.java +++ b/server/src/main/java/org/opensearch/action/support/clustermanager/term/TransportGetTermVersionAction.java @@ -98,7 +98,7 @@ private GetTermVersionResponse buildResponse(GetTermVersionRequest request, Clus ClusterStateTermVersion termVersion = new ClusterStateTermVersion(state); if (discovery instanceof Coordinator) { Coordinator coordinator = (Coordinator) discovery; - if (coordinator.isRemotePublicationEnabled()) { + if (coordinator.canDownloadFullStateFromRemote()) { return new GetTermVersionResponse(termVersion, coordinator.isRemotePublicationEnabled()); } } diff --git a/server/src/main/java/org/opensearch/bootstrap/Security.java b/server/src/main/java/org/opensearch/bootstrap/Security.java index 53b1d990f9a0c..9f1dcbe8fb587 100644 --- a/server/src/main/java/org/opensearch/bootstrap/Security.java +++ b/server/src/main/java/org/opensearch/bootstrap/Security.java @@ -35,7 +35,9 @@ import org.opensearch.cli.Command; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.io.PathUtils; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; +import org.opensearch.common.transport.PortsRange; import org.opensearch.env.Environment; import org.opensearch.http.HttpTransportSettings; import org.opensearch.plugins.PluginInfo; @@ -71,6 +73,9 @@ import static org.opensearch.bootstrap.FilePermissionUtils.addDirectoryPath; import static org.opensearch.bootstrap.FilePermissionUtils.addSingleFilePath; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_PORT_DEFAULTS; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_PORTS; +import static
org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_SETTING; /** * Initializes SecurityManager with necessary permissions. @@ -402,6 +407,7 @@ static void addFilePermissions(Permissions policy, Environment environment) thro private static void addBindPermissions(Permissions policy, Settings settings) { addSocketPermissionForHttp(policy, settings); addSocketPermissionForTransportProfiles(policy, settings); + addSocketPermissionForAux(policy, settings); } /** @@ -416,6 +422,29 @@ private static void addSocketPermissionForHttp(final Permissions policy, final S addSocketPermissionForPortRange(policy, httpRange); } + /** + * Add dynamic {@link SocketPermission} based on AffixSetting AUX_TRANSPORT_PORTS. + * If an auxiliary transport type is enabled but has no corresponding port range setting, fall back to AUX_PORT_DEFAULTS. + * + * @param policy the {@link Permissions} instance to apply the dynamic {@link SocketPermission}s to. + * @param settings the {@link Settings} instance to read the auxiliary transport settings from + */ + private static void addSocketPermissionForAux(final Permissions policy, final Settings settings) { + Set<PortsRange> portsRanges = new HashSet<>(); + for (String auxType : AUX_TRANSPORT_TYPES_SETTING.get(settings)) { + Setting<PortsRange> auxTypePortSettings = AUX_TRANSPORT_PORTS.getConcreteSettingForNamespace(auxType); + if (auxTypePortSettings.exists(settings)) { + portsRanges.add(auxTypePortSettings.get(settings)); + } else { + portsRanges.add(new PortsRange(AUX_PORT_DEFAULTS)); + } + } + + for (PortsRange portRange : portsRanges) { + addSocketPermissionForPortRange(policy, portRange.getPortRangeString()); + } + } + /** * Add dynamic {@link SocketPermission} based on transport settings. This method will first check if there is a port range specified in * the transport profile specified by {@code profileSettings} and will fall back to {@code settings}.
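A compact sketch of the fallback rule `addSocketPermissionForAux` encodes: an explicit `aux.transport.<type>.ports` range wins, otherwise the shared `9400-9500` default applies. The flat string map and the transport type names are hypothetical stand-ins for the `Settings`/`AffixSetting` machinery:

```java
import java.util.Map;

class AuxPortFallbackSketch {
    static final String AUX_PORT_DEFAULTS = "9400-9500";

    // Hypothetical flattened settings map standing in for Settings + AffixSetting lookup.
    static String portRangeFor(String auxType, Map<String, String> settings) {
        // Explicit per-type range wins; otherwise fall back to the shared default range.
        return settings.getOrDefault("aux.transport." + auxType + ".ports", AUX_PORT_DEFAULTS);
    }

    public static void main(String[] args) {
        Map<String, String> settings = Map.of("aux.transport.my-grpc.ports", "9342-9352");
        System.out.println(portRangeFor("my-grpc", settings));  // 9342-9352 (explicit)
        System.out.println(portRangeFor("my-other", settings)); // 9400-9500 (default)
    }
}
```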
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java index 6fee2037501e7..ef0f49b8ae394 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java @@ -1906,4 +1906,12 @@ public boolean isRemotePublicationEnabled() { } return false; } + + public boolean canDownloadFullStateFromRemote() { + if (remoteClusterStateService != null) { + return remoteClusterStateService.isRemotePublicationEnabled() && remoteClusterStateService.canDownloadFromRemoteForReadAPI(); + } + return false; + } + } diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java index 7275d72f2db9f..4ad5b80038048 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java @@ -258,7 +258,7 @@ PublishWithJoinResponse handleIncomingRemotePublishRequest(RemotePublishRequest } if (applyFullState == true) { - logger.debug( + logger.info( () -> new ParameterizedMessage( "Downloading full cluster state for term {}, version {}, stateUUID {}", manifest.getClusterTerm(), diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java index a05938c176678..7999faece52ca 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java @@ -247,11 +247,17 @@ void balance() { final Map<String, Integer> nodePrimaryShardCount = calculateNodePrimaryShardCount(remoteRoutingNodes); int totalPrimaryShardCount = nodePrimaryShardCount.values().stream().reduce(0, Integer::sum); - totalPrimaryShardCount += routingNodes.unassigned().getNumPrimaries(); - int avgPrimaryPerNode = (totalPrimaryShardCount + routingNodes.size() - 1) / routingNodes.size(); + int unassignedRemotePrimaryShardCount = 0; + for (ShardRouting shard : routingNodes.unassigned()) { + if (RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) && shard.primary()) { + unassignedRemotePrimaryShardCount++; + } + } + totalPrimaryShardCount += unassignedRemotePrimaryShardCount; + final int avgPrimaryPerNode = (totalPrimaryShardCount + remoteRoutingNodes.size() - 1) / remoteRoutingNodes.size(); - ArrayDeque<RoutingNode> sourceNodes = new ArrayDeque<>(); - ArrayDeque<RoutingNode> targetNodes = new ArrayDeque<>(); + final ArrayDeque<RoutingNode> sourceNodes = new ArrayDeque<>(); + final ArrayDeque<RoutingNode> targetNodes = new ArrayDeque<>(); for (RoutingNode node : remoteRoutingNodes) { if (nodePrimaryShardCount.get(node.nodeId()) > avgPrimaryPerNode) { sourceNodes.add(node);
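The rebalancer hunk just above fixes two inputs to the same formula: only unassigned primaries in the remote-capable pool are counted, and the divisor is the remote node count rather than the size of the full routing table. The average itself is an integer ceiling division; a small worked sketch:

```java
class AvgPrimaryPerNodeSketch {
    // Integer ceiling division: ceil(total / nodes) without floating point.
    static int avgPrimaryPerNode(int totalPrimaryShardCount, int remoteNodeCount) {
        return (totalPrimaryShardCount + remoteNodeCount - 1) / remoteNodeCount;
    }

    public static void main(String[] args) {
        // 7 remote primaries over 3 remote-capable nodes -> ceil(7/3) = 3.
        System.out.println(avgPrimaryPerNode(7, 3)); // 3
        // Dividing by all 6 routing nodes instead (the old inputs) gives ceil(7/6) = 2,
        // so a remote node holding 3 primaries would wrongly be treated as a rebalance source.
        System.out.println(avgPrimaryPerNode(7, 6)); // 2
    }
}
```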
diff --git a/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java index d0b6f812e9ee2..6489f3cb33ce0 100644 --- a/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java @@ -233,6 +233,13 @@ public ClusterState state() { return clusterState; } + /** + * Returns true if the appliedClusterState is not null + */ + public boolean isStateInitialised() { + return this.state.get() != null; + } + /** * Returns true if the appliedClusterState is not null */ diff --git a/server/src/main/java/org/opensearch/cluster/service/ClusterService.java b/server/src/main/java/org/opensearch/cluster/service/ClusterService.java index 1a79161d223e2..b4f2250f6dec9 100644 --- a/server/src/main/java/org/opensearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/opensearch/cluster/service/ClusterService.java @@ -183,6 +183,13 @@ public ClusterState state() { return clusterApplierService.state(); } + /** + * Returns true if the state in appliedClusterState is not null + */ + public boolean isStateInitialised() { + return clusterApplierService.isStateInitialised(); + } + /** * The state that is persisted to store but may not be applied to cluster. * @return ClusterState diff --git a/server/src/main/java/org/opensearch/common/network/NetworkModule.java b/server/src/main/java/org/opensearch/common/network/NetworkModule.java index bb8da190a6f35..5d55fb52c323d 100644 --- a/server/src/main/java/org/opensearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/opensearch/common/network/NetworkModule.java @@ -80,6 +80,9 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_KEY; +import static org.opensearch.plugins.NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_SETTING; + /** * A module to handle registering and binding all network related classes. * @@ -157,6 +160,8 @@ public final class NetworkModule { private final Map<String, Supplier<Transport>> transportFactories = new HashMap<>(); private final Map<String, Supplier<HttpServerTransport>> transportHttpFactories = new HashMap<>(); + private final Map<String, Supplier<AuxTransport>> transportAuxFactories = new HashMap<>(); + private final List<TransportInterceptor> transportInterceptors = new ArrayList<>(); /** @@ -222,6 +227,18 @@ public NetworkModule( registerHttpTransport(entry.getKey(), entry.getValue()); } + Map<String, Supplier<AuxTransport>> auxTransportFactory = plugin.getAuxTransports( + settings, + threadPool, + circuitBreakerService, + networkService, + clusterSettings, + tracer + ); + for (Map.Entry<String, Supplier<AuxTransport>> entry : auxTransportFactory.entrySet()) { + registerAuxTransport(entry.getKey(), entry.getValue()); + } + Map<String, Supplier<Transport>> transportFactory = plugin.getTransports( settings, threadPool, @@ -305,6 +322,12 @@ private void registerHttpTransport(String key, Supplier<HttpServerTransport> fac } } + private void registerAuxTransport(String key, Supplier<AuxTransport> factory) { + if (transportAuxFactories.putIfAbsent(key, factory) != null) { + throw new IllegalArgumentException("transport for name: " + key + " is already registered"); + } + } + /** * Register an allocation command. *

@@ -346,6 +369,25 @@ public Supplier<HttpServerTransport> getHttpServerTransportSupplier() { return factory; } + /** + * Optional client/server transports that run in parallel to HttpServerTransport. + * Multiple transport types can be registered and enabled via AUX_TRANSPORT_TYPES_SETTING. + * An IllegalStateException is thrown if a transport type is enabled but not registered. + */ + public List<AuxTransport> getAuxServerTransportList() { + List<AuxTransport> serverTransportSuppliers = new ArrayList<>(); + + for (String transportType : AUX_TRANSPORT_TYPES_SETTING.get(settings)) { + final Supplier<AuxTransport> factory = transportAuxFactories.get(transportType); + if (factory == null) { + throw new IllegalStateException("Unsupported " + AUX_TRANSPORT_TYPES_KEY + " [" + transportType + "]"); + } + serverTransportSuppliers.add(factory.get()); + } + + return serverTransportSuppliers; + } + public Supplier<Transport> getTransportSupplier() { final String name; if (TRANSPORT_TYPE_SETTING.exists(settings)) { diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 04a19e32c4ebc..c27efa080ac4e 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -149,6 +149,7 @@ import org.opensearch.node.resource.tracker.ResourceTrackerSettings; import org.opensearch.persistent.PersistentTasksClusterService; import org.opensearch.persistent.decider.EnableAssignmentDecider; +import org.opensearch.plugins.NetworkPlugin; import org.opensearch.plugins.PluginsService; import org.opensearch.ratelimitting.admissioncontrol.AdmissionControlSettings; import org.opensearch.ratelimitting.admissioncontrol.settings.CpuBasedAdmissionControllerSettings; @@ -362,6 +363,7 @@ public void apply(Settings value, Settings current, Settings previous) { NetworkModule.TRANSPORT_SSL_DUAL_MODE_ENABLED, NetworkModule.TRANSPORT_SSL_ENFORCE_HOSTNAME_VERIFICATION, NetworkModule.TRANSPORT_SSL_ENFORCE_HOSTNAME_VERIFICATION_RESOLVE_HOST_NAME, + NetworkPlugin.AuxTransport.AUX_TRANSPORT_TYPES_SETTING, HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS, HttpTransportSettings.SETTING_CORS_ENABLED, HttpTransportSettings.SETTING_CORS_MAX_AGE, @@ -738,6 +740,8 @@ public void apply(Settings value, Settings current, Settings previous) { RemoteClusterStateCleanupManager.REMOTE_CLUSTER_STATE_CLEANUP_INTERVAL_SETTING, RemoteClusterStateService.REMOTE_CLUSTER_STATE_ENABLED_SETTING, RemoteClusterStateService.REMOTE_PUBLICATION_SETTING, + RemoteClusterStateService.REMOTE_STATE_DOWNLOAD_TO_SERVE_READ_API, + INDEX_METADATA_UPLOAD_TIMEOUT_SETTING, GLOBAL_METADATA_UPLOAD_TIMEOUT_SETTING, METADATA_MANIFEST_UPLOAD_TIMEOUT_SETTING,
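Tying the two registration points above together, the aux transport list is resolved at node startup from the per-plugin factories plus the `aux.transport.types` values now validated through `ClusterSettings`. A sketch of that resolution, with `Object` standing in for `AuxTransport` and a plain map for the module's registry:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

class AuxTransportResolutionSketch {
    // registry: transport type -> factory, as registerAuxTransport fills it from each plugin.
    static List<Object> resolve(List<String> enabledTypes, Map<String, Supplier<Object>> registry) {
        List<Object> transports = new ArrayList<>();
        for (String type : enabledTypes) {
            Supplier<Object> factory = registry.get(type);
            if (factory == null) {
                // Mirrors the IllegalStateException for an enabled but unregistered type.
                throw new IllegalStateException("Unsupported aux.transport.types [" + type + "]");
            }
            transports.add(factory.get());
        }
        return transports;
    }
}
```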
diff --git a/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java b/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java index c5fc6d5cae6a7..778ab3e56cf76 100644 --- a/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java +++ b/server/src/main/java/org/opensearch/gateway/remote/RemoteClusterStateService.java @@ -129,6 +129,7 @@ public class RemoteClusterStateService implements Closeable { * Gates the functionality of remote publication. */ public static final String REMOTE_PUBLICATION_SETTING_KEY = "cluster.remote_store.publication.enabled"; + public static final String REMOTE_STATE_DOWNLOAD_TO_SERVE_READ_API_KEY = "cluster.remote_state.download.serve_read_api.enabled"; public static final Setting<Boolean> REMOTE_PUBLICATION_SETTING = Setting.boolSetting( REMOTE_PUBLICATION_SETTING_KEY, @@ -137,6 +138,13 @@ public class RemoteClusterStateService implements Closeable { Property.Dynamic ); + public static final Setting<Boolean> REMOTE_STATE_DOWNLOAD_TO_SERVE_READ_API = Setting.boolSetting( + REMOTE_STATE_DOWNLOAD_TO_SERVE_READ_API_KEY, + true, + Property.NodeScope, + Property.Dynamic + ); + /** * Used to specify if cluster state metadata should be published to remote store */ @@ -235,6 +243,9 @@ public static RemoteClusterStateValidationMode parseString(String mode) { + "indices, coordination metadata updated : [{}], settings metadata updated : [{}], templates metadata " + "updated : [{}], custom metadata updated : [{}], indices routing updated : [{}]"; private volatile AtomicBoolean isPublicationEnabled; + + private volatile AtomicBoolean downloadFromRemoteForReadAPI; + private final String remotePathPrefix; private final RemoteClusterStateCache remoteClusterStateCache; @@ -281,6 +292,8 @@ public RemoteClusterStateService( && RemoteStoreNodeAttribute.isRemoteRoutingTableConfigured(settings) ); clusterSettings.addSettingsUpdateConsumer(REMOTE_PUBLICATION_SETTING, this::setRemotePublicationSetting); + this.downloadFromRemoteForReadAPI = new AtomicBoolean(clusterSettings.get(REMOTE_STATE_DOWNLOAD_TO_SERVE_READ_API)); + clusterSettings.addSettingsUpdateConsumer(REMOTE_STATE_DOWNLOAD_TO_SERVE_READ_API, this::setRemoteDownloadForReadAPISetting); this.remotePathPrefix = CLUSTER_REMOTE_STORE_STATE_PATH_PREFIX.get(settings); this.remoteRoutingTableService = RemoteRoutingTableServiceFactory.getService( repositoriesService, @@ -1124,6 +1137,14 @@ private void setRemotePublicationSetting(boolean remotePublicationSetting) { } } + private void setRemoteDownloadForReadAPISetting(boolean remoteDownloadForReadAPISetting) { + this.downloadFromRemoteForReadAPI.set(remoteDownloadForReadAPISetting); + } + + public boolean canDownloadFromRemoteForReadAPI() { + return this.downloadFromRemoteForReadAPI.get(); + } + // Package private for unit test RemoteRoutingTableService getRemoteRoutingTableService() { return this.remoteRoutingTableService; @@ -1473,8 +1494,22 @@ public ClusterState getClusterStateForManifest( try { ClusterState stateFromCache = remoteClusterStateCache.getState(clusterName, manifest); if (stateFromCache != null) { + logger.trace( + () -> new ParameterizedMessage( + "Found cluster state in cache for term {} and version {}", + manifest.getClusterTerm(), + manifest.getStateVersion() + ) + ); return stateFromCache; } + logger.info( + () -> new ParameterizedMessage( + "Cluster state not found in cache for term {} and version {}", + manifest.getClusterTerm(), + manifest.getStateVersion() + ) + ); final ClusterState clusterState; final long startTimeNanos = relativeTimeNanosSupplier.getAsLong();
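The new serve-read-API flag follows the usual pattern for dynamic boolean settings seen in this file: cache the value in an `AtomicBoolean`, register a settings-update consumer, and read lock-free on the hot path. A minimal self-contained sketch of that pattern:

```java
import java.util.concurrent.atomic.AtomicBoolean;

class DynamicFlagSketch {
    // Cached copy of the dynamic setting; reads stay lock-free on hot paths.
    private final AtomicBoolean downloadFromRemoteForReadAPI = new AtomicBoolean(true);

    // In the real service this setter is wired up via
    // clusterSettings.addSettingsUpdateConsumer(SETTING, this::setDownloadForReadAPI).
    void setDownloadForReadAPI(boolean enabled) {
        downloadFromRemoteForReadAPI.set(enabled);
    }

    boolean canDownloadFromRemoteForReadAPI() {
        return downloadFromRemoteForReadAPI.get();
    }
}
```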
diff --git a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java index 991fbf12072be..7f78ae0b9d2ff 100644 --- a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java @@ -62,6 +62,7 @@ import org.opensearch.telemetry.tracing.channels.TraceableRestChannel; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.BindTransportException; +import org.opensearch.transport.Transport; import java.io.IOException; import java.net.InetAddress; @@ -71,7 +72,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -192,7 +192,25 @@ protected void bindServer() { throw new BindTransportException("Failed to resolve publish address", e); } - final int publishPort = resolvePublishPort(settings, boundAddresses, publishInetAddress); + final int publishPort = Transport.resolveTransportPublishPort( + SETTING_HTTP_PUBLISH_PORT.get(settings), + boundAddresses, + publishInetAddress + ); + if (publishPort < 0) { + throw new BindHttpException( + "Failed to auto-resolve http publish port, multiple bound addresses " + + boundAddresses + + " with distinct ports and none of them matched the publish address (" + + publishInetAddress + + "). " + + "Please specify a unique port by setting " + + SETTING_HTTP_PORT.getKey() + + " or " + + SETTING_HTTP_PUBLISH_PORT.getKey() + ); + } + TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), publishAddress); logger.info("{}", boundAddress); @@ -258,47 +276,6 @@ protected void doClose() {} */ protected abstract void stopInternal(); - // package private for tests - static int resolvePublishPort(Settings settings, List<TransportAddress> boundAddresses, InetAddress publishInetAddress) { - int publishPort = SETTING_HTTP_PUBLISH_PORT.get(settings); - - if (publishPort < 0) { - for (TransportAddress boundAddress : boundAddresses) { - InetAddress boundInetAddress = boundAddress.address().getAddress(); - if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { - publishPort = boundAddress.getPort(); - break; - } - } - } - - // if no matching boundAddress found, check if there is a unique port for all bound addresses - if (publishPort < 0) { - final Set<Integer> ports = new HashSet<>(); - for (TransportAddress boundAddress : boundAddresses) { - ports.add(boundAddress.getPort()); - } - if (ports.size() == 1) { - publishPort = ports.iterator().next(); - } - } - - if (publishPort < 0) { - throw new BindHttpException( - "Failed to auto-resolve http publish port, multiple bound addresses " - + boundAddresses - + " with distinct ports and none of them matched the publish address (" - + publishInetAddress - + ")." - + "Please specify a unique port by setting " - + SETTING_HTTP_PORT.getKey() - + " or " - + SETTING_HTTP_PUBLISH_PORT.getKey() - ); - } - return publishPort; - } - public void onException(HttpChannel channel, Exception e) { channel.handleException(e); if (lifecycle.started() == false) {
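With the removal above, HTTP now shares the transport-layer algorithm (shown further below): prefer the port of a bound address that matches the publish address or a wildcard bind, else fall back to a port common to all bound addresses, else report failure. A runnable sketch of the shared logic:

```java
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

class PublishPortSketch {
    static int resolvePublishPort(int configured, List<InetSocketAddress> bound, InetAddress publish) {
        if (configured >= 0) return configured; // an explicit setting always wins
        // 1) a bound address that is a wildcard bind or matches the publish address
        for (InetSocketAddress address : bound) {
            InetAddress boundAddr = address.getAddress();
            if (boundAddr.isAnyLocalAddress() || boundAddr.equals(publish)) return address.getPort();
        }
        // 2) a single port shared by every bound address
        Set<Integer> ports = bound.stream().map(InetSocketAddress::getPort).collect(Collectors.toSet());
        return ports.size() == 1 ? ports.iterator().next() : -1; // -1: caller raises the bind exception
    }

    public static void main(String[] args) throws UnknownHostException {
        List<InetSocketAddress> bound = List.of(
            new InetSocketAddress("127.0.0.1", 9200),
            new InetSocketAddress("10.0.0.5", 9201)
        );
        // No bound address matches and the ports differ -> -1, surfaced as BindHttpException upstream.
        System.out.println(resolvePublishPort(-1, bound, InetAddress.getByName("192.168.1.9")));
    }
}
```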
" - + "Please specify a unique port by setting " - + SETTING_HTTP_PORT.getKey() - + " or " - + SETTING_HTTP_PUBLISH_PORT.getKey() - ); - } - return publishPort; - } - public void onException(HttpChannel channel, Exception e) { channel.handleException(e); if (lifecycle.started() == false) { diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java index 904d6a7aba5c6..ca52d8bf4bca0 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java @@ -33,9 +33,11 @@ import org.opensearch.index.compositeindex.datacube.startree.builder.StarTreesBuilder; import org.opensearch.index.compositeindex.datacube.startree.index.CompositeIndexValues; import org.opensearch.index.compositeindex.datacube.startree.index.StarTreeValues; +import org.opensearch.index.fielddata.IndexNumericFieldData; +import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.opensearch.index.mapper.CompositeMappedFieldType; import org.opensearch.index.mapper.DocCountFieldMapper; -import org.opensearch.index.mapper.KeywordFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import java.io.IOException; @@ -44,6 +46,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -262,22 +265,38 @@ public SortedSetDocValues getSortedSet(FieldInfo field) { return DocValues.emptySortedSet(); } }); - } - // TODO : change this logic to evaluate for sortedNumericField specifically - else { + } else if (isSortedNumericField(compositeField)) { fieldProducerMap.put(compositeField, new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) { return DocValues.emptySortedNumeric(); } }); + } else { + throw new IllegalStateException( + String.format(Locale.ROOT, "Unsupported DocValues field associated with the composite field : %s", compositeField) + ); } } compositeFieldSet.remove(compositeField); } private boolean isSortedSetField(String field) { - return mapperService.fieldType(field) instanceof KeywordFieldMapper.KeywordFieldType; + MappedFieldType ft = mapperService.fieldType(field); + assert ft.isAggregatable(); + return ft.fielddataBuilder( + "", + () -> { throw new UnsupportedOperationException("SearchLookup not available"); } + ) instanceof SortedSetOrdinalsIndexFieldData.Builder; + } + + private boolean isSortedNumericField(String field) { + MappedFieldType ft = mapperService.fieldType(field); + assert ft.isAggregatable(); + return ft.fielddataBuilder( + "", + () -> { throw new UnsupportedOperationException("SearchLookup not available"); } + ) instanceof IndexNumericFieldData.Builder; } @Override @@ -370,5 +389,4 @@ private static SegmentWriteState getSegmentWriteState(SegmentWriteState segmentW segmentWriteState.segmentSuffix ); } - } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java index e834706e2fa9d..b1e78d78d3ad2 100644 --- 
a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionFactory.java @@ -24,7 +24,8 @@ import java.util.stream.Collectors; import static org.opensearch.index.compositeindex.datacube.DateDimension.CALENDAR_INTERVALS; -import static org.opensearch.index.compositeindex.datacube.KeywordDimension.KEYWORD; +import static org.opensearch.index.compositeindex.datacube.IpDimension.IP; +import static org.opensearch.index.compositeindex.datacube.OrdinalDimension.ORDINAL; /** * Dimension factory class mainly used to parse and create dimension from the mappings @@ -44,8 +45,10 @@ public static Dimension parseAndCreateDimension( return parseAndCreateDateDimension(name, dimensionMap, c); case NumericDimension.NUMERIC: return new NumericDimension(name); - case KEYWORD: - return new KeywordDimension(name); + case ORDINAL: + return new OrdinalDimension(name); + case IP: + return new IpDimension(name); default: throw new IllegalArgumentException( String.format(Locale.ROOT, "unsupported field type associated with dimension [%s] as part of star tree field", name) @@ -69,8 +72,10 @@ public static Dimension parseAndCreateDimension( return parseAndCreateDateDimension(name, dimensionMap, c); case NUMERIC: return new NumericDimension(name); - case KEYWORD: - return new KeywordDimension(name); + case ORDINAL: + return new OrdinalDimension(name); + case IP: + return new IpDimension(name); default: throw new IllegalArgumentException( String.format(Locale.ROOT, "unsupported field type associated with star tree dimension [%s]", name) diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java index d327f8ca1fa1e..f7911e72f36fc 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/DimensionType.java @@ -30,8 +30,14 @@ public enum DimensionType { DATE, /** - * Represents a keyword dimension type. - * This is used for dimensions that contain keyword ordinals. + * Represents dimension types which uses ordinals. + * This is used for dimensions that contain sortedSet ordinals. */ - KEYWORD + ORDINAL, + + /** + * Represents an IP dimension type. + * This is used for dimensions that contain IP ordinals. + */ + IP } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java new file mode 100644 index 0000000000000..9c3682bd2e0ea --- /dev/null +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/IpDimension.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + package org.opensearch.index.compositeindex.datacube; + import org.apache.lucene.index.DocValuesType; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.mapper.CompositeDataCubeFieldType; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; + +/** + * Composite index IP dimension class + * + * @opensearch.experimental + */ +@ExperimentalApi +public class IpDimension implements Dimension { + public static final String IP = "ip"; + private final String field; + + public IpDimension(String field) { + this.field = field; + } + + @Override + public String getField() { + return field; + } + + @Override + public int getNumSubDimensions() { + return 1; + } + + @Override + public void setDimensionValues(Long value, Consumer<Long> dimSetter) { + // This will set the IP dimension value's ordinal + dimSetter.accept(value); + } + + @Override + public List<String> getSubDimensionNames() { + return List.of(field); + } + + @Override + public DocValuesType getDocValuesType() { + return DocValuesType.SORTED_SET; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CompositeDataCubeFieldType.NAME, field); + builder.field(CompositeDataCubeFieldType.TYPE, IP); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IpDimension dimension = (IpDimension) o; + return Objects.equals(field, dimension.getField()); + } + + @Override + public int hashCode() { + return Objects.hash(field); + } +} diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/OrdinalDimension.java similarity index 87% rename from server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java rename to server/src/main/java/org/opensearch/index/compositeindex/datacube/OrdinalDimension.java index 58e248fd548d6..9cb4cd78bdaac 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/KeywordDimension.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/OrdinalDimension.java @@ -24,11 +24,11 @@ * @opensearch.experimental */ @ExperimentalApi -public class KeywordDimension implements Dimension { - public static final String KEYWORD = "keyword"; +public class OrdinalDimension implements Dimension { + public static final String ORDINAL = "ordinal"; private final String field; - public KeywordDimension(String field) { + public OrdinalDimension(String field) { this.field = field; } @@ -62,7 +62,7 @@ public DocValuesType getDocValuesType() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CompositeDataCubeFieldType.NAME, field); - builder.field(CompositeDataCubeFieldType.TYPE, KEYWORD); + builder.field(CompositeDataCubeFieldType.TYPE, ORDINAL); builder.endObject(); return builder; } @@ -71,7 +71,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - KeywordDimension dimension = (KeywordDimension) o; + OrdinalDimension dimension = (OrdinalDimension) o; return Objects.equals(field, dimension.getField()); }
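Both the new `IpDimension` and the renamed `OrdinalDimension` are single sub-dimension, SORTED_SET-backed dimensions: by the time `setDimensionValues` runs, the raw keyword or IP bytes have already been swapped for a doc-values ordinal, so the dimension just forwards it. A usage sketch with a hypothetical ordinal value:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

class OrdinalDimensionSketch {
    // Mirrors Dimension#setDimensionValues for ordinal-backed dimensions:
    // the Long is a SortedSet ordinal, not the raw keyword/IP bytes.
    static void setDimensionValues(Long ordinal, Consumer<Long> dimSetter) {
        dimSetter.accept(ordinal);
    }

    public static void main(String[] args) {
        List<Long> dims = new ArrayList<>();
        setDimensionValues(3L, dims::add); // 3 = hypothetical ordinal for "10.0.0.5"
        System.out.println(dims); // [3]
    }
}
```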
diff --git a/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java b/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java index 827792cdb1091..48da9b30ac1b0 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocValueFetcher.java @@ -43,6 +43,7 @@ import java.util.List; import static java.util.Collections.emptyList; +import static org.opensearch.index.mapper.FlatObjectFieldMapper.DOC_VALUE_NO_MATCH; /** * Value fetcher that loads from doc values. @@ -70,7 +71,10 @@ public List<Object> fetchValues(SourceLookup lookup) throws IOException { } List<Object> result = new ArrayList<>(leaf.docValueCount()); for (int i = 0, count = leaf.docValueCount(); i < count; ++i) { - result.add(leaf.nextValue()); + Object value = leaf.nextValue(); + if (value != DOC_VALUE_NO_MATCH) { + result.add(value); + } } return result; } diff --git a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java index 0ccdb40f9d33a..13063a4761006 100644 --- a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java @@ -28,6 +28,7 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.JsonToStringXContentParser; import org.opensearch.core.common.ParsingException; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; @@ -36,11 +37,13 @@ import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.opensearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.support.CoreValuesSourceType; import org.opensearch.search.lookup.SearchLookup; import java.io.IOException; import java.io.UncheckedIOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -63,6 +66,7 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper { public static final String CONTENT_TYPE = "flat_object"; + public static final Object DOC_VALUE_NO_MATCH = new Object(); /** * In flat_object field mapper, field type is similar to keyword field type @@ -272,7 +276,7 @@ NamedAnalyzer normalizer() { @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) { failIfNoDocValues(); - return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.BYTES); + return new SortedSetOrdinalsIndexFieldData.Builder(valueFieldType().name(), CoreValuesSourceType.BYTES); } @Override @@ -304,6 +308,30 @@ protected String parseSourceValue(Object value) { }; } + @Override + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); + } + if (timeZone != null) { + throw new IllegalArgumentException( + "Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones" + ); + } + if (mappedFieldTypeName != null) { + return new
FlatObjectDocValueFormat(mappedFieldTypeName + DOT_SYMBOL + name() + EQUAL_SYMBOL); + } else { + throw new IllegalArgumentException( + "Field [" + name() + "] of type [" + typeName() + "] does not support doc_value in root field" + ); + } + } + + @Override + public boolean isAggregatable() { + return false; + } + @Override public Object valueForDisplay(Object value) { if (value == null) { @@ -530,6 +558,39 @@ public Query wildcardQuery( return valueFieldType().wildcardQuery(rewriteValue(value), method, caseInsensitve, context); } + /** + * A doc_value formatter for flat_object field. + */ + public class FlatObjectDocValueFormat implements DocValueFormat { + private static final String NAME = "flat_object"; + private final String prefix; + + public FlatObjectDocValueFormat(String prefix) { + this.prefix = prefix; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) {} + + @Override + public Object format(BytesRef value) { + String parsedValue = inputToString(value); + if (parsedValue.startsWith(prefix) == false) { + return DOC_VALUE_NO_MATCH; + } + return parsedValue.substring(prefix.length()); + } + + @Override + public BytesRef parseBytesRef(String value) { + return new BytesRef((String) valueFieldType.rewriteForDocValue(rewriteValue(value))); + } + } } private final ValueFieldMapper valueFieldMapper; diff --git a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java index e23a48f94f450..1283aa302c111 100644 --- a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java @@ -52,6 +52,7 @@ import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.network.InetAddresses; import org.opensearch.common.network.NetworkAddress; +import org.opensearch.index.compositeindex.datacube.DimensionType; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; @@ -68,6 +69,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.function.BiFunction; import java.util.function.Supplier; @@ -161,6 +163,11 @@ public IpFieldMapper build(BuilderContext context) { ); } + @Override + public Optional<DimensionType> getSupportedDataCubeDimensionType() { + return Optional.of(DimensionType.IP); + } + } public static final TypeParser PARSER = new TypeParser((n, c) -> { diff --git a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java index df14a5811f6a0..90e43c818e137 100644 --- a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java @@ -259,7 +259,7 @@ public KeywordFieldMapper build(BuilderContext context) { @Override public Optional<DimensionType> getSupportedDataCubeDimensionType() { - return Optional.of(DimensionType.KEYWORD); + return Optional.of(DimensionType.ORDINAL); } }
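The `FlatObjectDocValueFormat` above works on the prefixed encoding of the shared `_valueAndPath` field: a stored value that does not start with this subfield's prefix belongs to a different subfield and is returned as `DOC_VALUE_NO_MATCH`, which the `DocValueFetcher` change earlier silently drops. A sketch of that contract (the `catalog.title=` prefix shape is illustrative):

```java
class FlatObjectPrefixSketch {
    static final Object DOC_VALUE_NO_MATCH = new Object();

    // prefix is built as mappedFieldTypeName + "." + name() + "=" in the mapper above.
    static Object format(String storedValue, String prefix) {
        if (!storedValue.startsWith(prefix)) {
            return DOC_VALUE_NO_MATCH; // belongs to another subfield; the fetcher drops it
        }
        return storedValue.substring(prefix.length());
    }

    public static void main(String[] args) {
        System.out.println(format("catalog.title=Lucene", "catalog.title="));                      // Lucene
        System.out.println(format("catalog.author=Doug", "catalog.title=") == DOC_VALUE_NO_MATCH); // true
    }
}
```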
diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index c78ee6711dcda..704a23890b07a 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -1216,6 +1216,9 @@ protected Node( SearchExecutionStatsCollector.makeWrapper(responseCollectorService) ); final HttpServerTransport httpServerTransport = newHttpTransport(networkModule); + + pluginComponents.addAll(newAuxTransports(networkModule)); + final IndexingPressureService indexingPressureService = new IndexingPressureService(settings, clusterService); // Going forward, IndexingPressureService will have required constructs for exposing listeners/interfaces for plugin // development. Then we can deprecate Getter and Setter for IndexingPressureService in ClusterService (#478). @@ -2113,6 +2116,10 @@ protected HttpServerTransport newHttpTransport(NetworkModule networkModule) { return networkModule.getHttpServerTransportSupplier().get(); } + protected List<AuxTransport> newAuxTransports(NetworkModule networkModule) { + return networkModule.getAuxServerTransportList(); + } + private static class LocalNodeFactory implements Function<BoundTransportAddress, DiscoveryNode> { private final SetOnce<DiscoveryNode> localNode = new SetOnce<>(); private final String persistentNodeId; diff --git a/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java b/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java index 138ef6f71280d..516aa94534f94 100644 --- a/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java @@ -31,9 +31,13 @@ package org.opensearch.plugins; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; +import org.opensearch.common.transport.PortsRange; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; @@ -49,8 +53,12 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.function.Supplier; +import static java.util.Collections.emptyList; +import static org.opensearch.common.settings.Setting.affixKeySetting; + /** * Plugin for extending network and transport related classes * @@ -58,6 +66,49 @@ */ public interface NetworkPlugin { + /** + * Auxiliary transports are lifecycle components with an associated port range. + * These pluggable client/server transport implementations have their lifecycle managed by Node. + * + * Auxiliary transports are additionally defined by a port range on which they bind. Opening permissions on these + * ports is awkward as {@link org.opensearch.bootstrap.Security} is configured prior to Node initialization during + * bootstrap. To allow pluggable AuxTransports access to configurable port ranges we require the port range be provided + * through an {@link org.opensearch.common.settings.Setting.AffixSetting} of the form 'AUX_SETTINGS_PREFIX.{aux-transport-key}.ports'.
+ */ + abstract class AuxTransport extends AbstractLifecycleComponent { + public static final String AUX_SETTINGS_PREFIX = "aux.transport."; + public static final String AUX_TRANSPORT_TYPES_KEY = AUX_SETTINGS_PREFIX + "types"; + public static final String AUX_PORT_DEFAULTS = "9400-9500"; + public static final Setting.AffixSetting<PortsRange> AUX_TRANSPORT_PORTS = affixKeySetting( + AUX_SETTINGS_PREFIX, + "ports", + key -> new Setting<>(key, AUX_PORT_DEFAULTS, PortsRange::new, Setting.Property.NodeScope) + ); + + public static final Setting<List<String>> AUX_TRANSPORT_TYPES_SETTING = Setting.listSetting( + AUX_TRANSPORT_TYPES_KEY, + emptyList(), + Function.identity(), + Setting.Property.NodeScope + ); + } + + /** + * Auxiliary transports are optional and run in parallel to the default HttpServerTransport. + * Returns a map of AuxTransport suppliers. + */ + @ExperimentalApi + default Map<String, Supplier<AuxTransport>> getAuxTransports( + Settings settings, + ThreadPool threadPool, + CircuitBreakerService circuitBreakerService, + NetworkService networkService, + ClusterSettings clusterSettings, + Tracer tracer + ) { + return Collections.emptyMap(); + } + /** * Returns a list of {@link TransportInterceptor} instances that are used to intercept incoming and outgoing * transport (inter-node) requests. This must not return null
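A minimal plugin-side sketch against this extension point; the class name and the `my-aux` type key are hypothetical, `doStart`/`doStop`/`doClose` come from `AbstractLifecycleComponent`, and import paths follow the diff above (they may differ across versions):

```java
import java.io.IOException;
import java.util.Map;
import java.util.function.Supplier;

import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.plugins.NetworkPlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.telemetry.tracing.Tracer;
import org.opensearch.threadpool.ThreadPool;

public class MyAuxPlugin extends Plugin implements NetworkPlugin {

    // Hypothetical aux transport: binds no real server, just shows the lifecycle contract.
    static class MyAuxTransport extends NetworkPlugin.AuxTransport {
        @Override
        protected void doStart() { /* bind a server inside aux.transport.my-aux.ports */ }

        @Override
        protected void doStop() { /* stop accepting new connections */ }

        @Override
        protected void doClose() throws IOException { /* release sockets and threads */ }
    }

    @Override
    public Map<String, Supplier<AuxTransport>> getAuxTransports(
        Settings settings,
        ThreadPool threadPool,
        CircuitBreakerService circuitBreakerService,
        NetworkService networkService,
        ClusterSettings clusterSettings,
        Tracer tracer
    ) {
        // Only instantiated when node settings enable it: aux.transport.types: [my-aux]
        return Map.of("my-aux", MyAuxTransport::new);
    }
}
```

Enabling it would then be a node-settings concern, e.g. `aux.transport.types: [my-aux]` plus an optional `aux.transport.my-aux.ports: 9400-9410`.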
diff --git a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java index a82c05dab0b44..998ae5e4791b7 100644 --- a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java @@ -2994,7 +2994,12 @@ public String startVerification() { */ private BlobContainer testContainer(String seed) { BlobPath testBlobPath; - if (prefixModeVerification == true) { + + if (prefixModeVerification == true + && (clusterService.isStateInitialised() == false + || clusterService.state().nodes().getMinNodeVersion().onOrAfter(Version.V_2_17_0))) { + // During the remote store node bootstrap, the cluster state is not initialised + // Otherwise, the cluster state is initialised and available with the min node version information PathInput pathInput = PathInput.builder().basePath(basePath()).indexUUID(seed).build(); testBlobPath = PathType.HASHED_PREFIX.path(pathInput, FNV_1A_COMPOSITE_1); } else { diff --git a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index 269a4c87dfb72..59d3b110aeca8 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -198,7 +198,7 @@ public static ThreadPoolType fromType(String type) { map.put(Names.REMOTE_PURGE, ThreadPoolType.SCALING); map.put(Names.REMOTE_REFRESH_RETRY, ThreadPoolType.SCALING); map.put(Names.REMOTE_RECOVERY, ThreadPoolType.SCALING); - map.put(Names.REMOTE_STATE_READ, ThreadPoolType.SCALING); + map.put(Names.REMOTE_STATE_READ, ThreadPoolType.FIXED); map.put(Names.INDEX_SEARCHER, ThreadPoolType.RESIZABLE); map.put(Names.REMOTE_STATE_CHECKSUM, ThreadPoolType.FIXED); THREAD_POOL_TYPES = Collections.unmodifiableMap(map); @@ -306,7 +306,7 @@ public ThreadPool( ); builders.put( Names.REMOTE_STATE_READ, - new ScalingExecutorBuilder(Names.REMOTE_STATE_READ, 1, boundedBy(4 * allocatedProcessors, 4, 32), TimeValue.timeValueMinutes(5)) + new FixedExecutorBuilder(settings, Names.REMOTE_STATE_READ, boundedBy(4 * allocatedProcessors, 4, 32), 120000) ); builders.put( Names.INDEX_SEARCHER, diff --git a/server/src/main/java/org/opensearch/transport/TcpTransport.java b/server/src/main/java/org/opensearch/transport/TcpTransport.java index f56cd146ce953..f80a29872a78d 100644 --- a/server/src/main/java/org/opensearch/transport/TcpTransport.java +++ b/server/src/main/java/org/opensearch/transport/TcpTransport.java @@ -521,38 +521,8 @@ private BoundTransportAddress createBoundTransportAddress(ProfileSettings profil throw new BindTransportException("Failed to resolve publish address", e); } - final int publishPort = resolvePublishPort(profileSettings, boundAddresses, publishInetAddress); - final TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); - return new BoundTransportAddress(transportBoundAddresses, publishAddress); - } - - // package private for tests - static int resolvePublishPort(ProfileSettings profileSettings, List<InetSocketAddress> boundAddresses, InetAddress publishInetAddress) { - int publishPort = profileSettings.publishPort; - - // if port not explicitly provided, search for port of address in boundAddresses that matches publishInetAddress - if (publishPort < 0) { - for (InetSocketAddress boundAddress : boundAddresses) { - InetAddress boundInetAddress = boundAddress.getAddress(); - if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { - publishPort = boundAddress.getPort(); - break; - } - } - } - - // if no matching boundAddress found, check if there is a unique port for all bound addresses - if (publishPort < 0) { - final Set<Integer> ports = new HashSet<>(); - for (InetSocketAddress boundAddress : boundAddresses) { - ports.add(boundAddress.getPort()); - } - if (ports.size() == 1) { - publishPort = ports.iterator().next(); - } - } - - if (publishPort < 0) { + final int publishPort = Transport.resolvePublishPort(profileSettings.publishPort, boundAddresses, publishInetAddress); + if (publishPort == -1) { String profileExplanation = profileSettings.isDefaultProfile ?
"" : " for profile " + profileSettings.profileName; throw new BindTransportException( "Failed to auto-resolve publish port" @@ -568,7 +538,9 @@ static int resolvePublishPort(ProfileSettings profileSettings, List boundAddresses, InetAddress publishInetAddress) { + if (publishPort < 0) { + for (InetSocketAddress boundAddress : boundAddresses) { + InetAddress boundInetAddress = boundAddress.getAddress(); + if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) { + publishPort = boundAddress.getPort(); + break; + } + } + } + + if (publishPort < 0) { + final Set ports = new HashSet<>(); + for (InetSocketAddress boundAddress : boundAddresses) { + ports.add(boundAddress.getPort()); + } + if (ports.size() == 1) { + publishPort = ports.iterator().next(); + } + } + + return publishPort; + } + + static int resolveTransportPublishPort(int publishPort, List boundAddresses, InetAddress publishInetAddress) { + return Transport.resolvePublishPort( + publishPort, + boundAddresses.stream().map(TransportAddress::address).collect(Collectors.toList()), + publishInetAddress + ); + } + /** * A unidirectional connection to a {@link DiscoveryNode} * diff --git a/server/src/test/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsActionTests.java new file mode 100644 index 0000000000000..ea455d607f058 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/admin/indices/replication/TransportSegmentReplicationStatsActionTests.java @@ -0,0 +1,595 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.admin.indices.replication; + +import org.opensearch.Version; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.block.ClusterBlock; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.block.ClusterBlocks; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.routing.AllocationId; +import org.opensearch.cluster.routing.RoutingTable; +import org.opensearch.cluster.routing.ShardIterator; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.routing.ShardsIterator; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.action.support.DefaultShardOperationFailedException; +import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.index.IndexService; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.SegmentReplicationPerGroupStats; +import org.opensearch.index.SegmentReplicationPressureService; +import org.opensearch.index.SegmentReplicationShardStats; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.indices.IndicesService; +import org.opensearch.indices.replication.SegmentReplicationState; +import org.opensearch.indices.replication.SegmentReplicationTargetService; +import org.opensearch.indices.replication.common.ReplicationLuceneIndex; +import org.opensearch.indices.replication.common.ReplicationTimer; +import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.transport.TransportService; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TransportSegmentReplicationStatsActionTests extends OpenSearchTestCase { + @Mock + private ClusterService clusterService; + @Mock + private TransportService transportService; + @Mock + private IndicesService indicesService; + @Mock + private SegmentReplicationTargetService targetService; + @Mock + private ActionFilters actionFilters; + @Mock + private IndexNameExpressionResolver indexNameExpressionResolver; + @Mock + private SegmentReplicationPressureService pressureService; + @Mock + private IndexShard indexShard; + @Mock + private IndexService indexService; + + private TransportSegmentReplicationStatsAction action; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.openMocks(this); + super.setUp(); + action = new TransportSegmentReplicationStatsAction( + clusterService, + transportService, + indicesService, + targetService, + actionFilters, + indexNameExpressionResolver, + pressureService + ); + } + + public void testShardReturnsAllTheShardsForTheIndex() { + SegmentReplicationStatsRequest segmentReplicationStatsRequest = mock(SegmentReplicationStatsRequest.class); + String[] concreteIndices = new String[] { "test-index" }; + ClusterState clusterState = mock(ClusterState.class); + RoutingTable routingTables 
= mock(RoutingTable.class); + ShardsIterator shardsIterator = mock(ShardIterator.class); + + when(clusterState.routingTable()).thenReturn(routingTables); + when(routingTables.allShardsIncludingRelocationTargets(any())).thenReturn(shardsIterator); + assertEquals(shardsIterator, action.shards(clusterState, segmentReplicationStatsRequest, concreteIndices)); + } + + public void testShardOperationWithPrimaryShard() { + ShardRouting shardRouting = mock(ShardRouting.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + SegmentReplicationStatsRequest request = new SegmentReplicationStatsRequest(); + + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.primary()).thenReturn(true); + when(indicesService.indexServiceSafe(shardId.getIndex())).thenReturn(indexService); + when(indexService.getShard(shardId.id())).thenReturn(indexShard); + when(indexShard.indexSettings()).thenReturn(createIndexSettingsWithSegRepEnabled()); + + SegmentReplicationShardStatsResponse response = action.shardOperation(request, shardRouting); + + assertNotNull(response); + verify(pressureService).getStatsForShard(any()); + } + + public void testShardOperationWithReplicaShard() { + ShardRouting shardRouting = mock(ShardRouting.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + SegmentReplicationStatsRequest request = new SegmentReplicationStatsRequest(); + request.activeOnly(false); + SegmentReplicationState completedSegmentReplicationState = mock(SegmentReplicationState.class); + + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.primary()).thenReturn(false); + when(indicesService.indexServiceSafe(shardId.getIndex())).thenReturn(indexService); + when(indexService.getShard(shardId.id())).thenReturn(indexShard); + when(indexShard.indexSettings()).thenReturn(createIndexSettingsWithSegRepEnabled()); + when(targetService.getSegmentReplicationState(shardId)).thenReturn(completedSegmentReplicationState); + + SegmentReplicationShardStatsResponse response = action.shardOperation(request, shardRouting); + + assertNotNull(response); + assertNull(response.getPrimaryStats()); + assertNotNull(response.getReplicaStats()); + verify(targetService).getSegmentReplicationState(shardId); + } + + public void testShardOperationWithReplicaShardActiveOnly() { + ShardRouting shardRouting = mock(ShardRouting.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + SegmentReplicationStatsRequest request = new SegmentReplicationStatsRequest(); + request.activeOnly(true); + SegmentReplicationState onGoingSegmentReplicationState = mock(SegmentReplicationState.class); + + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.primary()).thenReturn(false); + when(indicesService.indexServiceSafe(shardId.getIndex())).thenReturn(indexService); + when(indexService.getShard(shardId.id())).thenReturn(indexShard); + when(indexShard.indexSettings()).thenReturn(createIndexSettingsWithSegRepEnabled()); + when(targetService.getOngoingEventSegmentReplicationState(shardId)).thenReturn(onGoingSegmentReplicationState); + + SegmentReplicationShardStatsResponse response = action.shardOperation(request, shardRouting); + + assertNotNull(response); + assertNull(response.getPrimaryStats()); + assertNotNull(response.getReplicaStats()); + verify(targetService).getOngoingEventSegmentReplicationState(shardId); + } + + public void testComputeBytesRemainingToReplicateWhenCompletedAndOngoingStateNotNull() { + ShardRouting shardRouting = 
mock(ShardRouting.class); + SegmentReplicationState completedSegmentReplicationState = mock(SegmentReplicationState.class); + SegmentReplicationState onGoingSegmentReplicationState = mock(SegmentReplicationState.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + AllocationId allocationId = AllocationId.newInitializing(); + ReplicationTimer replicationTimerCompleted = mock(ReplicationTimer.class); + ReplicationTimer replicationTimerOngoing = mock(ReplicationTimer.class); + long time1 = 10; + long time2 = 15; + ReplicationLuceneIndex replicationLuceneIndex = new ReplicationLuceneIndex(); + replicationLuceneIndex.addFileDetail("name1", 10, false); + replicationLuceneIndex.addFileDetail("name2", 15, false); + + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.allocationId()).thenReturn(allocationId); + when(targetService.getlatestCompletedEventSegmentReplicationState(shardId)).thenReturn(completedSegmentReplicationState); + when(targetService.getOngoingEventSegmentReplicationState(shardId)).thenReturn(onGoingSegmentReplicationState); + when(completedSegmentReplicationState.getTimer()).thenReturn(replicationTimerCompleted); + when(onGoingSegmentReplicationState.getTimer()).thenReturn(replicationTimerOngoing); + when(replicationTimerOngoing.time()).thenReturn(time1); + when(replicationTimerCompleted.time()).thenReturn(time2); + when(onGoingSegmentReplicationState.getIndex()).thenReturn(replicationLuceneIndex); + + SegmentReplicationShardStats segmentReplicationShardStats = action.computeSegmentReplicationShardStats(shardRouting); + + assertNotNull(segmentReplicationShardStats); + assertEquals(25, segmentReplicationShardStats.getBytesBehindCount()); + assertEquals(10, segmentReplicationShardStats.getCurrentReplicationLagMillis()); + assertEquals(15, segmentReplicationShardStats.getLastCompletedReplicationTimeMillis()); + + verify(targetService).getlatestCompletedEventSegmentReplicationState(shardId); + verify(targetService).getOngoingEventSegmentReplicationState(shardId); + } + + public void testCalculateBytesRemainingToReplicateWhenNoCompletedState() { + ShardRouting shardRouting = mock(ShardRouting.class); + SegmentReplicationState onGoingSegmentReplicationState = mock(SegmentReplicationState.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + AllocationId allocationId = AllocationId.newInitializing(); + ReplicationTimer replicationTimerOngoing = mock(ReplicationTimer.class); + long time1 = 10; + ReplicationLuceneIndex replicationLuceneIndex = new ReplicationLuceneIndex(); + replicationLuceneIndex.addFileDetail("name1", 10, false); + replicationLuceneIndex.addFileDetail("name2", 15, false); + + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.allocationId()).thenReturn(allocationId); + when(targetService.getOngoingEventSegmentReplicationState(shardId)).thenReturn(onGoingSegmentReplicationState); + when(onGoingSegmentReplicationState.getTimer()).thenReturn(replicationTimerOngoing); + when(replicationTimerOngoing.time()).thenReturn(time1); + when(onGoingSegmentReplicationState.getIndex()).thenReturn(replicationLuceneIndex); + + SegmentReplicationShardStats segmentReplicationShardStats = action.computeSegmentReplicationShardStats(shardRouting); + + assertNotNull(segmentReplicationShardStats); + assertEquals(25, segmentReplicationShardStats.getBytesBehindCount()); + assertEquals(10, segmentReplicationShardStats.getCurrentReplicationLagMillis()); + assertEquals(0, 
segmentReplicationShardStats.getLastCompletedReplicationTimeMillis()); + + verify(targetService).getlatestCompletedEventSegmentReplicationState(shardId); + verify(targetService).getOngoingEventSegmentReplicationState(shardId); + } + + public void testCalculateBytesRemainingToReplicateWhenNoOnGoingState() { + ShardRouting shardRouting = mock(ShardRouting.class); + SegmentReplicationState completedSegmentReplicationState = mock(SegmentReplicationState.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + AllocationId allocationId = AllocationId.newInitializing(); + ReplicationTimer replicationTimerCompleted = mock(ReplicationTimer.class); + long time2 = 15; + + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.allocationId()).thenReturn(allocationId); + when(targetService.getlatestCompletedEventSegmentReplicationState(shardId)).thenReturn(completedSegmentReplicationState); + when(completedSegmentReplicationState.getTimer()).thenReturn(replicationTimerCompleted); + when(replicationTimerCompleted.time()).thenReturn(time2); + + SegmentReplicationShardStats segmentReplicationShardStats = action.computeSegmentReplicationShardStats(shardRouting); + + assertNotNull(segmentReplicationShardStats); + assertEquals(0, segmentReplicationShardStats.getBytesBehindCount()); + assertEquals(0, segmentReplicationShardStats.getCurrentReplicationLagMillis()); + assertEquals(15, segmentReplicationShardStats.getLastCompletedReplicationTimeMillis()); + + verify(targetService).getlatestCompletedEventSegmentReplicationState(shardId); + verify(targetService).getOngoingEventSegmentReplicationState(shardId); + } + + public void testCalculateBytesRemainingToReplicateWhenNoCompletedAndOngoingState() { + ShardRouting shardRouting = mock(ShardRouting.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + AllocationId allocationId = AllocationId.newInitializing(); + when(shardRouting.shardId()).thenReturn(shardId); + when(shardRouting.allocationId()).thenReturn(allocationId); + + SegmentReplicationShardStats segmentReplicationShardStats = action.computeSegmentReplicationShardStats(shardRouting); + + assertNotNull(segmentReplicationShardStats); + assertEquals(0, segmentReplicationShardStats.getBytesBehindCount()); + assertEquals(0, segmentReplicationShardStats.getCurrentReplicationLagMillis()); + assertEquals(0, segmentReplicationShardStats.getLastCompletedReplicationTimeMillis()); + + verify(targetService).getlatestCompletedEventSegmentReplicationState(shardId); + verify(targetService).getOngoingEventSegmentReplicationState(shardId); + } + + public void testNewResponseWhenAllReplicasReturnResponseCombinesTheResults() { + SegmentReplicationStatsRequest request = new SegmentReplicationStatsRequest(); + List<DefaultShardOperationFailedException> shardFailures = new ArrayList<>(); + String[] shards = { "0", "1" }; + request.shards(shards); + + int totalShards = 6; + int successfulShards = 6; + int failedShard = 0; + String allocIdOne = "allocIdOne"; + String allocIdTwo = "allocIdTwo"; + String allocIdThree = "allocIdThree"; + String allocIdFour = "allocIdFour"; + String allocIdFive = "allocIdFive"; + String allocIdSix = "allocIdSix"; + + ShardId shardId0 = mock(ShardId.class); + ShardRouting primary0 = mock(ShardRouting.class); + ShardRouting replica0 = mock(ShardRouting.class); + ShardRouting searchReplica0 = mock(ShardRouting.class); + + ShardId shardId1 = mock(ShardId.class); + ShardRouting primary1 = mock(ShardRouting.class); + ShardRouting replica1 = mock(ShardRouting.class); + 
ShardRouting searchReplica1 = mock(ShardRouting.class); + + when(shardId0.getId()).thenReturn(0); + when(shardId0.getIndexName()).thenReturn("test-index-1"); + when(primary0.shardId()).thenReturn(shardId0); + when(replica0.shardId()).thenReturn(shardId0); + when(searchReplica0.shardId()).thenReturn(shardId0); + + when(shardId1.getId()).thenReturn(1); + when(shardId1.getIndexName()).thenReturn("test-index-1"); + when(primary1.shardId()).thenReturn(shardId1); + when(replica1.shardId()).thenReturn(shardId1); + when(searchReplica1.shardId()).thenReturn(shardId1); + + AllocationId allocationIdOne = mock(AllocationId.class); + AllocationId allocationIdTwo = mock(AllocationId.class); + AllocationId allocationIdThree = mock(AllocationId.class); + AllocationId allocationIdFour = mock(AllocationId.class); + AllocationId allocationIdFive = mock(AllocationId.class); + AllocationId allocationIdSix = mock(AllocationId.class); + + when(allocationIdOne.getId()).thenReturn(allocIdOne); + when(allocationIdTwo.getId()).thenReturn(allocIdTwo); + when(allocationIdThree.getId()).thenReturn(allocIdThree); + when(allocationIdFour.getId()).thenReturn(allocIdFour); + when(allocationIdFive.getId()).thenReturn(allocIdFive); + when(allocationIdSix.getId()).thenReturn(allocIdSix); + when(primary0.allocationId()).thenReturn(allocationIdOne); + when(replica0.allocationId()).thenReturn(allocationIdTwo); + when(searchReplica0.allocationId()).thenReturn(allocationIdThree); + when(primary1.allocationId()).thenReturn(allocationIdFour); + when(replica1.allocationId()).thenReturn(allocationIdFive); + when(searchReplica1.allocationId()).thenReturn(allocationIdSix); + + when(primary0.isSearchOnly()).thenReturn(false); + when(replica0.isSearchOnly()).thenReturn(false); + when(searchReplica0.isSearchOnly()).thenReturn(true); + when(primary1.isSearchOnly()).thenReturn(false); + when(replica1.isSearchOnly()).thenReturn(false); + when(searchReplica1.isSearchOnly()).thenReturn(true); + + Set<SegmentReplicationShardStats> segmentReplicationShardStats0 = new HashSet<>(); + SegmentReplicationShardStats segmentReplicationShardStatsOfReplica0 = new SegmentReplicationShardStats(allocIdTwo, 0, 0, 0, 0, 0); + segmentReplicationShardStats0.add(segmentReplicationShardStatsOfReplica0); + + Set<SegmentReplicationShardStats> segmentReplicationShardStats1 = new HashSet<>(); + SegmentReplicationShardStats segmentReplicationShardStatsOfReplica1 = new SegmentReplicationShardStats(allocIdFive, 0, 0, 0, 0, 0); + segmentReplicationShardStats1.add(segmentReplicationShardStatsOfReplica1); + + SegmentReplicationPerGroupStats segmentReplicationPerGroupStats0 = new SegmentReplicationPerGroupStats( + shardId0, + segmentReplicationShardStats0, + 0 + ); + + SegmentReplicationPerGroupStats segmentReplicationPerGroupStats1 = new SegmentReplicationPerGroupStats( + shardId1, + segmentReplicationShardStats1, + 0 + ); + + SegmentReplicationState segmentReplicationState0 = mock(SegmentReplicationState.class); + SegmentReplicationState searchReplicaSegmentReplicationState0 = mock(SegmentReplicationState.class); + SegmentReplicationState segmentReplicationState1 = mock(SegmentReplicationState.class); + SegmentReplicationState searchReplicaSegmentReplicationState1 = mock(SegmentReplicationState.class); + + when(segmentReplicationState0.getShardRouting()).thenReturn(replica0); + when(searchReplicaSegmentReplicationState0.getShardRouting()).thenReturn(searchReplica0); + when(segmentReplicationState1.getShardRouting()).thenReturn(replica1); + when(searchReplicaSegmentReplicationState1.getShardRouting()).thenReturn(searchReplica1); + + 
List<SegmentReplicationShardStatsResponse> responses = List.of( + new SegmentReplicationShardStatsResponse(segmentReplicationPerGroupStats0), + new SegmentReplicationShardStatsResponse(segmentReplicationState0), + new SegmentReplicationShardStatsResponse(searchReplicaSegmentReplicationState0), + new SegmentReplicationShardStatsResponse(segmentReplicationPerGroupStats1), + new SegmentReplicationShardStatsResponse(segmentReplicationState1), + new SegmentReplicationShardStatsResponse(searchReplicaSegmentReplicationState1) + ); + + SegmentReplicationStatsResponse response = action.newResponse( + request, + totalShards, + successfulShards, + failedShard, + responses, + shardFailures, + ClusterState.EMPTY_STATE + ); + + List<SegmentReplicationPerGroupStats> responseStats = response.getReplicationStats().get("test-index-1"); + SegmentReplicationPerGroupStats primStats0 = responseStats.get(0); + Set<SegmentReplicationShardStats> replicaStats0 = primStats0.getReplicaStats(); + assertEquals(2, replicaStats0.size()); + for (SegmentReplicationShardStats replicaStat : replicaStats0) { + if (replicaStat.getAllocationId().equals(allocIdTwo)) { + assertEquals(segmentReplicationState0, replicaStat.getCurrentReplicationState()); + } + + if (replicaStat.getAllocationId().equals(allocIdThree)) { + assertEquals(searchReplicaSegmentReplicationState0, replicaStat.getCurrentReplicationState()); + } + } + + SegmentReplicationPerGroupStats primStats1 = responseStats.get(1); + Set<SegmentReplicationShardStats> replicaStats1 = primStats1.getReplicaStats(); + assertEquals(2, replicaStats1.size()); + for (SegmentReplicationShardStats replicaStat : replicaStats1) { + if (replicaStat.getAllocationId().equals(allocIdFive)) { + assertEquals(segmentReplicationState1, replicaStat.getCurrentReplicationState()); + } + + if (replicaStat.getAllocationId().equals(allocIdSix)) { + assertEquals(searchReplicaSegmentReplicationState1, replicaStat.getCurrentReplicationState()); + } + } + } + + public void testNewResponseWhenShardsToFetchEmptyAndResponsesContainsNull() { + SegmentReplicationStatsRequest request = new SegmentReplicationStatsRequest(); + List<DefaultShardOperationFailedException> shardFailures = new ArrayList<>(); + String[] shards = {}; + request.shards(shards); + + int totalShards = 3; + int successfulShards = 3; + int failedShard = 0; + String allocIdOne = "allocIdOne"; + String allocIdTwo = "allocIdTwo"; + ShardId shardIdOne = mock(ShardId.class); + ShardId shardIdTwo = mock(ShardId.class); + ShardId shardIdThree = mock(ShardId.class); + ShardRouting shardRoutingOne = mock(ShardRouting.class); + ShardRouting shardRoutingTwo = mock(ShardRouting.class); + ShardRouting shardRoutingThree = mock(ShardRouting.class); + when(shardIdOne.getId()).thenReturn(1); + when(shardIdTwo.getId()).thenReturn(2); + when(shardIdThree.getId()).thenReturn(3); + when(shardRoutingOne.shardId()).thenReturn(shardIdOne); + when(shardRoutingTwo.shardId()).thenReturn(shardIdTwo); + when(shardRoutingThree.shardId()).thenReturn(shardIdThree); + AllocationId allocationId = mock(AllocationId.class); + when(allocationId.getId()).thenReturn(allocIdOne); + when(shardRoutingTwo.allocationId()).thenReturn(allocationId); + when(shardIdOne.getIndexName()).thenReturn("test-index"); + + Set<SegmentReplicationShardStats> segmentReplicationShardStats = new HashSet<>(); + SegmentReplicationShardStats segmentReplicationShardStatsOfReplica = new SegmentReplicationShardStats(allocIdOne, 0, 0, 0, 0, 0); + segmentReplicationShardStats.add(segmentReplicationShardStatsOfReplica); + SegmentReplicationPerGroupStats segmentReplicationPerGroupStats = new SegmentReplicationPerGroupStats( + shardIdOne, + segmentReplicationShardStats, + 0 + ); + + SegmentReplicationState 
segmentReplicationState = mock(SegmentReplicationState.class); + SegmentReplicationShardStats segmentReplicationShardStatsFromSearchReplica = mock(SegmentReplicationShardStats.class); + when(segmentReplicationShardStatsFromSearchReplica.getAllocationId()).thenReturn("alloc2"); + when(segmentReplicationState.getShardRouting()).thenReturn(shardRoutingTwo); + + List<SegmentReplicationShardStatsResponse> responses = new ArrayList<>(); + responses.add(null); + responses.add(new SegmentReplicationShardStatsResponse(segmentReplicationPerGroupStats)); + responses.add(new SegmentReplicationShardStatsResponse(segmentReplicationState)); + + SegmentReplicationStatsResponse response = action.newResponse( + request, + totalShards, + successfulShards, + failedShard, + responses, + shardFailures, + ClusterState.EMPTY_STATE + ); + + List<SegmentReplicationPerGroupStats> responseStats = response.getReplicationStats().get("test-index"); + SegmentReplicationPerGroupStats primStats = responseStats.get(0); + Set<SegmentReplicationShardStats> segRpShardStatsSet = primStats.getReplicaStats(); + + for (SegmentReplicationShardStats segRpShardStats : segRpShardStatsSet) { + if (segRpShardStats.getAllocationId().equals(allocIdOne)) { + assertEquals(segmentReplicationState, segRpShardStats.getCurrentReplicationState()); + } + + if (segRpShardStats.getAllocationId().equals(allocIdTwo)) { + assertEquals(segmentReplicationShardStatsFromSearchReplica, segRpShardStats); + } + } + } + + public void testShardOperationWithSegRepDisabled() { + ShardRouting shardRouting = mock(ShardRouting.class); + ShardId shardId = new ShardId(new Index("test-index", "test-uuid"), 0); + SegmentReplicationStatsRequest request = new SegmentReplicationStatsRequest(); + + when(shardRouting.shardId()).thenReturn(shardId); + when(indicesService.indexServiceSafe(shardId.getIndex())).thenReturn(indexService); + when(indexService.getShard(shardId.id())).thenReturn(indexShard); + when(indexShard.indexSettings()).thenReturn(createIndexSettingsWithSegRepDisabled()); + + SegmentReplicationShardStatsResponse response = action.shardOperation(request, shardRouting); + + assertNull(response); + } + + public void testGlobalBlockCheck() { + ClusterBlock writeClusterBlock = new ClusterBlock( + 1, + "uuid", + "", + true, + true, + true, + RestStatus.OK, + EnumSet.of(ClusterBlockLevel.METADATA_WRITE) + ); + + ClusterBlock readClusterBlock = new ClusterBlock( + 1, + "uuid", + "", + true, + true, + true, + RestStatus.OK, + EnumSet.of(ClusterBlockLevel.METADATA_READ) + ); + + ClusterBlocks.Builder builder = ClusterBlocks.builder(); + builder.addGlobalBlock(writeClusterBlock); + ClusterState metadataWriteBlockedState = ClusterState.builder(ClusterState.EMPTY_STATE).blocks(builder).build(); + assertNull(action.checkGlobalBlock(metadataWriteBlockedState, new SegmentReplicationStatsRequest())); + + builder = ClusterBlocks.builder(); + builder.addGlobalBlock(readClusterBlock); + ClusterState metadataReadBlockedState = ClusterState.builder(ClusterState.EMPTY_STATE).blocks(builder).build(); + assertNotNull(action.checkGlobalBlock(metadataReadBlockedState, new SegmentReplicationStatsRequest())); + } + + public void testIndexBlockCheck() { + ClusterBlock writeClusterBlock = new ClusterBlock( + 1, + "uuid", + "", + true, + true, + true, + RestStatus.OK, + EnumSet.of(ClusterBlockLevel.METADATA_WRITE) + ); + + ClusterBlock readClusterBlock = new ClusterBlock( + 1, + "uuid", + "", + true, + true, + true, + RestStatus.OK, + EnumSet.of(ClusterBlockLevel.METADATA_READ) + ); + + String indexName = "test"; + ClusterBlocks.Builder builder = ClusterBlocks.builder(); + 
builder.addIndexBlock(indexName, writeClusterBlock); + ClusterState metadataWriteBlockedState = ClusterState.builder(ClusterState.EMPTY_STATE).blocks(builder).build(); + assertNull(action.checkRequestBlock(metadataWriteBlockedState, new SegmentReplicationStatsRequest(), new String[] { indexName })); + + builder = ClusterBlocks.builder(); + builder.addIndexBlock(indexName, readClusterBlock); + ClusterState metadataReadBlockedState = ClusterState.builder(ClusterState.EMPTY_STATE).blocks(builder).build(); + assertNotNull(action.checkRequestBlock(metadataReadBlockedState, new SegmentReplicationStatsRequest(), new String[] { indexName })); + } + + private IndexSettings createIndexSettingsWithSegRepEnabled() { + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); + + return new IndexSettings(IndexMetadata.builder("test").settings(settings).build(), settings); + } + + private IndexSettings createIndexSettingsWithSegRepDisabled() { + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2) + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); + return new IndexSettings(IndexMetadata.builder("test").settings(settings).build(), settings); + } +} diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java index 6a03a1f79bcde..a7f18aabf8436 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java @@ -194,7 +194,7 @@ public AllocationService createRemoteCapableAllocationService() { } public AllocationService createRemoteCapableAllocationService(String excludeNodes) { - Settings settings = Settings.builder().put("cluster.routing.allocation.exclude.node_id", excludeNodes).build(); + Settings settings = Settings.builder().put("cluster.routing.allocation.exclude._id", excludeNodes).build(); return new MockAllocationService( randomAllocationDeciders(settings, EMPTY_CLUSTER_SETTINGS, random()), new TestGatewayAllocator(), diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsRebalanceShardsTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsRebalanceShardsTests.java index e1c0a7eff1f6e..e55a9de160114 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsRebalanceShardsTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsRebalanceShardsTests.java @@ -25,25 +25,51 @@ public class RemoteShardsRebalanceShardsTests extends RemoteShardsBalancerBaseTe * Post rebalance primaries should be balanced across all the nodes. 
*/ public void testShardAllocationAndRebalance() { - int localOnlyNodes = 20; - int remoteCapableNodes = 40; - int localIndices = 40; - int remoteIndices = 80; + final int localOnlyNodes = 20; + final int remoteCapableNodes = 40; + final int halfRemoteCapableNodes = remoteCapableNodes / 2; + final int localIndices = 40; + final int remoteIndices = 80; ClusterState clusterState = createInitialCluster(localOnlyNodes, remoteCapableNodes, localIndices, remoteIndices); - AllocationService service = this.createRemoteCapableAllocationService(); + final StringBuilder excludeNodes = new StringBuilder(); + for (int i = 0; i < halfRemoteCapableNodes; i++) { + excludeNodes.append(getNodeId(i, true)); + if (i != (remoteCapableNodes / 2 - 1)) { + excludeNodes.append(", "); + } + } + AllocationService service = this.createRemoteCapableAllocationService(excludeNodes.toString()); clusterState = allocateShardsAndBalance(clusterState, service); RoutingNodes routingNodes = clusterState.getRoutingNodes(); RoutingAllocation allocation = getRoutingAllocation(clusterState, routingNodes); - final Map<String, Integer> nodePrimariesCounter = getShardCounterPerNodeForRemoteCapablePool(clusterState, allocation, true); - final Map<String, Integer> nodeReplicaCounter = getShardCounterPerNodeForRemoteCapablePool(clusterState, allocation, false); + Map<String, Integer> nodePrimariesCounter = getShardCounterPerNodeForRemoteCapablePool(clusterState, allocation, true); + Map<String, Integer> nodeReplicaCounter = getShardCounterPerNodeForRemoteCapablePool(clusterState, allocation, false); int avgPrimariesPerNode = getTotalShardCountAcrossNodes(nodePrimariesCounter) / remoteCapableNodes; - // Primary and replica are balanced post first reroute + // Primary and replica are balanced after first allocating unassigned + for (RoutingNode node : routingNodes) { + if (RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getNodePool(node))) { + if (Integer.parseInt(node.nodeId().split("-")[4]) < halfRemoteCapableNodes) { + assertEquals(0, (int) nodePrimariesCounter.getOrDefault(node.nodeId(), 0)); + } else { + assertEquals(avgPrimariesPerNode * 2, (int) nodePrimariesCounter.get(node.nodeId())); + } + assertTrue(nodeReplicaCounter.getOrDefault(node.nodeId(), 0) >= 0); + } + } + + // Remove exclude constraint and rebalance + service = this.createRemoteCapableAllocationService(); + clusterState = allocateShardsAndBalance(clusterState, service); + routingNodes = clusterState.getRoutingNodes(); + allocation = getRoutingAllocation(clusterState, routingNodes); + nodePrimariesCounter = getShardCounterPerNodeForRemoteCapablePool(clusterState, allocation, true); + nodeReplicaCounter = getShardCounterPerNodeForRemoteCapablePool(clusterState, allocation, false); for (RoutingNode node : routingNodes) { if (RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getNodePool(node))) { - assertInRange(nodePrimariesCounter.get(node.nodeId()), avgPrimariesPerNode, remoteCapableNodes - 1); - assertTrue(nodeReplicaCounter.get(node.nodeId()) >= 0); + assertEquals(avgPrimariesPerNode, (int) nodePrimariesCounter.get(node.nodeId())); + assertTrue(nodeReplicaCounter.getOrDefault(node.nodeId(), 0) >= 0); } } } diff --git a/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java b/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java index be07aa0d05e9f..e3684178a18ea 100644 --- a/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java +++ b/server/src/test/java/org/opensearch/gateway/remote/RemoteClusterStateServiceTests.java @@ -2354,6 +2354,14 @@ 
public void testReadLatestClusterStateFromCache() throws IOException { .getState(clusterState.getClusterName().value(), expectedManifest); assertEquals(stateFromCache.getMetadata(), state.getMetadata()); + ClusterState stateFromCache2 = remoteClusterStateService.getClusterStateForManifest( + clusterState.getClusterName().value(), + expectedManifest, + "nodeA", + true + ); + assertEquals(stateFromCache2.getMetadata(), state.getMetadata()); + final ClusterMetadataManifest notExistMetadata = ClusterMetadataManifest.builder() .indices(List.of()) .clusterTerm(1L) diff --git a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java index c34f13041cb11..a4295289c3109 100644 --- a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java @@ -59,6 +59,7 @@ import org.opensearch.test.rest.FakeRestRequest; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.Transport; import org.junit.After; import org.junit.Before; @@ -70,8 +71,6 @@ import static java.net.InetAddress.getByName; import static java.util.Arrays.asList; -import static org.opensearch.http.AbstractHttpServerTransport.resolvePublishPort; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class AbstractHttpServerTransportTests extends OpenSearchTestCase { @@ -101,47 +100,40 @@ public void testHttpPublishPort() throws Exception { int boundPort = randomIntBetween(9000, 9100); int otherBoundPort = randomIntBetween(9200, 9300); - int publishPort = resolvePublishPort( - Settings.builder().put(HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT.getKey(), 9080).build(), - randomAddresses(), - getByName("127.0.0.2") - ); + int publishPort = Transport.resolveTransportPublishPort(9080, randomAddresses(), getByName("127.0.0.2")); assertThat("Publish port should be explicitly set to 9080", publishPort, equalTo(9080)); - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from matched address", publishPort, equalTo(boundPort)); - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("127.0.0.1", boundPort), address("127.0.0.2", boundPort)), getByName("127.0.0.3") ); assertThat("Publish port should be derived from unique port of bound addresses", publishPort, equalTo(boundPort)); - final BindHttpException e = expectThrows( - BindHttpException.class, - () -> resolvePublishPort( - Settings.EMPTY, - asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), - getByName("127.0.0.3") - ) + publishPort = Transport.resolveTransportPublishPort( + -1, + asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("127.0.0.3") ); - assertThat(e.getMessage(), containsString("Failed to auto-resolve http publish port")); + assertThat(publishPort, equalTo(-1)); - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be 
derived from matching wildcard address", publishPort, equalTo(boundPort)); if (NetworkUtils.SUPPORTS_V6) { - publishPort = resolvePublishPort( - Settings.EMPTY, + publishPort = Transport.resolveTransportPublishPort( + -1, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("::1") ); diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java index 402ed1dbee98a..5603fe4e30f9f 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; @@ -25,6 +26,7 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.opensearch.common.lucene.Lucene; +import org.opensearch.common.network.InetAddresses; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; import org.opensearch.index.codec.composite.CompositeIndexReader; @@ -36,6 +38,8 @@ import org.opensearch.index.mapper.NumberFieldMapper; import java.io.IOException; +import java.net.InetAddress; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -65,12 +69,15 @@ public void testStarTreeKeywordDocValues() throws IOException { doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); iw.addDocument(doc); doc = new Document(); doc.add(new StringField("_id", "2", Field.Store.NO)); doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); + iw.addDocument(doc); iw.flush(); iw.deleteDocuments(new Term("_id", "2")); @@ -80,12 +87,14 @@ public void testStarTreeKeywordDocValues() throws IOException { doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); iw.addDocument(doc); doc = new Document(); doc.add(new StringField("_id", "4", Field.Store.NO)); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new 
BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); iw.addDocument(doc); iw.flush(); iw.deleteDocuments(new Term("_id", "4")); @@ -166,6 +175,9 @@ public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { doc.add(new SortedSetDocValuesField("keyword2", new BytesRef(keyword2Value))); map.put(keyword1Value + "-" + keyword2Value, sndvValue + map.getOrDefault(keyword1Value + "-" + keyword2Value, 0)); + doc.add( + new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10." + i)))) + ); iw.addDocument(doc); documents.put(id, doc); } @@ -221,9 +233,7 @@ public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { SortedSetStarTreeValuesIterator k1 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator( "keyword1" ); - SortedSetStarTreeValuesIterator k2 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator( - "keyword2" - ); + SortedSetStarTreeValuesIterator k2 = (SortedSetStarTreeValuesIterator) starTreeValues.getDimensionValuesIterator("ip1"); for (StarTreeDocument starDoc : actualStarTreeDocuments) { String keyword1 = null; if (starDoc.dimensions[0] != null) { @@ -232,7 +242,11 @@ public void testStarTreeKeywordDocValuesWithDeletions() throws IOException { String keyword2 = null; if (starDoc.dimensions[1] != null) { - keyword2 = k2.lookupOrd(starDoc.dimensions[1]).utf8ToString(); + BytesRef encoded = k2.lookupOrd(starDoc.dimensions[1]); + InetAddress address = InetAddressPoint.decode( + Arrays.copyOfRange(encoded.bytes, encoded.offset, encoded.offset + encoded.length) + ); + keyword2 = InetAddresses.toAddrString(address); } double metric = (double) starDoc.metrics[0]; if (map.containsKey(keyword1 + "-" + keyword2)) { @@ -254,21 +268,28 @@ public void testStarKeywordDocValuesWithMissingDocs() throws IOException { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); + iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 1)); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); iw.addDocument(doc); iw.forceMerge(1); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); + iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); + iw.addDocument(doc); iw.forceMerge(1); iw.close(); @@ -340,11 +361,14 @@ public void testStarKeywordDocValuesWithMissingDocsInSegment() throws IOExceptio doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text1"))); doc.add(new 
SortedSetDocValuesField("keyword2", new BytesRef("text2"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.10"))))); iw.addDocument(doc); doc = new Document(); doc.add(new SortedNumericDocValuesField("sndv", 2)); doc.add(new SortedSetDocValuesField("keyword1", new BytesRef("text11"))); doc.add(new SortedSetDocValuesField("keyword2", new BytesRef("text22"))); + doc.add(new SortedSetDocValuesField("ip1", new BytesRef(InetAddressPoint.encode(InetAddresses.forString("10.10.10.11"))))); + iw.addDocument(doc); iw.forceMerge(1); iw.close(); @@ -538,7 +562,7 @@ protected XContentBuilder getMapping() throws IOException { b.field("name", "keyword1"); b.endObject(); b.startObject(); - b.field("name", "keyword2"); + b.field("name", "ip1"); b.endObject(); b.endArray(); b.startArray("metrics"); @@ -566,6 +590,9 @@ protected XContentBuilder getMapping() throws IOException { b.startObject("keyword2"); b.field("type", "keyword"); b.endObject(); + b.startObject("ip1"); + b.field("type", "ip"); + b.endObject(); b.endObject(); }); } diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java index 440268f1f803c..70cc20fe4a9f6 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderFlushFlowTests.java @@ -20,10 +20,11 @@ import org.opensearch.index.codec.composite.LuceneDocValuesConsumerFactory; import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; import org.opensearch.index.compositeindex.datacube.Dimension; -import org.opensearch.index.compositeindex.datacube.KeywordDimension; +import org.opensearch.index.compositeindex.datacube.IpDimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; +import org.opensearch.index.compositeindex.datacube.OrdinalDimension; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; @@ -426,7 +427,7 @@ public void testFlushFlowForKeywords() throws IOException { ); List metricsWithField = List.of(0, 1, 2, 3, 4, 5); - compositeField = getStarTreeFieldWithKeywordField(); + compositeField = getStarTreeFieldWithKeywordField(random().nextBoolean()); SortedSetStarTreeValuesIterator d1sndv = new SortedSetStarTreeValuesIterator(getSortedSetMock(dimList, docsWithField)); SortedSetStarTreeValuesIterator d2sndv = new SortedSetStarTreeValuesIterator(getSortedSetMock(dimList2, docsWithField2)); SortedNumericStarTreeValuesIterator m1sndv = new SortedNumericStarTreeValuesIterator( @@ -531,9 +532,9 @@ private StarTreeField getStarTreeFieldWithMultipleMetrics() { return new StarTreeField("sf", dims, metrics, c); } - private StarTreeField getStarTreeFieldWithKeywordField() { - Dimension d1 = new KeywordDimension("field1"); - Dimension d2 = new KeywordDimension("field3"); + private StarTreeField getStarTreeFieldWithKeywordField(boolean isIp) { + Dimension d1 = isIp ? 
new IpDimension("field1") : new OrdinalDimension("field1"); + Dimension d2 = isIp ? new IpDimension("field3") : new OrdinalDimension("field3"); Metric m1 = new Metric("field2", List.of(MetricStat.SUM)); Metric m2 = new Metric("field2", List.of(MetricStat.VALUE_COUNT)); Metric m3 = new Metric("field2", List.of(MetricStat.AVG)); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java index be16961e781db..74ecff04076b1 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderMergeFlowTests.java @@ -1831,7 +1831,7 @@ public void testMergeFlowWithKeywords() throws IOException { List metricsList2 = List.of(0L, 1L, 2L, 3L, 4L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - compositeField = getStarTreeFieldWithKeywords(); + compositeField = getStarTreeFieldWithKeywords(random().nextBoolean()); StarTreeValues starTreeValues = getStarTreeValuesWithKeywords( getSortedSetMock(dimList, docsWithField), getSortedSetMock(dimList2, docsWithField2), diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java index 9c9beaea4f52c..cca987b6f9b16 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/StarTreeBuilderTestCase.java @@ -32,10 +32,11 @@ import org.opensearch.index.compositeindex.datacube.DataCubeDateTimeUnit; import org.opensearch.index.compositeindex.datacube.DateDimension; import org.opensearch.index.compositeindex.datacube.Dimension; -import org.opensearch.index.compositeindex.datacube.KeywordDimension; +import org.opensearch.index.compositeindex.datacube.IpDimension; import org.opensearch.index.compositeindex.datacube.Metric; import org.opensearch.index.compositeindex.datacube.MetricStat; import org.opensearch.index.compositeindex.datacube.NumericDimension; +import org.opensearch.index.compositeindex.datacube.OrdinalDimension; import org.opensearch.index.compositeindex.datacube.startree.StarTreeDocument; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.StarTreeFieldConfiguration; @@ -352,9 +353,9 @@ protected StarTreeMetadata getStarTreeMetadata( ); } - protected StarTreeField getStarTreeFieldWithKeywords() { - Dimension d1 = new KeywordDimension("field1"); - Dimension d2 = new KeywordDimension("field3"); + protected StarTreeField getStarTreeFieldWithKeywords(boolean ip) { + Dimension d1 = ip ? new IpDimension("field1") : new OrdinalDimension("field1"); + Dimension d2 = ip ? 
new IpDimension("field3") : new OrdinalDimension("field3"); Metric m1 = new Metric("field2", List.of(MetricStat.VALUE_COUNT, MetricStat.SUM)); List dims = List.of(d1, d2); List metrics = List.of(m1); diff --git a/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java index afd9e994ce3ae..7e6aa00c87290 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FlatObjectFieldMapperTests.java @@ -21,6 +21,7 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.DocValueFormat; import java.io.IOException; @@ -397,6 +398,27 @@ public void testDeduplicationValue() throws IOException { assertEquals(new BytesRef("field.labels=3"), fieldValueAndPaths[4].binaryValue()); } + public void testFetchDocValues() throws IOException { + MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "flat_object"))); + { + // test valueWithPathField + MappedFieldType ft = mapperService.fieldType("field.name"); + DocValueFormat format = ft.docValueFormat(null, null); + String storedValue = "field.field.name=1234"; + + Object object = format.format(new BytesRef(storedValue)); + assertEquals("1234", object); + } + + { + // test valueField + MappedFieldType ft = mapperService.fieldType("field"); + Throwable throwable = assertThrows(IllegalArgumentException.class, () -> ft.docValueFormat(null, null)); + assertEquals("Field [field] of type [flat_object] does not support doc_value in root field", throwable.getMessage()); + } + + } + @Override protected void registerParameters(ParameterChecker checker) throws IOException { // In the future we will want to make sure parameter updates are covered. 
diff --git a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java index 8ec34b3eb660c..333cdbcab05c5 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java @@ -1085,6 +1085,9 @@ private XContentBuilder getInvalidMapping( b.startObject(); b.field("name", "keyword1"); b.endObject(); + b.startObject(); + b.field("name", "ip1"); + b.endObject(); } b.endArray(); b.startArray("metrics"); @@ -1117,7 +1120,7 @@ private XContentBuilder getInvalidMapping( if (!invalidDimType) { b.field("type", "integer"); } else { - b.field("type", "ip"); + b.field("type", "wildcard"); } b.endObject(); b.startObject("metric_field"); @@ -1130,6 +1133,9 @@ private XContentBuilder getInvalidMapping( b.startObject("keyword1"); b.field("type", "keyword"); b.endObject(); + b.startObject("ip1"); + b.field("type", "ip"); + b.endObject(); b.endObject(); }); } diff --git a/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java b/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java index b4726bab50198..23c21648b1263 100644 --- a/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java +++ b/server/src/test/java/org/opensearch/threadpool/ScalingThreadPoolTests.java @@ -156,7 +156,6 @@ private int expectedSize(final String threadPoolName, final int numberOfProcesso sizes.put(ThreadPool.Names.REMOTE_PURGE, ThreadPool::halfAllocatedProcessors); sizes.put(ThreadPool.Names.REMOTE_REFRESH_RETRY, ThreadPool::halfAllocatedProcessors); sizes.put(ThreadPool.Names.REMOTE_RECOVERY, ThreadPool::twiceAllocatedProcessors); - sizes.put(ThreadPool.Names.REMOTE_STATE_READ, n -> ThreadPool.boundedBy(4 * n, 4, 32)); return sizes.get(threadPoolName).apply(numberOfProcessors); } diff --git a/server/src/test/java/org/opensearch/transport/PublishPortTests.java b/server/src/test/java/org/opensearch/transport/PublishPortTests.java index 6a41409f6f181..2e5a57c4cdd60 100644 --- a/server/src/test/java/org/opensearch/transport/PublishPortTests.java +++ b/server/src/test/java/org/opensearch/transport/PublishPortTests.java @@ -43,8 +43,6 @@ import static java.net.InetAddress.getByName; import static java.util.Arrays.asList; -import static org.opensearch.transport.TcpTransport.resolvePublishPort; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class PublishPortTests extends OpenSearchTestCase { @@ -73,48 +71,44 @@ public void testPublishPort() throws Exception { } - int publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(settings, profile), + int publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(settings, profile).publishPort, randomAddresses(), getByName("127.0.0.2") ); assertThat("Publish port should be explicitly set", publishPort, equalTo(useProfile ? 
9080 : 9081)); - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from matched address", publishPort, equalTo(boundPort)); - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("127.0.0.1", boundPort), address("127.0.0.2", boundPort)), getByName("127.0.0.3") ); assertThat("Publish port should be derived from unique port of bound addresses", publishPort, equalTo(boundPort)); - try { - resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), - asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), - getByName("127.0.0.3") - ); - fail("Expected BindTransportException as publish_port not specified and non-unique port of bound addresses"); - } catch (BindTransportException e) { - assertThat(e.getMessage(), containsString("Failed to auto-resolve publish port")); - } + int resPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, + asList(address("127.0.0.1", boundPort), address("127.0.0.2", otherBoundPort)), + getByName("127.0.0.3") + ); + assertThat("as publish_port not specified and non-unique port of bound addresses", resPort, equalTo(-1)); - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("127.0.0.1") ); assertThat("Publish port should be derived from matching wildcard address", publishPort, equalTo(boundPort)); if (NetworkUtils.SUPPORTS_V6) { - publishPort = resolvePublishPort( - new TcpTransport.ProfileSettings(baseSettings, profile), + publishPort = Transport.resolvePublishPort( + new TcpTransport.ProfileSettings(baseSettings, profile).publishPort, asList(address("0.0.0.0", boundPort), address("127.0.0.2", otherBoundPort)), getByName("::1") ); diff --git a/settings.gradle b/settings.gradle index 035fe69eda7e9..a24da40069b90 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,7 +10,7 @@ */ plugins { - id "com.gradle.develocity" version "3.18.2" + id "com.gradle.develocity" version "3.19" } ext.disableBuildCache = hasProperty('DISABLE_BUILD_CACHE') || System.getenv().containsKey('DISABLE_BUILD_CACHE') diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 4dd1a2787ee87..02aab575bbaf0 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -72,12 +72,12 @@ dependencies { api "org.eclipse.jetty:jetty-server:${versions.jetty}" api "org.eclipse.jetty.websocket:javax-websocket-server-impl:${versions.jetty}" api 'org.apache.zookeeper:zookeeper:3.9.3' - api "org.apache.commons:commons-text:1.12.0" + api "org.apache.commons:commons-text:1.13.0" api "commons-net:commons-net:3.11.1" api "ch.qos.logback:logback-core:1.5.12" - api "ch.qos.logback:logback-classic:1.5.12" + api "ch.qos.logback:logback-classic:1.5.15" api "org.jboss.xnio:xnio-nio:3.8.16.Final" - api 'org.jline:jline:3.27.1' + 
api 'org.jline:jline:3.28.0' api 'org.apache.commons:commons-configuration2:2.11.0' api 'com.nimbusds:nimbus-jose-jwt:9.47' api ('org.apache.kerby:kerb-admin:2.1.0') {
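The PublishPortTests and AbstractHttpServerTransportTests hunks above capture the behavioral change in this patch: the `Transport` publish-port helpers (`resolveTransportPublishPort` in the HTTP test, `resolvePublishPort` in PublishPortTests) now return -1 where `BindHttpException`/`BindTransportException` used to be thrown. A minimal sketch of the resolution order the tests assert, using a hypothetical `HostPort` record in place of the real `TransportAddress`:

```java
import java.net.InetAddress;
import java.util.List;

public final class PublishPortSketch {
    /** Simplified stand-in for a bound TransportAddress (hypothetical). */
    record HostPort(InetAddress address, int port) {}

    /**
     * Resolution order mirrored from the tests above:
     * 1. an explicitly configured publish port wins;
     * 2. else the port of a bound address matching the publish host (wildcard matches);
     * 3. else the single port shared by all bound addresses;
     * 4. else -1 (previously an exception was thrown here).
     */
    static int resolvePublishPort(int configuredPort, List<HostPort> bound, InetAddress publishHost) {
        if (configuredPort > 0) {
            return configuredPort;
        }
        for (HostPort hp : bound) {
            if (hp.address().isAnyLocalAddress() || hp.address().equals(publishHost)) {
                return hp.port();
            }
        }
        int unique = -1;
        for (HostPort hp : bound) {
            if (unique == -1) {
                unique = hp.port();
            } else if (unique != hp.port()) {
                return -1; // no match and non-unique ports: caller must handle -1
            }
        }
        return unique;
    }

    public static void main(String[] args) throws Exception {
        List<HostPort> bound = List.of(
            new HostPort(InetAddress.getByName("127.0.0.1"), 9300),
            new HostPort(InetAddress.getByName("127.0.0.2"), 9301)
        );
        // Mirrors the updated test: no explicit port, no matching host,
        // non-unique bound ports -> -1 instead of an exception.
        System.out.println(resolvePublishPort(-1, bound, InetAddress.getByName("127.0.0.3")));
    }
}
```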